gpt2_tiny_zh-hk-shikoto / trainer_state.json
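The JSON below is the Hugging Face Trainer state for this run: "log_history" holds one entry per logging event (every 500 optimizer steps here) with the current epoch, learning rate, and training loss, alongside run-level fields such as "global_step". A minimal sketch of inspecting it with the standard library follows; it assumes the file has been saved locally as trainer_state.json (filename taken from the title above) and uses only keys that actually appear in the file.

# Sketch: load trainer_state.json and summarise the logged loss curve.
# Assumes the file sits in the working directory under the name shown above.
import json

with open("trainer_state.json", "r", encoding="utf-8") as f:
    state = json.load(f)

# One dict per logging event; training-loss entries carry "loss", "step",
# "epoch" and "learning_rate".
history = [entry for entry in state["log_history"] if "loss" in entry]

steps = [entry["step"] for entry in history]
losses = [entry["loss"] for entry in history]
lrs = [entry["learning_rate"] for entry in history]

print(f"logged points: {len(history)}")
print(f"first loss: {losses[0]:.4f} at step {steps[0]}")
print(f"last loss:  {losses[-1]:.4f} at step {steps[-1]}")
print(f"learning rate decayed from {lrs[0]:.3e} to {lrs[-1]:.3e}")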
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 36.078289889059256,
"global_step": 400000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.05,
"learning_rate": 4.9937625e-05,
"loss": 4.4121,
"step": 500
},
{
"epoch": 0.09,
"learning_rate": 4.9875125e-05,
"loss": 4.1777,
"step": 1000
},
{
"epoch": 0.14,
"learning_rate": 4.9812625e-05,
"loss": 4.0899,
"step": 1500
},
{
"epoch": 0.18,
"learning_rate": 4.9750125000000005e-05,
"loss": 4.029,
"step": 2000
},
{
"epoch": 0.23,
"learning_rate": 4.9687625000000004e-05,
"loss": 3.9892,
"step": 2500
},
{
"epoch": 0.27,
"learning_rate": 4.9625125e-05,
"loss": 3.9563,
"step": 3000
},
{
"epoch": 0.32,
"learning_rate": 4.9562625e-05,
"loss": 3.9319,
"step": 3500
},
{
"epoch": 0.36,
"learning_rate": 4.9500125000000006e-05,
"loss": 3.9077,
"step": 4000
},
{
"epoch": 0.41,
"learning_rate": 4.9437625000000004e-05,
"loss": 3.8883,
"step": 4500
},
{
"epoch": 0.45,
"learning_rate": 4.9375125e-05,
"loss": 3.8724,
"step": 5000
},
{
"epoch": 0.5,
"learning_rate": 4.9312625e-05,
"loss": 3.8566,
"step": 5500
},
{
"epoch": 0.54,
"learning_rate": 4.9250125e-05,
"loss": 3.8431,
"step": 6000
},
{
"epoch": 0.59,
"learning_rate": 4.9187625000000005e-05,
"loss": 3.8284,
"step": 6500
},
{
"epoch": 0.63,
"learning_rate": 4.9125125e-05,
"loss": 3.8168,
"step": 7000
},
{
"epoch": 0.68,
"learning_rate": 4.9062625e-05,
"loss": 3.8098,
"step": 7500
},
{
"epoch": 0.72,
"learning_rate": 4.900025e-05,
"loss": 3.7967,
"step": 8000
},
{
"epoch": 0.77,
"learning_rate": 4.893775000000001e-05,
"loss": 3.7899,
"step": 8500
},
{
"epoch": 0.81,
"learning_rate": 4.887525e-05,
"loss": 3.7773,
"step": 9000
},
{
"epoch": 0.86,
"learning_rate": 4.8812750000000005e-05,
"loss": 3.7716,
"step": 9500
},
{
"epoch": 0.9,
"learning_rate": 4.8750375000000005e-05,
"loss": 3.7606,
"step": 10000
},
{
"epoch": 0.95,
"learning_rate": 4.8687875000000004e-05,
"loss": 3.755,
"step": 10500
},
{
"epoch": 0.99,
"learning_rate": 4.8625375e-05,
"loss": 3.7495,
"step": 11000
},
{
"epoch": 1.04,
"learning_rate": 4.8562875e-05,
"loss": 3.7424,
"step": 11500
},
{
"epoch": 1.08,
"learning_rate": 4.8500375000000006e-05,
"loss": 3.73,
"step": 12000
},
{
"epoch": 1.13,
"learning_rate": 4.8438e-05,
"loss": 3.7284,
"step": 12500
},
{
"epoch": 1.17,
"learning_rate": 4.8375500000000005e-05,
"loss": 3.7267,
"step": 13000
},
{
"epoch": 1.22,
"learning_rate": 4.8313e-05,
"loss": 3.7213,
"step": 13500
},
{
"epoch": 1.26,
"learning_rate": 4.82505e-05,
"loss": 3.7122,
"step": 14000
},
{
"epoch": 1.31,
"learning_rate": 4.8188125e-05,
"loss": 3.7055,
"step": 14500
},
{
"epoch": 1.35,
"learning_rate": 4.8125625e-05,
"loss": 3.7036,
"step": 15000
},
{
"epoch": 1.4,
"learning_rate": 4.8063125000000006e-05,
"loss": 3.7003,
"step": 15500
},
{
"epoch": 1.44,
"learning_rate": 4.8000625000000004e-05,
"loss": 3.6933,
"step": 16000
},
{
"epoch": 1.49,
"learning_rate": 4.7938125e-05,
"loss": 3.6898,
"step": 16500
},
{
"epoch": 1.53,
"learning_rate": 4.7875625e-05,
"loss": 3.6875,
"step": 17000
},
{
"epoch": 1.58,
"learning_rate": 4.781325e-05,
"loss": 3.6848,
"step": 17500
},
{
"epoch": 1.62,
"learning_rate": 4.775075e-05,
"loss": 3.6827,
"step": 18000
},
{
"epoch": 1.67,
"learning_rate": 4.768825e-05,
"loss": 3.6729,
"step": 18500
},
{
"epoch": 1.71,
"learning_rate": 4.7625750000000004e-05,
"loss": 3.6718,
"step": 19000
},
{
"epoch": 1.76,
"learning_rate": 4.7563375000000004e-05,
"loss": 3.6674,
"step": 19500
},
{
"epoch": 1.8,
"learning_rate": 4.7500875e-05,
"loss": 3.6626,
"step": 20000
},
{
"epoch": 1.85,
"learning_rate": 4.7438375e-05,
"loss": 3.659,
"step": 20500
},
{
"epoch": 1.89,
"learning_rate": 4.7375875e-05,
"loss": 3.6569,
"step": 21000
},
{
"epoch": 1.94,
"learning_rate": 4.7313375000000005e-05,
"loss": 3.6508,
"step": 21500
},
{
"epoch": 1.98,
"learning_rate": 4.7250875e-05,
"loss": 3.6536,
"step": 22000
},
{
"epoch": 2.03,
"learning_rate": 4.7188375e-05,
"loss": 3.6467,
"step": 22500
},
{
"epoch": 2.07,
"learning_rate": 4.7125875e-05,
"loss": 3.6441,
"step": 23000
},
{
"epoch": 2.12,
"learning_rate": 4.7063375000000006e-05,
"loss": 3.6382,
"step": 23500
},
{
"epoch": 2.16,
"learning_rate": 4.7000875e-05,
"loss": 3.6401,
"step": 24000
},
{
"epoch": 2.21,
"learning_rate": 4.6938500000000005e-05,
"loss": 3.6339,
"step": 24500
},
{
"epoch": 2.25,
"learning_rate": 4.6876125000000005e-05,
"loss": 3.633,
"step": 25000
},
{
"epoch": 2.3,
"learning_rate": 4.6813625e-05,
"loss": 3.6301,
"step": 25500
},
{
"epoch": 2.35,
"learning_rate": 4.6751125e-05,
"loss": 3.6277,
"step": 26000
},
{
"epoch": 2.39,
"learning_rate": 4.6688625e-05,
"loss": 3.6243,
"step": 26500
},
{
"epoch": 2.44,
"learning_rate": 4.6626125000000006e-05,
"loss": 3.6222,
"step": 27000
},
{
"epoch": 2.48,
"learning_rate": 4.6563625e-05,
"loss": 3.6189,
"step": 27500
},
{
"epoch": 2.53,
"learning_rate": 4.6501125e-05,
"loss": 3.6232,
"step": 28000
},
{
"epoch": 2.57,
"learning_rate": 4.6438625e-05,
"loss": 3.6158,
"step": 28500
},
{
"epoch": 2.62,
"learning_rate": 4.6376125000000007e-05,
"loss": 3.6137,
"step": 29000
},
{
"epoch": 2.66,
"learning_rate": 4.6313625000000005e-05,
"loss": 3.6101,
"step": 29500
},
{
"epoch": 2.71,
"learning_rate": 4.625125e-05,
"loss": 3.6095,
"step": 30000
},
{
"epoch": 2.75,
"learning_rate": 4.6188750000000004e-05,
"loss": 3.6098,
"step": 30500
},
{
"epoch": 2.8,
"learning_rate": 4.612625e-05,
"loss": 3.6091,
"step": 31000
},
{
"epoch": 2.84,
"learning_rate": 4.606375e-05,
"loss": 3.6047,
"step": 31500
},
{
"epoch": 2.89,
"learning_rate": 4.6001375e-05,
"loss": 3.603,
"step": 32000
},
{
"epoch": 2.93,
"learning_rate": 4.593887500000001e-05,
"loss": 3.6038,
"step": 32500
},
{
"epoch": 2.98,
"learning_rate": 4.5876375000000005e-05,
"loss": 3.592,
"step": 33000
},
{
"epoch": 3.02,
"learning_rate": 4.5813875000000004e-05,
"loss": 3.5909,
"step": 33500
},
{
"epoch": 3.07,
"learning_rate": 4.5751375e-05,
"loss": 3.5902,
"step": 34000
},
{
"epoch": 3.11,
"learning_rate": 4.5688875e-05,
"loss": 3.5931,
"step": 34500
},
{
"epoch": 3.16,
"learning_rate": 4.56265e-05,
"loss": 3.5901,
"step": 35000
},
{
"epoch": 3.2,
"learning_rate": 4.5564e-05,
"loss": 3.5858,
"step": 35500
},
{
"epoch": 3.25,
"learning_rate": 4.5501500000000005e-05,
"loss": 3.5849,
"step": 36000
},
{
"epoch": 3.29,
"learning_rate": 4.5439e-05,
"loss": 3.5811,
"step": 36500
},
{
"epoch": 3.34,
"learning_rate": 4.53765e-05,
"loss": 3.5823,
"step": 37000
},
{
"epoch": 3.38,
"learning_rate": 4.5314e-05,
"loss": 3.5768,
"step": 37500
},
{
"epoch": 3.43,
"learning_rate": 4.5251625e-05,
"loss": 3.5796,
"step": 38000
},
{
"epoch": 3.47,
"learning_rate": 4.5189125000000006e-05,
"loss": 3.581,
"step": 38500
},
{
"epoch": 3.52,
"learning_rate": 4.5126625e-05,
"loss": 3.5798,
"step": 39000
},
{
"epoch": 3.56,
"learning_rate": 4.5064125e-05,
"loss": 3.5744,
"step": 39500
},
{
"epoch": 3.61,
"learning_rate": 4.5001625e-05,
"loss": 3.5701,
"step": 40000
},
{
"epoch": 3.65,
"learning_rate": 4.4939125000000007e-05,
"loss": 3.5754,
"step": 40500
},
{
"epoch": 3.7,
"learning_rate": 4.487675e-05,
"loss": 3.5716,
"step": 41000
},
{
"epoch": 3.74,
"learning_rate": 4.481425e-05,
"loss": 3.5696,
"step": 41500
},
{
"epoch": 3.79,
"learning_rate": 4.4751750000000004e-05,
"loss": 3.5696,
"step": 42000
},
{
"epoch": 3.83,
"learning_rate": 4.468925e-05,
"loss": 3.565,
"step": 42500
},
{
"epoch": 3.88,
"learning_rate": 4.462675e-05,
"loss": 3.5641,
"step": 43000
},
{
"epoch": 3.92,
"learning_rate": 4.456425e-05,
"loss": 3.5667,
"step": 43500
},
{
"epoch": 3.97,
"learning_rate": 4.4501750000000005e-05,
"loss": 3.5658,
"step": 44000
},
{
"epoch": 4.01,
"learning_rate": 4.443925e-05,
"loss": 3.5593,
"step": 44500
},
{
"epoch": 4.06,
"learning_rate": 4.4376875000000004e-05,
"loss": 3.5572,
"step": 45000
},
{
"epoch": 4.1,
"learning_rate": 4.4314500000000004e-05,
"loss": 3.5618,
"step": 45500
},
{
"epoch": 4.15,
"learning_rate": 4.4252e-05,
"loss": 3.5545,
"step": 46000
},
{
"epoch": 4.19,
"learning_rate": 4.41895e-05,
"loss": 3.5556,
"step": 46500
},
{
"epoch": 4.24,
"learning_rate": 4.4127e-05,
"loss": 3.5497,
"step": 47000
},
{
"epoch": 4.28,
"learning_rate": 4.406462500000001e-05,
"loss": 3.5504,
"step": 47500
},
{
"epoch": 4.33,
"learning_rate": 4.4002125e-05,
"loss": 3.5517,
"step": 48000
},
{
"epoch": 4.37,
"learning_rate": 4.3939625000000004e-05,
"loss": 3.5524,
"step": 48500
},
{
"epoch": 4.42,
"learning_rate": 4.3877125e-05,
"loss": 3.5483,
"step": 49000
},
{
"epoch": 4.46,
"learning_rate": 4.3814625e-05,
"loss": 3.5497,
"step": 49500
},
{
"epoch": 4.51,
"learning_rate": 4.375225e-05,
"loss": 3.5479,
"step": 50000
},
{
"epoch": 4.55,
"learning_rate": 4.368975e-05,
"loss": 3.5457,
"step": 50500
},
{
"epoch": 4.6,
"learning_rate": 4.3627250000000005e-05,
"loss": 3.5427,
"step": 51000
},
{
"epoch": 4.65,
"learning_rate": 4.356475e-05,
"loss": 3.5399,
"step": 51500
},
{
"epoch": 4.69,
"learning_rate": 4.3502375000000004e-05,
"loss": 3.5412,
"step": 52000
},
{
"epoch": 4.74,
"learning_rate": 4.3439875e-05,
"loss": 3.5412,
"step": 52500
},
{
"epoch": 4.78,
"learning_rate": 4.3377375e-05,
"loss": 3.5444,
"step": 53000
},
{
"epoch": 4.83,
"learning_rate": 4.3314875000000006e-05,
"loss": 3.5404,
"step": 53500
},
{
"epoch": 4.87,
"learning_rate": 4.32525e-05,
"loss": 3.5368,
"step": 54000
},
{
"epoch": 4.92,
"learning_rate": 4.3190000000000005e-05,
"loss": 3.5374,
"step": 54500
},
{
"epoch": 4.96,
"learning_rate": 4.3127500000000003e-05,
"loss": 3.5409,
"step": 55000
},
{
"epoch": 5.01,
"learning_rate": 4.3065e-05,
"loss": 3.533,
"step": 55500
},
{
"epoch": 5.05,
"learning_rate": 4.30025e-05,
"loss": 3.5265,
"step": 56000
},
{
"epoch": 5.1,
"learning_rate": 4.2940125e-05,
"loss": 3.53,
"step": 56500
},
{
"epoch": 5.14,
"learning_rate": 4.2877625000000006e-05,
"loss": 3.5315,
"step": 57000
},
{
"epoch": 5.19,
"learning_rate": 4.2815125e-05,
"loss": 3.531,
"step": 57500
},
{
"epoch": 5.23,
"learning_rate": 4.2752625e-05,
"loss": 3.5327,
"step": 58000
},
{
"epoch": 5.28,
"learning_rate": 4.2690125e-05,
"loss": 3.526,
"step": 58500
},
{
"epoch": 5.32,
"learning_rate": 4.262762500000001e-05,
"loss": 3.5263,
"step": 59000
},
{
"epoch": 5.37,
"learning_rate": 4.2565125e-05,
"loss": 3.5248,
"step": 59500
},
{
"epoch": 5.41,
"learning_rate": 4.250275e-05,
"loss": 3.5257,
"step": 60000
},
{
"epoch": 5.46,
"learning_rate": 4.2440250000000004e-05,
"loss": 3.5275,
"step": 60500
},
{
"epoch": 5.5,
"learning_rate": 4.237775e-05,
"loss": 3.5263,
"step": 61000
},
{
"epoch": 5.55,
"learning_rate": 4.231525e-05,
"loss": 3.5228,
"step": 61500
},
{
"epoch": 5.59,
"learning_rate": 4.225275e-05,
"loss": 3.5231,
"step": 62000
},
{
"epoch": 5.64,
"learning_rate": 4.219037500000001e-05,
"loss": 3.5251,
"step": 62500
},
{
"epoch": 5.68,
"learning_rate": 4.2127875e-05,
"loss": 3.5208,
"step": 63000
},
{
"epoch": 5.73,
"learning_rate": 4.2065375000000004e-05,
"loss": 3.5222,
"step": 63500
},
{
"epoch": 5.77,
"learning_rate": 4.2002875e-05,
"loss": 3.522,
"step": 64000
},
{
"epoch": 5.82,
"learning_rate": 4.1940499999999996e-05,
"loss": 3.5171,
"step": 64500
},
{
"epoch": 5.86,
"learning_rate": 4.1878e-05,
"loss": 3.52,
"step": 65000
},
{
"epoch": 5.91,
"learning_rate": 4.18155e-05,
"loss": 3.5124,
"step": 65500
},
{
"epoch": 5.95,
"learning_rate": 4.1753000000000005e-05,
"loss": 3.5136,
"step": 66000
},
{
"epoch": 6.0,
"learning_rate": 4.1690500000000003e-05,
"loss": 3.5168,
"step": 66500
},
{
"epoch": 6.04,
"learning_rate": 4.162825e-05,
"loss": 3.5105,
"step": 67000
},
{
"epoch": 6.09,
"learning_rate": 4.1565750000000004e-05,
"loss": 3.5102,
"step": 67500
},
{
"epoch": 6.13,
"learning_rate": 4.150325e-05,
"loss": 3.505,
"step": 68000
},
{
"epoch": 6.18,
"learning_rate": 4.144075e-05,
"loss": 3.5073,
"step": 68500
},
{
"epoch": 6.22,
"learning_rate": 4.137825e-05,
"loss": 3.5137,
"step": 69000
},
{
"epoch": 6.27,
"learning_rate": 4.1315750000000005e-05,
"loss": 3.5111,
"step": 69500
},
{
"epoch": 6.31,
"learning_rate": 4.1253250000000004e-05,
"loss": 3.513,
"step": 70000
},
{
"epoch": 6.36,
"learning_rate": 4.119075e-05,
"loss": 3.5095,
"step": 70500
},
{
"epoch": 6.4,
"learning_rate": 4.112825e-05,
"loss": 3.5081,
"step": 71000
},
{
"epoch": 6.45,
"learning_rate": 4.1065875e-05,
"loss": 3.5059,
"step": 71500
},
{
"epoch": 6.49,
"learning_rate": 4.1003375e-05,
"loss": 3.5076,
"step": 72000
},
{
"epoch": 6.54,
"learning_rate": 4.0940875e-05,
"loss": 3.5035,
"step": 72500
},
{
"epoch": 6.58,
"learning_rate": 4.0878375e-05,
"loss": 3.5087,
"step": 73000
},
{
"epoch": 6.63,
"learning_rate": 4.0816000000000004e-05,
"loss": 3.5044,
"step": 73500
},
{
"epoch": 6.67,
"learning_rate": 4.07535e-05,
"loss": 3.503,
"step": 74000
},
{
"epoch": 6.72,
"learning_rate": 4.0691e-05,
"loss": 3.5033,
"step": 74500
},
{
"epoch": 6.76,
"learning_rate": 4.0628500000000006e-05,
"loss": 3.5035,
"step": 75000
},
{
"epoch": 6.81,
"learning_rate": 4.0566000000000004e-05,
"loss": 3.5037,
"step": 75500
},
{
"epoch": 6.85,
"learning_rate": 4.05035e-05,
"loss": 3.4997,
"step": 76000
},
{
"epoch": 6.9,
"learning_rate": 4.0441e-05,
"loss": 3.4997,
"step": 76500
},
{
"epoch": 6.95,
"learning_rate": 4.03785e-05,
"loss": 3.4969,
"step": 77000
},
{
"epoch": 6.99,
"learning_rate": 4.031612500000001e-05,
"loss": 3.4987,
"step": 77500
},
{
"epoch": 7.04,
"learning_rate": 4.0253625e-05,
"loss": 3.5014,
"step": 78000
},
{
"epoch": 7.08,
"learning_rate": 4.0191125000000004e-05,
"loss": 3.4939,
"step": 78500
},
{
"epoch": 7.13,
"learning_rate": 4.0128625e-05,
"loss": 3.4961,
"step": 79000
},
{
"epoch": 7.17,
"learning_rate": 4.006625e-05,
"loss": 3.4964,
"step": 79500
},
{
"epoch": 7.22,
"learning_rate": 4.000375e-05,
"loss": 3.4956,
"step": 80000
},
{
"epoch": 7.26,
"learning_rate": 3.994125e-05,
"loss": 3.4967,
"step": 80500
},
{
"epoch": 7.31,
"learning_rate": 3.9878750000000005e-05,
"loss": 3.4964,
"step": 81000
},
{
"epoch": 7.35,
"learning_rate": 3.9816375e-05,
"loss": 3.4924,
"step": 81500
},
{
"epoch": 7.4,
"learning_rate": 3.9753875000000004e-05,
"loss": 3.4903,
"step": 82000
},
{
"epoch": 7.44,
"learning_rate": 3.9691375e-05,
"loss": 3.4869,
"step": 82500
},
{
"epoch": 7.49,
"learning_rate": 3.9628875e-05,
"loss": 3.4938,
"step": 83000
},
{
"epoch": 7.53,
"learning_rate": 3.9566375e-05,
"loss": 3.4892,
"step": 83500
},
{
"epoch": 7.58,
"learning_rate": 3.9503875e-05,
"loss": 3.4888,
"step": 84000
},
{
"epoch": 7.62,
"learning_rate": 3.9441500000000005e-05,
"loss": 3.4882,
"step": 84500
},
{
"epoch": 7.67,
"learning_rate": 3.9379e-05,
"loss": 3.4885,
"step": 85000
},
{
"epoch": 7.71,
"learning_rate": 3.93165e-05,
"loss": 3.4893,
"step": 85500
},
{
"epoch": 7.76,
"learning_rate": 3.9254e-05,
"loss": 3.4895,
"step": 86000
},
{
"epoch": 7.8,
"learning_rate": 3.9191500000000006e-05,
"loss": 3.4839,
"step": 86500
},
{
"epoch": 7.85,
"learning_rate": 3.9129125e-05,
"loss": 3.4906,
"step": 87000
},
{
"epoch": 7.89,
"learning_rate": 3.9066625e-05,
"loss": 3.4846,
"step": 87500
},
{
"epoch": 7.94,
"learning_rate": 3.9004125e-05,
"loss": 3.487,
"step": 88000
},
{
"epoch": 7.98,
"learning_rate": 3.8941625e-05,
"loss": 3.4836,
"step": 88500
},
{
"epoch": 8.03,
"learning_rate": 3.8879125e-05,
"loss": 3.4813,
"step": 89000
},
{
"epoch": 8.07,
"learning_rate": 3.881675e-05,
"loss": 3.4797,
"step": 89500
},
{
"epoch": 8.12,
"learning_rate": 3.8754250000000006e-05,
"loss": 3.4797,
"step": 90000
},
{
"epoch": 8.16,
"learning_rate": 3.8691750000000004e-05,
"loss": 3.4799,
"step": 90500
},
{
"epoch": 8.21,
"learning_rate": 3.862925e-05,
"loss": 3.4791,
"step": 91000
},
{
"epoch": 8.25,
"learning_rate": 3.8566875e-05,
"loss": 3.4776,
"step": 91500
},
{
"epoch": 8.3,
"learning_rate": 3.8504375e-05,
"loss": 3.4789,
"step": 92000
},
{
"epoch": 8.34,
"learning_rate": 3.8441875e-05,
"loss": 3.4818,
"step": 92500
},
{
"epoch": 8.39,
"learning_rate": 3.8379375e-05,
"loss": 3.477,
"step": 93000
},
{
"epoch": 8.43,
"learning_rate": 3.8316875000000004e-05,
"loss": 3.4787,
"step": 93500
},
{
"epoch": 8.48,
"learning_rate": 3.8254500000000004e-05,
"loss": 3.4792,
"step": 94000
},
{
"epoch": 8.52,
"learning_rate": 3.8192e-05,
"loss": 3.4806,
"step": 94500
},
{
"epoch": 8.57,
"learning_rate": 3.81295e-05,
"loss": 3.48,
"step": 95000
},
{
"epoch": 8.61,
"learning_rate": 3.8067e-05,
"loss": 3.4793,
"step": 95500
},
{
"epoch": 8.66,
"learning_rate": 3.8004625e-05,
"loss": 3.4767,
"step": 96000
},
{
"epoch": 8.7,
"learning_rate": 3.7942125e-05,
"loss": 3.4792,
"step": 96500
},
{
"epoch": 8.75,
"learning_rate": 3.7879625000000004e-05,
"loss": 3.4751,
"step": 97000
},
{
"epoch": 8.79,
"learning_rate": 3.7817125e-05,
"loss": 3.4747,
"step": 97500
},
{
"epoch": 8.84,
"learning_rate": 3.775475e-05,
"loss": 3.4771,
"step": 98000
},
{
"epoch": 8.88,
"learning_rate": 3.769225e-05,
"loss": 3.4755,
"step": 98500
},
{
"epoch": 8.93,
"learning_rate": 3.762975e-05,
"loss": 3.4746,
"step": 99000
},
{
"epoch": 8.97,
"learning_rate": 3.7567250000000005e-05,
"loss": 3.4749,
"step": 99500
},
{
"epoch": 9.02,
"learning_rate": 3.7504875e-05,
"loss": 3.4744,
"step": 100000
},
{
"epoch": 9.06,
"learning_rate": 3.7442375000000004e-05,
"loss": 3.4658,
"step": 100500
},
{
"epoch": 9.11,
"learning_rate": 3.7379875e-05,
"loss": 3.4723,
"step": 101000
},
{
"epoch": 9.15,
"learning_rate": 3.7317499999999996e-05,
"loss": 3.4672,
"step": 101500
},
{
"epoch": 9.2,
"learning_rate": 3.7255e-05,
"loss": 3.4716,
"step": 102000
},
{
"epoch": 9.25,
"learning_rate": 3.71925e-05,
"loss": 3.4695,
"step": 102500
},
{
"epoch": 9.29,
"learning_rate": 3.7130000000000005e-05,
"loss": 3.474,
"step": 103000
},
{
"epoch": 9.34,
"learning_rate": 3.70675e-05,
"loss": 3.4701,
"step": 103500
},
{
"epoch": 9.38,
"learning_rate": 3.7005e-05,
"loss": 3.4665,
"step": 104000
},
{
"epoch": 9.43,
"learning_rate": 3.69425e-05,
"loss": 3.468,
"step": 104500
},
{
"epoch": 9.47,
"learning_rate": 3.6880000000000006e-05,
"loss": 3.4654,
"step": 105000
},
{
"epoch": 9.52,
"learning_rate": 3.68175e-05,
"loss": 3.4674,
"step": 105500
},
{
"epoch": 9.56,
"learning_rate": 3.6755125e-05,
"loss": 3.4652,
"step": 106000
},
{
"epoch": 9.61,
"learning_rate": 3.6692625000000003e-05,
"loss": 3.4673,
"step": 106500
},
{
"epoch": 9.65,
"learning_rate": 3.6630125e-05,
"loss": 3.464,
"step": 107000
},
{
"epoch": 9.7,
"learning_rate": 3.6567625e-05,
"loss": 3.4656,
"step": 107500
},
{
"epoch": 9.74,
"learning_rate": 3.6505125e-05,
"loss": 3.4704,
"step": 108000
},
{
"epoch": 9.79,
"learning_rate": 3.6442750000000006e-05,
"loss": 3.4671,
"step": 108500
},
{
"epoch": 9.83,
"learning_rate": 3.638025e-05,
"loss": 3.462,
"step": 109000
},
{
"epoch": 9.88,
"learning_rate": 3.631775e-05,
"loss": 3.4675,
"step": 109500
},
{
"epoch": 9.92,
"learning_rate": 3.625525e-05,
"loss": 3.4658,
"step": 110000
},
{
"epoch": 9.97,
"learning_rate": 3.6192875e-05,
"loss": 3.4662,
"step": 110500
},
{
"epoch": 10.01,
"learning_rate": 3.6130375e-05,
"loss": 3.462,
"step": 111000
},
{
"epoch": 10.06,
"learning_rate": 3.6068e-05,
"loss": 3.4601,
"step": 111500
},
{
"epoch": 10.1,
"learning_rate": 3.6005500000000006e-05,
"loss": 3.4581,
"step": 112000
},
{
"epoch": 10.15,
"learning_rate": 3.5943000000000005e-05,
"loss": 3.4608,
"step": 112500
},
{
"epoch": 10.19,
"learning_rate": 3.58805e-05,
"loss": 3.4583,
"step": 113000
},
{
"epoch": 10.24,
"learning_rate": 3.5818e-05,
"loss": 3.4628,
"step": 113500
},
{
"epoch": 10.28,
"learning_rate": 3.5755625e-05,
"loss": 3.4592,
"step": 114000
},
{
"epoch": 10.33,
"learning_rate": 3.5693125e-05,
"loss": 3.4598,
"step": 114500
},
{
"epoch": 10.37,
"learning_rate": 3.5630625e-05,
"loss": 3.4596,
"step": 115000
},
{
"epoch": 10.42,
"learning_rate": 3.5568125000000004e-05,
"loss": 3.4609,
"step": 115500
},
{
"epoch": 10.46,
"learning_rate": 3.5505625e-05,
"loss": 3.4563,
"step": 116000
},
{
"epoch": 10.51,
"learning_rate": 3.5443125e-05,
"loss": 3.4563,
"step": 116500
},
{
"epoch": 10.55,
"learning_rate": 3.5380625e-05,
"loss": 3.4557,
"step": 117000
},
{
"epoch": 10.6,
"learning_rate": 3.5318125000000005e-05,
"loss": 3.458,
"step": 117500
},
{
"epoch": 10.64,
"learning_rate": 3.5255750000000005e-05,
"loss": 3.4586,
"step": 118000
},
{
"epoch": 10.69,
"learning_rate": 3.519325e-05,
"loss": 3.4586,
"step": 118500
},
{
"epoch": 10.73,
"learning_rate": 3.513075e-05,
"loss": 3.4584,
"step": 119000
},
{
"epoch": 10.78,
"learning_rate": 3.506825e-05,
"loss": 3.4515,
"step": 119500
},
{
"epoch": 10.82,
"learning_rate": 3.5005875e-05,
"loss": 3.4551,
"step": 120000
},
{
"epoch": 10.87,
"learning_rate": 3.4943375e-05,
"loss": 3.4565,
"step": 120500
},
{
"epoch": 10.91,
"learning_rate": 3.4880875000000005e-05,
"loss": 3.4596,
"step": 121000
},
{
"epoch": 10.96,
"learning_rate": 3.4818375000000004e-05,
"loss": 3.4562,
"step": 121500
},
{
"epoch": 11.0,
"learning_rate": 3.4756e-05,
"loss": 3.4551,
"step": 122000
},
{
"epoch": 11.05,
"learning_rate": 3.4693625000000004e-05,
"loss": 3.4466,
"step": 122500
},
{
"epoch": 11.09,
"learning_rate": 3.4631125e-05,
"loss": 3.4494,
"step": 123000
},
{
"epoch": 11.14,
"learning_rate": 3.4568625e-05,
"loss": 3.4485,
"step": 123500
},
{
"epoch": 11.18,
"learning_rate": 3.4506125e-05,
"loss": 3.4511,
"step": 124000
},
{
"epoch": 11.23,
"learning_rate": 3.444375e-05,
"loss": 3.4521,
"step": 124500
},
{
"epoch": 11.27,
"learning_rate": 3.4381250000000006e-05,
"loss": 3.4494,
"step": 125000
},
{
"epoch": 11.32,
"learning_rate": 3.431875e-05,
"loss": 3.4515,
"step": 125500
},
{
"epoch": 11.36,
"learning_rate": 3.425625e-05,
"loss": 3.4499,
"step": 126000
},
{
"epoch": 11.41,
"learning_rate": 3.419375e-05,
"loss": 3.4532,
"step": 126500
},
{
"epoch": 11.45,
"learning_rate": 3.4131250000000006e-05,
"loss": 3.4527,
"step": 127000
},
{
"epoch": 11.5,
"learning_rate": 3.406875e-05,
"loss": 3.4505,
"step": 127500
},
{
"epoch": 11.55,
"learning_rate": 3.400625e-05,
"loss": 3.4496,
"step": 128000
},
{
"epoch": 11.59,
"learning_rate": 3.3943875000000004e-05,
"loss": 3.4526,
"step": 128500
},
{
"epoch": 11.64,
"learning_rate": 3.3881375e-05,
"loss": 3.4503,
"step": 129000
},
{
"epoch": 11.68,
"learning_rate": 3.3818875e-05,
"loss": 3.4498,
"step": 129500
},
{
"epoch": 11.73,
"learning_rate": 3.3756375e-05,
"loss": 3.4492,
"step": 130000
},
{
"epoch": 11.77,
"learning_rate": 3.3694000000000006e-05,
"loss": 3.4471,
"step": 130500
},
{
"epoch": 11.82,
"learning_rate": 3.36315e-05,
"loss": 3.4504,
"step": 131000
},
{
"epoch": 11.86,
"learning_rate": 3.3569e-05,
"loss": 3.449,
"step": 131500
},
{
"epoch": 11.91,
"learning_rate": 3.35065e-05,
"loss": 3.4478,
"step": 132000
},
{
"epoch": 11.95,
"learning_rate": 3.3444125e-05,
"loss": 3.4483,
"step": 132500
},
{
"epoch": 12.0,
"learning_rate": 3.3381625e-05,
"loss": 3.4496,
"step": 133000
},
{
"epoch": 12.04,
"learning_rate": 3.3319125e-05,
"loss": 3.4443,
"step": 133500
},
{
"epoch": 12.09,
"learning_rate": 3.3256625000000004e-05,
"loss": 3.4451,
"step": 134000
},
{
"epoch": 12.13,
"learning_rate": 3.3194125e-05,
"loss": 3.4406,
"step": 134500
},
{
"epoch": 12.18,
"learning_rate": 3.3131750000000003e-05,
"loss": 3.4448,
"step": 135000
},
{
"epoch": 12.22,
"learning_rate": 3.306925e-05,
"loss": 3.4466,
"step": 135500
},
{
"epoch": 12.27,
"learning_rate": 3.300675e-05,
"loss": 3.4472,
"step": 136000
},
{
"epoch": 12.31,
"learning_rate": 3.2944375e-05,
"loss": 3.4445,
"step": 136500
},
{
"epoch": 12.36,
"learning_rate": 3.2881875e-05,
"loss": 3.4465,
"step": 137000
},
{
"epoch": 12.4,
"learning_rate": 3.2819375000000005e-05,
"loss": 3.445,
"step": 137500
},
{
"epoch": 12.45,
"learning_rate": 3.2756875e-05,
"loss": 3.4443,
"step": 138000
},
{
"epoch": 12.49,
"learning_rate": 3.2694375e-05,
"loss": 3.4402,
"step": 138500
},
{
"epoch": 12.54,
"learning_rate": 3.2631875e-05,
"loss": 3.4419,
"step": 139000
},
{
"epoch": 12.58,
"learning_rate": 3.2569375000000005e-05,
"loss": 3.4387,
"step": 139500
},
{
"epoch": 12.63,
"learning_rate": 3.2506875000000004e-05,
"loss": 3.4432,
"step": 140000
},
{
"epoch": 12.67,
"learning_rate": 3.24445e-05,
"loss": 3.4392,
"step": 140500
},
{
"epoch": 12.72,
"learning_rate": 3.2382e-05,
"loss": 3.442,
"step": 141000
},
{
"epoch": 12.76,
"learning_rate": 3.23195e-05,
"loss": 3.4443,
"step": 141500
},
{
"epoch": 12.81,
"learning_rate": 3.2257000000000006e-05,
"loss": 3.4405,
"step": 142000
},
{
"epoch": 12.85,
"learning_rate": 3.21945e-05,
"loss": 3.4427,
"step": 142500
},
{
"epoch": 12.9,
"learning_rate": 3.2132e-05,
"loss": 3.4398,
"step": 143000
},
{
"epoch": 12.94,
"learning_rate": 3.20695e-05,
"loss": 3.4392,
"step": 143500
},
{
"epoch": 12.99,
"learning_rate": 3.2007e-05,
"loss": 3.4396,
"step": 144000
},
{
"epoch": 13.03,
"learning_rate": 3.1944625e-05,
"loss": 3.4359,
"step": 144500
},
{
"epoch": 13.08,
"learning_rate": 3.1882125e-05,
"loss": 3.4373,
"step": 145000
},
{
"epoch": 13.12,
"learning_rate": 3.1819625000000005e-05,
"loss": 3.4352,
"step": 145500
},
{
"epoch": 13.17,
"learning_rate": 3.1757125e-05,
"loss": 3.4361,
"step": 146000
},
{
"epoch": 13.21,
"learning_rate": 3.1694625e-05,
"loss": 3.4386,
"step": 146500
},
{
"epoch": 13.26,
"learning_rate": 3.163225e-05,
"loss": 3.4412,
"step": 147000
},
{
"epoch": 13.3,
"learning_rate": 3.156975e-05,
"loss": 3.4356,
"step": 147500
},
{
"epoch": 13.35,
"learning_rate": 3.150725e-05,
"loss": 3.4377,
"step": 148000
},
{
"epoch": 13.39,
"learning_rate": 3.144475e-05,
"loss": 3.4351,
"step": 148500
},
{
"epoch": 13.44,
"learning_rate": 3.1382375000000005e-05,
"loss": 3.4402,
"step": 149000
},
{
"epoch": 13.48,
"learning_rate": 3.1319875e-05,
"loss": 3.4362,
"step": 149500
},
{
"epoch": 13.53,
"learning_rate": 3.1257375e-05,
"loss": 3.4402,
"step": 150000
},
{
"epoch": 13.57,
"learning_rate": 3.1194875e-05,
"loss": 3.4366,
"step": 150500
},
{
"epoch": 13.62,
"learning_rate": 3.11325e-05,
"loss": 3.4356,
"step": 151000
},
{
"epoch": 13.66,
"learning_rate": 3.107e-05,
"loss": 3.4332,
"step": 151500
},
{
"epoch": 13.71,
"learning_rate": 3.10075e-05,
"loss": 3.4343,
"step": 152000
},
{
"epoch": 13.75,
"learning_rate": 3.0945e-05,
"loss": 3.4366,
"step": 152500
},
{
"epoch": 13.8,
"learning_rate": 3.08825e-05,
"loss": 3.436,
"step": 153000
},
{
"epoch": 13.85,
"learning_rate": 3.0820125e-05,
"loss": 3.4278,
"step": 153500
},
{
"epoch": 13.89,
"learning_rate": 3.0757625e-05,
"loss": 3.4351,
"step": 154000
},
{
"epoch": 13.94,
"learning_rate": 3.0695125000000005e-05,
"loss": 3.4341,
"step": 154500
},
{
"epoch": 13.98,
"learning_rate": 3.0632625000000004e-05,
"loss": 3.4362,
"step": 155000
},
{
"epoch": 14.03,
"learning_rate": 3.057025e-05,
"loss": 3.4335,
"step": 155500
},
{
"epoch": 14.07,
"learning_rate": 3.0507750000000003e-05,
"loss": 3.4289,
"step": 156000
},
{
"epoch": 14.12,
"learning_rate": 3.044525e-05,
"loss": 3.4283,
"step": 156500
},
{
"epoch": 14.16,
"learning_rate": 3.0382875e-05,
"loss": 3.4298,
"step": 157000
},
{
"epoch": 14.21,
"learning_rate": 3.0320375e-05,
"loss": 3.4257,
"step": 157500
},
{
"epoch": 14.25,
"learning_rate": 3.0257875000000002e-05,
"loss": 3.4315,
"step": 158000
},
{
"epoch": 14.3,
"learning_rate": 3.0195375000000004e-05,
"loss": 3.4319,
"step": 158500
},
{
"epoch": 14.34,
"learning_rate": 3.0132875e-05,
"loss": 3.4311,
"step": 159000
},
{
"epoch": 14.39,
"learning_rate": 3.0070375e-05,
"loss": 3.4312,
"step": 159500
},
{
"epoch": 14.43,
"learning_rate": 3.0007875000000003e-05,
"loss": 3.4293,
"step": 160000
},
{
"epoch": 14.48,
"learning_rate": 2.9945375000000005e-05,
"loss": 3.4291,
"step": 160500
},
{
"epoch": 14.52,
"learning_rate": 2.9882875e-05,
"loss": 3.4323,
"step": 161000
},
{
"epoch": 14.57,
"learning_rate": 2.98205e-05,
"loss": 3.4282,
"step": 161500
},
{
"epoch": 14.61,
"learning_rate": 2.9758000000000002e-05,
"loss": 3.4317,
"step": 162000
},
{
"epoch": 14.66,
"learning_rate": 2.9695500000000004e-05,
"loss": 3.4295,
"step": 162500
},
{
"epoch": 14.7,
"learning_rate": 2.9633e-05,
"loss": 3.429,
"step": 163000
},
{
"epoch": 14.75,
"learning_rate": 2.9570625000000003e-05,
"loss": 3.4297,
"step": 163500
},
{
"epoch": 14.79,
"learning_rate": 2.9508125000000005e-05,
"loss": 3.4336,
"step": 164000
},
{
"epoch": 14.84,
"learning_rate": 2.9445625e-05,
"loss": 3.4315,
"step": 164500
},
{
"epoch": 14.88,
"learning_rate": 2.9383125e-05,
"loss": 3.4266,
"step": 165000
},
{
"epoch": 14.93,
"learning_rate": 2.9320750000000002e-05,
"loss": 3.4312,
"step": 165500
},
{
"epoch": 14.97,
"learning_rate": 2.9258250000000004e-05,
"loss": 3.4297,
"step": 166000
},
{
"epoch": 15.02,
"learning_rate": 2.919575e-05,
"loss": 3.4267,
"step": 166500
},
{
"epoch": 15.06,
"learning_rate": 2.913325e-05,
"loss": 3.4227,
"step": 167000
},
{
"epoch": 15.11,
"learning_rate": 2.9070750000000003e-05,
"loss": 3.4243,
"step": 167500
},
{
"epoch": 15.15,
"learning_rate": 2.9008375e-05,
"loss": 3.4238,
"step": 168000
},
{
"epoch": 15.2,
"learning_rate": 2.8945875000000002e-05,
"loss": 3.4273,
"step": 168500
},
{
"epoch": 15.24,
"learning_rate": 2.8883375e-05,
"loss": 3.4242,
"step": 169000
},
{
"epoch": 15.29,
"learning_rate": 2.8820875000000002e-05,
"loss": 3.4284,
"step": 169500
},
{
"epoch": 15.33,
"learning_rate": 2.87585e-05,
"loss": 3.4299,
"step": 170000
},
{
"epoch": 15.38,
"learning_rate": 2.8696e-05,
"loss": 3.4234,
"step": 170500
},
{
"epoch": 15.42,
"learning_rate": 2.8633500000000003e-05,
"loss": 3.4272,
"step": 171000
},
{
"epoch": 15.47,
"learning_rate": 2.8571000000000005e-05,
"loss": 3.4253,
"step": 171500
},
{
"epoch": 15.51,
"learning_rate": 2.85085e-05,
"loss": 3.4262,
"step": 172000
},
{
"epoch": 15.56,
"learning_rate": 2.8446000000000002e-05,
"loss": 3.4278,
"step": 172500
},
{
"epoch": 15.6,
"learning_rate": 2.83835e-05,
"loss": 3.4224,
"step": 173000
},
{
"epoch": 15.65,
"learning_rate": 2.8321000000000002e-05,
"loss": 3.4207,
"step": 173500
},
{
"epoch": 15.69,
"learning_rate": 2.8258625e-05,
"loss": 3.4205,
"step": 174000
},
{
"epoch": 15.74,
"learning_rate": 2.8196125e-05,
"loss": 3.4271,
"step": 174500
},
{
"epoch": 15.78,
"learning_rate": 2.8133625000000003e-05,
"loss": 3.4237,
"step": 175000
},
{
"epoch": 15.83,
"learning_rate": 2.8071125000000005e-05,
"loss": 3.4261,
"step": 175500
},
{
"epoch": 15.87,
"learning_rate": 2.8008750000000002e-05,
"loss": 3.4213,
"step": 176000
},
{
"epoch": 15.92,
"learning_rate": 2.794625e-05,
"loss": 3.4281,
"step": 176500
},
{
"epoch": 15.96,
"learning_rate": 2.7883750000000002e-05,
"loss": 3.4257,
"step": 177000
},
{
"epoch": 16.01,
"learning_rate": 2.7821249999999997e-05,
"loss": 3.4206,
"step": 177500
},
{
"epoch": 16.05,
"learning_rate": 2.7758875e-05,
"loss": 3.4193,
"step": 178000
},
{
"epoch": 16.1,
"learning_rate": 2.7696375000000003e-05,
"loss": 3.4245,
"step": 178500
},
{
"epoch": 16.15,
"learning_rate": 2.7633875000000005e-05,
"loss": 3.4176,
"step": 179000
},
{
"epoch": 16.19,
"learning_rate": 2.7571375e-05,
"loss": 3.4192,
"step": 179500
},
{
"epoch": 16.24,
"learning_rate": 2.7508875000000002e-05,
"loss": 3.4212,
"step": 180000
},
{
"epoch": 16.28,
"learning_rate": 2.7446500000000002e-05,
"loss": 3.4189,
"step": 180500
},
{
"epoch": 16.33,
"learning_rate": 2.7383999999999997e-05,
"loss": 3.418,
"step": 181000
},
{
"epoch": 16.37,
"learning_rate": 2.73215e-05,
"loss": 3.4228,
"step": 181500
},
{
"epoch": 16.42,
"learning_rate": 2.7259e-05,
"loss": 3.4215,
"step": 182000
},
{
"epoch": 16.46,
"learning_rate": 2.7196625000000005e-05,
"loss": 3.4161,
"step": 182500
},
{
"epoch": 16.51,
"learning_rate": 2.7134125e-05,
"loss": 3.4222,
"step": 183000
},
{
"epoch": 16.55,
"learning_rate": 2.7071625000000002e-05,
"loss": 3.4198,
"step": 183500
},
{
"epoch": 16.6,
"learning_rate": 2.7009125e-05,
"loss": 3.4212,
"step": 184000
},
{
"epoch": 16.64,
"learning_rate": 2.6946750000000004e-05,
"loss": 3.4148,
"step": 184500
},
{
"epoch": 16.69,
"learning_rate": 2.6884375e-05,
"loss": 3.4235,
"step": 185000
},
{
"epoch": 16.73,
"learning_rate": 2.6821875000000003e-05,
"loss": 3.4216,
"step": 185500
},
{
"epoch": 16.78,
"learning_rate": 2.6759375e-05,
"loss": 3.4211,
"step": 186000
},
{
"epoch": 16.82,
"learning_rate": 2.6696875e-05,
"loss": 3.4161,
"step": 186500
},
{
"epoch": 16.87,
"learning_rate": 2.66345e-05,
"loss": 3.4201,
"step": 187000
},
{
"epoch": 16.91,
"learning_rate": 2.6572000000000002e-05,
"loss": 3.4175,
"step": 187500
},
{
"epoch": 16.96,
"learning_rate": 2.6509500000000004e-05,
"loss": 3.421,
"step": 188000
},
{
"epoch": 17.0,
"learning_rate": 2.6447e-05,
"loss": 3.4195,
"step": 188500
},
{
"epoch": 17.05,
"learning_rate": 2.63845e-05,
"loss": 3.4126,
"step": 189000
},
{
"epoch": 17.09,
"learning_rate": 2.6322000000000003e-05,
"loss": 3.4153,
"step": 189500
},
{
"epoch": 17.14,
"learning_rate": 2.6259500000000005e-05,
"loss": 3.4149,
"step": 190000
},
{
"epoch": 17.18,
"learning_rate": 2.6197e-05,
"loss": 3.4204,
"step": 190500
},
{
"epoch": 17.23,
"learning_rate": 2.6134625e-05,
"loss": 3.412,
"step": 191000
},
{
"epoch": 17.27,
"learning_rate": 2.6072125000000002e-05,
"loss": 3.4135,
"step": 191500
},
{
"epoch": 17.32,
"learning_rate": 2.6009624999999997e-05,
"loss": 3.4191,
"step": 192000
},
{
"epoch": 17.36,
"learning_rate": 2.5947125e-05,
"loss": 3.4165,
"step": 192500
},
{
"epoch": 17.41,
"learning_rate": 2.5884750000000003e-05,
"loss": 3.4179,
"step": 193000
},
{
"epoch": 17.45,
"learning_rate": 2.5822250000000005e-05,
"loss": 3.4153,
"step": 193500
},
{
"epoch": 17.5,
"learning_rate": 2.575975e-05,
"loss": 3.4167,
"step": 194000
},
{
"epoch": 17.54,
"learning_rate": 2.5697250000000002e-05,
"loss": 3.4153,
"step": 194500
},
{
"epoch": 17.59,
"learning_rate": 2.5634875000000002e-05,
"loss": 3.4107,
"step": 195000
},
{
"epoch": 17.63,
"learning_rate": 2.5572375000000004e-05,
"loss": 3.4158,
"step": 195500
},
{
"epoch": 17.68,
"learning_rate": 2.5509875e-05,
"loss": 3.4149,
"step": 196000
},
{
"epoch": 17.72,
"learning_rate": 2.5447500000000003e-05,
"loss": 3.4192,
"step": 196500
},
{
"epoch": 17.77,
"learning_rate": 2.5385000000000002e-05,
"loss": 3.4126,
"step": 197000
},
{
"epoch": 17.81,
"learning_rate": 2.53225e-05,
"loss": 3.4128,
"step": 197500
},
{
"epoch": 17.86,
"learning_rate": 2.526e-05,
"loss": 3.4174,
"step": 198000
},
{
"epoch": 17.9,
"learning_rate": 2.51975e-05,
"loss": 3.4133,
"step": 198500
},
{
"epoch": 17.95,
"learning_rate": 2.5135125000000004e-05,
"loss": 3.4146,
"step": 199000
},
{
"epoch": 17.99,
"learning_rate": 2.5072625e-05,
"loss": 3.4183,
"step": 199500
},
{
"epoch": 18.04,
"learning_rate": 2.5010125e-05,
"loss": 3.4109,
"step": 200000
},
{
"epoch": 18.08,
"learning_rate": 2.4947625000000003e-05,
"loss": 3.4156,
"step": 200500
},
{
"epoch": 18.13,
"learning_rate": 2.488525e-05,
"loss": 3.4113,
"step": 201000
},
{
"epoch": 18.17,
"learning_rate": 2.482275e-05,
"loss": 3.4134,
"step": 201500
},
{
"epoch": 18.22,
"learning_rate": 2.476025e-05,
"loss": 3.4117,
"step": 202000
},
{
"epoch": 18.26,
"learning_rate": 2.469775e-05,
"loss": 3.4121,
"step": 202500
},
{
"epoch": 18.31,
"learning_rate": 2.4635375000000003e-05,
"loss": 3.413,
"step": 203000
},
{
"epoch": 18.35,
"learning_rate": 2.4572875e-05,
"loss": 3.412,
"step": 203500
},
{
"epoch": 18.4,
"learning_rate": 2.4510375000000003e-05,
"loss": 3.4081,
"step": 204000
},
{
"epoch": 18.45,
"learning_rate": 2.4447875000000002e-05,
"loss": 3.4136,
"step": 204500
},
{
"epoch": 18.49,
"learning_rate": 2.4385375e-05,
"loss": 3.4101,
"step": 205000
},
{
"epoch": 18.54,
"learning_rate": 2.4322875000000002e-05,
"loss": 3.4107,
"step": 205500
},
{
"epoch": 18.58,
"learning_rate": 2.4260375e-05,
"loss": 3.4144,
"step": 206000
},
{
"epoch": 18.63,
"learning_rate": 2.4197875e-05,
"loss": 3.41,
"step": 206500
},
{
"epoch": 18.67,
"learning_rate": 2.4135375e-05,
"loss": 3.4122,
"step": 207000
},
{
"epoch": 18.72,
"learning_rate": 2.4073e-05,
"loss": 3.4093,
"step": 207500
},
{
"epoch": 18.76,
"learning_rate": 2.4010625000000002e-05,
"loss": 3.4078,
"step": 208000
},
{
"epoch": 18.81,
"learning_rate": 2.3948125e-05,
"loss": 3.4094,
"step": 208500
},
{
"epoch": 18.85,
"learning_rate": 2.3885625e-05,
"loss": 3.4083,
"step": 209000
},
{
"epoch": 18.9,
"learning_rate": 2.3823125e-05,
"loss": 3.4111,
"step": 209500
},
{
"epoch": 18.94,
"learning_rate": 2.3760625e-05,
"loss": 3.4105,
"step": 210000
},
{
"epoch": 18.99,
"learning_rate": 2.3698125e-05,
"loss": 3.4112,
"step": 210500
},
{
"epoch": 19.03,
"learning_rate": 2.3635625e-05,
"loss": 3.4081,
"step": 211000
},
{
"epoch": 19.08,
"learning_rate": 2.3573125e-05,
"loss": 3.4082,
"step": 211500
},
{
"epoch": 19.12,
"learning_rate": 2.3510750000000002e-05,
"loss": 3.4088,
"step": 212000
},
{
"epoch": 19.17,
"learning_rate": 2.3448250000000004e-05,
"loss": 3.4083,
"step": 212500
},
{
"epoch": 19.21,
"learning_rate": 2.3385875e-05,
"loss": 3.4091,
"step": 213000
},
{
"epoch": 19.26,
"learning_rate": 2.3323375e-05,
"loss": 3.4074,
"step": 213500
},
{
"epoch": 19.3,
"learning_rate": 2.3260875e-05,
"loss": 3.4093,
"step": 214000
},
{
"epoch": 19.35,
"learning_rate": 2.3198375e-05,
"loss": 3.4044,
"step": 214500
},
{
"epoch": 19.39,
"learning_rate": 2.3135875e-05,
"loss": 3.4078,
"step": 215000
},
{
"epoch": 19.44,
"learning_rate": 2.3073375e-05,
"loss": 3.4072,
"step": 215500
},
{
"epoch": 19.48,
"learning_rate": 2.3011e-05,
"loss": 3.4091,
"step": 216000
},
{
"epoch": 19.53,
"learning_rate": 2.29485e-05,
"loss": 3.4021,
"step": 216500
},
{
"epoch": 19.57,
"learning_rate": 2.2886e-05,
"loss": 3.4095,
"step": 217000
},
{
"epoch": 19.62,
"learning_rate": 2.28235e-05,
"loss": 3.4028,
"step": 217500
},
{
"epoch": 19.66,
"learning_rate": 2.2761e-05,
"loss": 3.407,
"step": 218000
},
{
"epoch": 19.71,
"learning_rate": 2.26985e-05,
"loss": 3.4132,
"step": 218500
},
{
"epoch": 19.75,
"learning_rate": 2.2636e-05,
"loss": 3.4077,
"step": 219000
},
{
"epoch": 19.8,
"learning_rate": 2.25735e-05,
"loss": 3.4138,
"step": 219500
},
{
"epoch": 19.84,
"learning_rate": 2.2511125000000004e-05,
"loss": 3.4038,
"step": 220000
},
{
"epoch": 19.89,
"learning_rate": 2.244875e-05,
"loss": 3.4052,
"step": 220500
},
{
"epoch": 19.93,
"learning_rate": 2.238625e-05,
"loss": 3.4074,
"step": 221000
},
{
"epoch": 19.98,
"learning_rate": 2.232375e-05,
"loss": 3.4074,
"step": 221500
},
{
"epoch": 20.02,
"learning_rate": 2.226125e-05,
"loss": 3.404,
"step": 222000
},
{
"epoch": 20.07,
"learning_rate": 2.219875e-05,
"loss": 3.4,
"step": 222500
},
{
"epoch": 20.11,
"learning_rate": 2.213625e-05,
"loss": 3.4001,
"step": 223000
},
{
"epoch": 20.16,
"learning_rate": 2.2073750000000002e-05,
"loss": 3.3976,
"step": 223500
},
{
"epoch": 20.2,
"learning_rate": 2.2011375000000002e-05,
"loss": 3.4042,
"step": 224000
},
{
"epoch": 20.25,
"learning_rate": 2.1948875e-05,
"loss": 3.4025,
"step": 224500
},
{
"epoch": 20.29,
"learning_rate": 2.1886375e-05,
"loss": 3.4024,
"step": 225000
},
{
"epoch": 20.34,
"learning_rate": 2.1823875e-05,
"loss": 3.4071,
"step": 225500
},
{
"epoch": 20.38,
"learning_rate": 2.1761375e-05,
"loss": 3.3998,
"step": 226000
},
{
"epoch": 20.43,
"learning_rate": 2.1698875e-05,
"loss": 3.4028,
"step": 226500
},
{
"epoch": 20.47,
"learning_rate": 2.1636375e-05,
"loss": 3.4084,
"step": 227000
},
{
"epoch": 20.52,
"learning_rate": 2.1573875000000002e-05,
"loss": 3.4082,
"step": 227500
},
{
"epoch": 20.56,
"learning_rate": 2.1511500000000002e-05,
"loss": 3.4051,
"step": 228000
},
{
"epoch": 20.61,
"learning_rate": 2.1449e-05,
"loss": 3.4011,
"step": 228500
},
{
"epoch": 20.65,
"learning_rate": 2.1386625e-05,
"loss": 3.4041,
"step": 229000
},
{
"epoch": 20.7,
"learning_rate": 2.1324125e-05,
"loss": 3.4057,
"step": 229500
},
{
"epoch": 20.75,
"learning_rate": 2.1261625e-05,
"loss": 3.4035,
"step": 230000
},
{
"epoch": 20.79,
"learning_rate": 2.1199125e-05,
"loss": 3.404,
"step": 230500
},
{
"epoch": 20.84,
"learning_rate": 2.1136625000000002e-05,
"loss": 3.4056,
"step": 231000
},
{
"epoch": 20.88,
"learning_rate": 2.1074125e-05,
"loss": 3.4059,
"step": 231500
},
{
"epoch": 20.93,
"learning_rate": 2.101175e-05,
"loss": 3.4038,
"step": 232000
},
{
"epoch": 20.97,
"learning_rate": 2.094925e-05,
"loss": 3.4031,
"step": 232500
},
{
"epoch": 21.02,
"learning_rate": 2.088675e-05,
"loss": 3.4027,
"step": 233000
},
{
"epoch": 21.06,
"learning_rate": 2.082425e-05,
"loss": 3.3979,
"step": 233500
},
{
"epoch": 21.11,
"learning_rate": 2.076175e-05,
"loss": 3.4026,
"step": 234000
},
{
"epoch": 21.15,
"learning_rate": 2.069925e-05,
"loss": 3.4007,
"step": 234500
},
{
"epoch": 21.2,
"learning_rate": 2.0636750000000002e-05,
"loss": 3.3966,
"step": 235000
},
{
"epoch": 21.24,
"learning_rate": 2.057425e-05,
"loss": 3.3991,
"step": 235500
},
{
"epoch": 21.29,
"learning_rate": 2.0511875e-05,
"loss": 3.3993,
"step": 236000
},
{
"epoch": 21.33,
"learning_rate": 2.0449375000000003e-05,
"loss": 3.4017,
"step": 236500
},
{
"epoch": 21.38,
"learning_rate": 2.0386875e-05,
"loss": 3.3995,
"step": 237000
},
{
"epoch": 21.42,
"learning_rate": 2.0324375e-05,
"loss": 3.4066,
"step": 237500
},
{
"epoch": 21.47,
"learning_rate": 2.0262e-05,
"loss": 3.3995,
"step": 238000
},
{
"epoch": 21.51,
"learning_rate": 2.0199500000000002e-05,
"loss": 3.4038,
"step": 238500
},
{
"epoch": 21.56,
"learning_rate": 2.0137e-05,
"loss": 3.3997,
"step": 239000
},
{
"epoch": 21.6,
"learning_rate": 2.0074500000000002e-05,
"loss": 3.3984,
"step": 239500
},
{
"epoch": 21.65,
"learning_rate": 2.0012125e-05,
"loss": 3.4012,
"step": 240000
},
{
"epoch": 21.69,
"learning_rate": 1.9949625e-05,
"loss": 3.3983,
"step": 240500
},
{
"epoch": 21.74,
"learning_rate": 1.9887125e-05,
"loss": 3.4043,
"step": 241000
},
{
"epoch": 21.78,
"learning_rate": 1.9824625e-05,
"loss": 3.3983,
"step": 241500
},
{
"epoch": 21.83,
"learning_rate": 1.9762250000000002e-05,
"loss": 3.4018,
"step": 242000
},
{
"epoch": 21.87,
"learning_rate": 1.969975e-05,
"loss": 3.4004,
"step": 242500
},
{
"epoch": 21.92,
"learning_rate": 1.9637250000000002e-05,
"loss": 3.4026,
"step": 243000
},
{
"epoch": 21.96,
"learning_rate": 1.9574875e-05,
"loss": 3.4022,
"step": 243500
},
{
"epoch": 22.01,
"learning_rate": 1.9512375e-05,
"loss": 3.3985,
"step": 244000
},
{
"epoch": 22.05,
"learning_rate": 1.9449875e-05,
"loss": 3.3935,
"step": 244500
},
{
"epoch": 22.1,
"learning_rate": 1.9387375000000002e-05,
"loss": 3.4019,
"step": 245000
},
{
"epoch": 22.14,
"learning_rate": 1.9324875e-05,
"loss": 3.396,
"step": 245500
},
{
"epoch": 22.19,
"learning_rate": 1.9262375000000002e-05,
"loss": 3.3981,
"step": 246000
},
{
"epoch": 22.23,
"learning_rate": 1.9200125e-05,
"loss": 3.3935,
"step": 246500
},
{
"epoch": 22.28,
"learning_rate": 1.9137625e-05,
"loss": 3.396,
"step": 247000
},
{
"epoch": 22.32,
"learning_rate": 1.9075125e-05,
"loss": 3.3978,
"step": 247500
},
{
"epoch": 22.37,
"learning_rate": 1.9012625e-05,
"loss": 3.3973,
"step": 248000
},
{
"epoch": 22.41,
"learning_rate": 1.8950125000000002e-05,
"loss": 3.3974,
"step": 248500
},
{
"epoch": 22.46,
"learning_rate": 1.8887625e-05,
"loss": 3.3993,
"step": 249000
},
{
"epoch": 22.5,
"learning_rate": 1.882525e-05,
"loss": 3.4011,
"step": 249500
},
{
"epoch": 22.55,
"learning_rate": 1.8762750000000003e-05,
"loss": 3.3976,
"step": 250000
},
{
"epoch": 22.59,
"learning_rate": 1.870025e-05,
"loss": 3.3981,
"step": 250500
},
{
"epoch": 22.64,
"learning_rate": 1.863775e-05,
"loss": 3.4044,
"step": 251000
},
{
"epoch": 22.68,
"learning_rate": 1.8575249999999998e-05,
"loss": 3.3969,
"step": 251500
},
{
"epoch": 22.73,
"learning_rate": 1.851275e-05,
"loss": 3.3933,
"step": 252000
},
{
"epoch": 22.77,
"learning_rate": 1.8450250000000002e-05,
"loss": 3.3981,
"step": 252500
},
{
"epoch": 22.82,
"learning_rate": 1.838775e-05,
"loss": 3.3936,
"step": 253000
},
{
"epoch": 22.86,
"learning_rate": 1.8325250000000002e-05,
"loss": 3.3964,
"step": 253500
},
{
"epoch": 22.91,
"learning_rate": 1.826275e-05,
"loss": 3.397,
"step": 254000
},
{
"epoch": 22.95,
"learning_rate": 1.8200250000000002e-05,
"loss": 3.3963,
"step": 254500
},
{
"epoch": 23.0,
"learning_rate": 1.813775e-05,
"loss": 3.3977,
"step": 255000
},
{
"epoch": 23.05,
"learning_rate": 1.8075375e-05,
"loss": 3.3935,
"step": 255500
},
{
"epoch": 23.09,
"learning_rate": 1.8013000000000002e-05,
"loss": 3.3964,
"step": 256000
},
{
"epoch": 23.14,
"learning_rate": 1.79505e-05,
"loss": 3.401,
"step": 256500
},
{
"epoch": 23.18,
"learning_rate": 1.7888125e-05,
"loss": 3.3938,
"step": 257000
},
{
"epoch": 23.23,
"learning_rate": 1.7825625000000003e-05,
"loss": 3.3931,
"step": 257500
},
{
"epoch": 23.27,
"learning_rate": 1.7763125e-05,
"loss": 3.3949,
"step": 258000
},
{
"epoch": 23.32,
"learning_rate": 1.7700625e-05,
"loss": 3.396,
"step": 258500
},
{
"epoch": 23.36,
"learning_rate": 1.7638124999999998e-05,
"loss": 3.3942,
"step": 259000
},
{
"epoch": 23.41,
"learning_rate": 1.7575625e-05,
"loss": 3.3946,
"step": 259500
},
{
"epoch": 23.45,
"learning_rate": 1.7513125e-05,
"loss": 3.3992,
"step": 260000
},
{
"epoch": 23.5,
"learning_rate": 1.7450625e-05,
"loss": 3.391,
"step": 260500
},
{
"epoch": 23.54,
"learning_rate": 1.7388125e-05,
"loss": 3.3936,
"step": 261000
},
{
"epoch": 23.59,
"learning_rate": 1.7325625e-05,
"loss": 3.3946,
"step": 261500
},
{
"epoch": 23.63,
"learning_rate": 1.7263125000000003e-05,
"loss": 3.3935,
"step": 262000
},
{
"epoch": 23.68,
"learning_rate": 1.7200625e-05,
"loss": 3.3954,
"step": 262500
},
{
"epoch": 23.72,
"learning_rate": 1.713825e-05,
"loss": 3.3972,
"step": 263000
},
{
"epoch": 23.77,
"learning_rate": 1.707575e-05,
"loss": 3.3912,
"step": 263500
},
{
"epoch": 23.81,
"learning_rate": 1.701325e-05,
"loss": 3.3925,
"step": 264000
},
{
"epoch": 23.86,
"learning_rate": 1.6950875000000002e-05,
"loss": 3.3921,
"step": 264500
},
{
"epoch": 23.9,
"learning_rate": 1.6888375e-05,
"loss": 3.3894,
"step": 265000
},
{
"epoch": 23.95,
"learning_rate": 1.6825875000000003e-05,
"loss": 3.3949,
"step": 265500
},
{
"epoch": 23.99,
"learning_rate": 1.6763375e-05,
"loss": 3.3925,
"step": 266000
},
{
"epoch": 24.04,
"learning_rate": 1.6700875000000003e-05,
"loss": 3.3923,
"step": 266500
},
{
"epoch": 24.08,
"learning_rate": 1.66385e-05,
"loss": 3.3892,
"step": 267000
},
{
"epoch": 24.13,
"learning_rate": 1.6576e-05,
"loss": 3.3893,
"step": 267500
},
{
"epoch": 24.17,
"learning_rate": 1.65135e-05,
"loss": 3.3872,
"step": 268000
},
{
"epoch": 24.22,
"learning_rate": 1.6451e-05,
"loss": 3.3936,
"step": 268500
},
{
"epoch": 24.26,
"learning_rate": 1.63885e-05,
"loss": 3.3879,
"step": 269000
},
{
"epoch": 24.31,
"learning_rate": 1.6326e-05,
"loss": 3.3888,
"step": 269500
},
{
"epoch": 24.35,
"learning_rate": 1.6263625000000003e-05,
"loss": 3.392,
"step": 270000
},
{
"epoch": 24.4,
"learning_rate": 1.6201125e-05,
"loss": 3.3913,
"step": 270500
},
{
"epoch": 24.44,
"learning_rate": 1.6138625e-05,
"loss": 3.3901,
"step": 271000
},
{
"epoch": 24.49,
"learning_rate": 1.6076125000000002e-05,
"loss": 3.391,
"step": 271500
},
{
"epoch": 24.53,
"learning_rate": 1.6013625e-05,
"loss": 3.393,
"step": 272000
},
{
"epoch": 24.58,
"learning_rate": 1.5951125e-05,
"loss": 3.3953,
"step": 272500
},
{
"epoch": 24.62,
"learning_rate": 1.5888625e-05,
"loss": 3.3892,
"step": 273000
},
{
"epoch": 24.67,
"learning_rate": 1.5826125e-05,
"loss": 3.3964,
"step": 273500
},
{
"epoch": 24.71,
"learning_rate": 1.5763750000000003e-05,
"loss": 3.3945,
"step": 274000
},
{
"epoch": 24.76,
"learning_rate": 1.570125e-05,
"loss": 3.3964,
"step": 274500
},
{
"epoch": 24.8,
"learning_rate": 1.5638875e-05,
"loss": 3.3937,
"step": 275000
},
{
"epoch": 24.85,
"learning_rate": 1.5576375e-05,
"loss": 3.3897,
"step": 275500
},
{
"epoch": 24.89,
"learning_rate": 1.5513875e-05,
"loss": 3.3916,
"step": 276000
},
{
"epoch": 24.94,
"learning_rate": 1.5451375e-05,
"loss": 3.3917,
"step": 276500
},
{
"epoch": 24.98,
"learning_rate": 1.5389e-05,
"loss": 3.3941,
"step": 277000
},
{
"epoch": 25.03,
"learning_rate": 1.5326500000000003e-05,
"loss": 3.3929,
"step": 277500
},
{
"epoch": 25.07,
"learning_rate": 1.5264e-05,
"loss": 3.3903,
"step": 278000
},
{
"epoch": 25.12,
"learning_rate": 1.5201500000000002e-05,
"loss": 3.3851,
"step": 278500
},
{
"epoch": 25.16,
"learning_rate": 1.5139e-05,
"loss": 3.3887,
"step": 279000
},
{
"epoch": 25.21,
"learning_rate": 1.5076500000000002e-05,
"loss": 3.3908,
"step": 279500
},
{
"epoch": 25.25,
"learning_rate": 1.5014e-05,
"loss": 3.3888,
"step": 280000
},
{
"epoch": 25.3,
"learning_rate": 1.4951625e-05,
"loss": 3.392,
"step": 280500
},
{
"epoch": 25.34,
"learning_rate": 1.4889125000000001e-05,
"loss": 3.3876,
"step": 281000
},
{
"epoch": 25.39,
"learning_rate": 1.4826625e-05,
"loss": 3.3851,
"step": 281500
},
{
"epoch": 25.44,
"learning_rate": 1.4764125000000002e-05,
"loss": 3.3887,
"step": 282000
},
{
"epoch": 25.48,
"learning_rate": 1.4701625e-05,
"loss": 3.3898,
"step": 282500
},
{
"epoch": 25.53,
"learning_rate": 1.4639125e-05,
"loss": 3.3924,
"step": 283000
},
{
"epoch": 25.57,
"learning_rate": 1.4576625000000002e-05,
"loss": 3.3869,
"step": 283500
},
{
"epoch": 25.62,
"learning_rate": 1.4514125e-05,
"loss": 3.3871,
"step": 284000
},
{
"epoch": 25.66,
"learning_rate": 1.445175e-05,
"loss": 3.3909,
"step": 284500
},
{
"epoch": 25.71,
"learning_rate": 1.4389250000000001e-05,
"loss": 3.3883,
"step": 285000
},
{
"epoch": 25.75,
"learning_rate": 1.432675e-05,
"loss": 3.3925,
"step": 285500
},
{
"epoch": 25.8,
"learning_rate": 1.4264375000000002e-05,
"loss": 3.39,
"step": 286000
},
{
"epoch": 25.84,
"learning_rate": 1.4201875e-05,
"loss": 3.3893,
"step": 286500
},
{
"epoch": 25.89,
"learning_rate": 1.4139375000000002e-05,
"loss": 3.3943,
"step": 287000
},
{
"epoch": 25.93,
"learning_rate": 1.4076875e-05,
"loss": 3.386,
"step": 287500
},
{
"epoch": 25.98,
"learning_rate": 1.40145e-05,
"loss": 3.3857,
"step": 288000
},
{
"epoch": 26.02,
"learning_rate": 1.3952000000000001e-05,
"loss": 3.385,
"step": 288500
},
{
"epoch": 26.07,
"learning_rate": 1.38895e-05,
"loss": 3.3878,
"step": 289000
},
{
"epoch": 26.11,
"learning_rate": 1.3827000000000002e-05,
"loss": 3.385,
"step": 289500
},
{
"epoch": 26.16,
"learning_rate": 1.37645e-05,
"loss": 3.385,
"step": 290000
},
{
"epoch": 26.2,
"learning_rate": 1.3702125000000002e-05,
"loss": 3.3881,
"step": 290500
},
{
"epoch": 26.25,
"learning_rate": 1.3639625e-05,
"loss": 3.3883,
"step": 291000
},
{
"epoch": 26.29,
"learning_rate": 1.3577125000000001e-05,
"loss": 3.3865,
"step": 291500
},
{
"epoch": 26.34,
"learning_rate": 1.3514625e-05,
"loss": 3.3826,
"step": 292000
},
{
"epoch": 26.38,
"learning_rate": 1.3452125000000001e-05,
"loss": 3.3918,
"step": 292500
},
{
"epoch": 26.43,
"learning_rate": 1.3389625e-05,
"loss": 3.3854,
"step": 293000
},
{
"epoch": 26.47,
"learning_rate": 1.3327125000000002e-05,
"loss": 3.3873,
"step": 293500
},
{
"epoch": 26.52,
"learning_rate": 1.326475e-05,
"loss": 3.3906,
"step": 294000
},
{
"epoch": 26.56,
"learning_rate": 1.3202249999999999e-05,
"loss": 3.3904,
"step": 294500
},
{
"epoch": 26.61,
"learning_rate": 1.313975e-05,
"loss": 3.3852,
"step": 295000
},
{
"epoch": 26.65,
"learning_rate": 1.3077250000000003e-05,
"loss": 3.3843,
"step": 295500
},
{
"epoch": 26.7,
"learning_rate": 1.3014750000000001e-05,
"loss": 3.3857,
"step": 296000
},
{
"epoch": 26.74,
"learning_rate": 1.2952250000000001e-05,
"loss": 3.388,
"step": 296500
},
{
"epoch": 26.79,
"learning_rate": 1.2889875000000002e-05,
"loss": 3.3881,
"step": 297000
},
{
"epoch": 26.83,
"learning_rate": 1.2827375e-05,
"loss": 3.3875,
"step": 297500
},
{
"epoch": 26.88,
"learning_rate": 1.2764875e-05,
"loss": 3.3834,
"step": 298000
},
{
"epoch": 26.92,
"learning_rate": 1.2702375e-05,
"loss": 3.3867,
"step": 298500
},
{
"epoch": 26.97,
"learning_rate": 1.2639875e-05,
"loss": 3.3892,
"step": 299000
},
{
"epoch": 27.01,
"learning_rate": 1.2577375e-05,
"loss": 3.3863,
"step": 299500
},
{
"epoch": 27.06,
"learning_rate": 1.2514875000000001e-05,
"loss": 3.3841,
"step": 300000
},
{
"epoch": 27.1,
"learning_rate": 1.2452375000000001e-05,
"loss": 3.3849,
"step": 300500
},
{
"epoch": 27.15,
"learning_rate": 1.239e-05,
"loss": 3.383,
"step": 301000
},
{
"epoch": 27.19,
"learning_rate": 1.23275e-05,
"loss": 3.3832,
"step": 301500
},
{
"epoch": 27.24,
"learning_rate": 1.2265e-05,
"loss": 3.3854,
"step": 302000
},
{
"epoch": 27.28,
"learning_rate": 1.22025e-05,
"loss": 3.3843,
"step": 302500
},
{
"epoch": 27.33,
"learning_rate": 1.214025e-05,
"loss": 3.3839,
"step": 303000
},
{
"epoch": 27.37,
"learning_rate": 1.207775e-05,
"loss": 3.3902,
"step": 303500
},
{
"epoch": 27.42,
"learning_rate": 1.201525e-05,
"loss": 3.3849,
"step": 304000
},
{
"epoch": 27.46,
"learning_rate": 1.1952875e-05,
"loss": 3.3838,
"step": 304500
},
{
"epoch": 27.51,
"learning_rate": 1.1890375e-05,
"loss": 3.3831,
"step": 305000
},
{
"epoch": 27.55,
"learning_rate": 1.1827875e-05,
"loss": 3.387,
"step": 305500
},
{
"epoch": 27.6,
"learning_rate": 1.1765375e-05,
"loss": 3.3852,
"step": 306000
},
{
"epoch": 27.64,
"learning_rate": 1.1703e-05,
"loss": 3.3805,
"step": 306500
},
{
"epoch": 27.69,
"learning_rate": 1.16405e-05,
"loss": 3.385,
"step": 307000
},
{
"epoch": 27.74,
"learning_rate": 1.1578e-05,
"loss": 3.385,
"step": 307500
},
{
"epoch": 27.78,
"learning_rate": 1.15155e-05,
"loss": 3.3858,
"step": 308000
},
{
"epoch": 27.83,
"learning_rate": 1.1453000000000002e-05,
"loss": 3.3839,
"step": 308500
},
{
"epoch": 27.87,
"learning_rate": 1.13905e-05,
"loss": 3.3851,
"step": 309000
},
{
"epoch": 27.92,
"learning_rate": 1.1328e-05,
"loss": 3.3807,
"step": 309500
},
{
"epoch": 27.96,
"learning_rate": 1.12655e-05,
"loss": 3.3885,
"step": 310000
},
{
"epoch": 28.01,
"learning_rate": 1.1203125000000001e-05,
"loss": 3.3837,
"step": 310500
},
{
"epoch": 28.05,
"learning_rate": 1.1140625e-05,
"loss": 3.3781,
"step": 311000
},
{
"epoch": 28.1,
"learning_rate": 1.1078125e-05,
"loss": 3.382,
"step": 311500
},
{
"epoch": 28.14,
"learning_rate": 1.1015625e-05,
"loss": 3.3815,
"step": 312000
},
{
"epoch": 28.19,
"learning_rate": 1.0953125e-05,
"loss": 3.3853,
"step": 312500
},
{
"epoch": 28.23,
"learning_rate": 1.0890625e-05,
"loss": 3.3849,
"step": 313000
},
{
"epoch": 28.28,
"learning_rate": 1.0828250000000001e-05,
"loss": 3.3825,
"step": 313500
},
{
"epoch": 28.32,
"learning_rate": 1.0765875e-05,
"loss": 3.3811,
"step": 314000
},
{
"epoch": 28.37,
"learning_rate": 1.0703375e-05,
"loss": 3.3775,
"step": 314500
},
{
"epoch": 28.41,
"learning_rate": 1.0640875e-05,
"loss": 3.3851,
"step": 315000
},
{
"epoch": 28.46,
"learning_rate": 1.0578375e-05,
"loss": 3.388,
"step": 315500
},
{
"epoch": 28.5,
"learning_rate": 1.0515875e-05,
"loss": 3.3806,
"step": 316000
},
{
"epoch": 28.55,
"learning_rate": 1.0453375e-05,
"loss": 3.3841,
"step": 316500
},
{
"epoch": 28.59,
"learning_rate": 1.0390875e-05,
"loss": 3.3856,
"step": 317000
},
{
"epoch": 28.64,
"learning_rate": 1.0328375e-05,
"loss": 3.3835,
"step": 317500
},
{
"epoch": 28.68,
"learning_rate": 1.0265875e-05,
"loss": 3.38,
"step": 318000
},
{
"epoch": 28.73,
"learning_rate": 1.02035e-05,
"loss": 3.3829,
"step": 318500
},
{
"epoch": 28.77,
"learning_rate": 1.0141e-05,
"loss": 3.3794,
"step": 319000
},
{
"epoch": 28.82,
"learning_rate": 1.0078625e-05,
"loss": 3.3837,
"step": 319500
},
{
"epoch": 28.86,
"learning_rate": 1.0016125e-05,
"loss": 3.3798,
"step": 320000
},
{
"epoch": 28.91,
"learning_rate": 9.953625e-06,
"loss": 3.3836,
"step": 320500
},
{
"epoch": 28.95,
"learning_rate": 9.891250000000001e-06,
"loss": 3.3841,
"step": 321000
},
{
"epoch": 29.0,
"learning_rate": 9.82875e-06,
"loss": 3.38,
"step": 321500
},
{
"epoch": 29.04,
"learning_rate": 9.76625e-06,
"loss": 3.375,
"step": 322000
},
{
"epoch": 29.09,
"learning_rate": 9.70375e-06,
"loss": 3.3763,
"step": 322500
},
{
"epoch": 29.13,
"learning_rate": 9.64125e-06,
"loss": 3.3831,
"step": 323000
},
{
"epoch": 29.18,
"learning_rate": 9.57875e-06,
"loss": 3.3844,
"step": 323500
},
{
"epoch": 29.22,
"learning_rate": 9.51625e-06,
"loss": 3.3822,
"step": 324000
},
{
"epoch": 29.27,
"learning_rate": 9.45375e-06,
"loss": 3.3815,
"step": 324500
},
{
"epoch": 29.31,
"learning_rate": 9.39125e-06,
"loss": 3.382,
"step": 325000
},
{
"epoch": 29.36,
"learning_rate": 9.32875e-06,
"loss": 3.375,
"step": 325500
},
{
"epoch": 29.4,
"learning_rate": 9.26625e-06,
"loss": 3.3796,
"step": 326000
},
{
"epoch": 29.45,
"learning_rate": 9.20375e-06,
"loss": 3.3799,
"step": 326500
},
{
"epoch": 29.49,
"learning_rate": 9.14125e-06,
"loss": 3.3861,
"step": 327000
},
{
"epoch": 29.54,
"learning_rate": 9.078875e-06,
"loss": 3.3837,
"step": 327500
},
{
"epoch": 29.58,
"learning_rate": 9.016375e-06,
"loss": 3.3795,
"step": 328000
},
{
"epoch": 29.63,
"learning_rate": 8.953875e-06,
"loss": 3.3787,
"step": 328500
},
{
"epoch": 29.67,
"learning_rate": 8.891375000000001e-06,
"loss": 3.3819,
"step": 329000
},
{
"epoch": 29.72,
"learning_rate": 8.829e-06,
"loss": 3.3802,
"step": 329500
},
{
"epoch": 29.76,
"learning_rate": 8.766625e-06,
"loss": 3.383,
"step": 330000
},
{
"epoch": 29.81,
"learning_rate": 8.704125e-06,
"loss": 3.3832,
"step": 330500
},
{
"epoch": 29.85,
"learning_rate": 8.641625e-06,
"loss": 3.3784,
"step": 331000
},
{
"epoch": 29.9,
"learning_rate": 8.579125e-06,
"loss": 3.3788,
"step": 331500
},
{
"epoch": 29.94,
"learning_rate": 8.516625e-06,
"loss": 3.3797,
"step": 332000
},
{
"epoch": 29.99,
"learning_rate": 8.454125000000001e-06,
"loss": 3.3809,
"step": 332500
},
{
"epoch": 30.04,
"learning_rate": 8.391625e-06,
"loss": 3.3792,
"step": 333000
},
{
"epoch": 30.08,
"learning_rate": 8.329125e-06,
"loss": 3.3784,
"step": 333500
},
{
"epoch": 30.13,
"learning_rate": 8.266625e-06,
"loss": 3.377,
"step": 334000
},
{
"epoch": 30.17,
"learning_rate": 8.204125e-06,
"loss": 3.3774,
"step": 334500
},
{
"epoch": 30.22,
"learning_rate": 8.14175e-06,
"loss": 3.3822,
"step": 335000
},
{
"epoch": 30.26,
"learning_rate": 8.07925e-06,
"loss": 3.382,
"step": 335500
},
{
"epoch": 30.31,
"learning_rate": 8.01675e-06,
"loss": 3.3753,
"step": 336000
},
{
"epoch": 30.35,
"learning_rate": 7.954250000000001e-06,
"loss": 3.3784,
"step": 336500
},
{
"epoch": 30.4,
"learning_rate": 7.891750000000001e-06,
"loss": 3.3807,
"step": 337000
},
{
"epoch": 30.44,
"learning_rate": 7.829375e-06,
"loss": 3.3787,
"step": 337500
},
{
"epoch": 30.49,
"learning_rate": 7.766875e-06,
"loss": 3.3751,
"step": 338000
},
{
"epoch": 30.53,
"learning_rate": 7.704375e-06,
"loss": 3.3808,
"step": 338500
},
{
"epoch": 30.58,
"learning_rate": 7.641875e-06,
"loss": 3.3817,
"step": 339000
},
{
"epoch": 30.62,
"learning_rate": 7.579375e-06,
"loss": 3.3808,
"step": 339500
},
{
"epoch": 30.67,
"learning_rate": 7.517e-06,
"loss": 3.3784,
"step": 340000
},
{
"epoch": 30.71,
"learning_rate": 7.4545e-06,
"loss": 3.3774,
"step": 340500
},
{
"epoch": 30.76,
"learning_rate": 7.3920000000000005e-06,
"loss": 3.3788,
"step": 341000
},
{
"epoch": 30.8,
"learning_rate": 7.3295e-06,
"loss": 3.377,
"step": 341500
},
{
"epoch": 30.85,
"learning_rate": 7.267e-06,
"loss": 3.3779,
"step": 342000
},
{
"epoch": 30.89,
"learning_rate": 7.2045e-06,
"loss": 3.3786,
"step": 342500
},
{
"epoch": 30.94,
"learning_rate": 7.142e-06,
"loss": 3.3765,
"step": 343000
},
{
"epoch": 30.98,
"learning_rate": 7.0795e-06,
"loss": 3.3755,
"step": 343500
},
{
"epoch": 31.03,
"learning_rate": 7.017125000000001e-06,
"loss": 3.3787,
"step": 344000
},
{
"epoch": 31.07,
"learning_rate": 6.954625e-06,
"loss": 3.3739,
"step": 344500
},
{
"epoch": 31.12,
"learning_rate": 6.89225e-06,
"loss": 3.3776,
"step": 345000
},
{
"epoch": 31.16,
"learning_rate": 6.82975e-06,
"loss": 3.3787,
"step": 345500
},
{
"epoch": 31.21,
"learning_rate": 6.76725e-06,
"loss": 3.3725,
"step": 346000
},
{
"epoch": 31.25,
"learning_rate": 6.70475e-06,
"loss": 3.3765,
"step": 346500
},
{
"epoch": 31.3,
"learning_rate": 6.642375000000001e-06,
"loss": 3.3796,
"step": 347000
},
{
"epoch": 31.34,
"learning_rate": 6.579875e-06,
"loss": 3.3784,
"step": 347500
},
{
"epoch": 31.39,
"learning_rate": 6.5173750000000004e-06,
"loss": 3.3795,
"step": 348000
},
{
"epoch": 31.43,
"learning_rate": 6.454875000000001e-06,
"loss": 3.3788,
"step": 348500
},
{
"epoch": 31.48,
"learning_rate": 6.3925e-06,
"loss": 3.3806,
"step": 349000
},
{
"epoch": 31.52,
"learning_rate": 6.3299999999999995e-06,
"loss": 3.3773,
"step": 349500
},
{
"epoch": 31.57,
"learning_rate": 6.2675e-06,
"loss": 3.3744,
"step": 350000
},
{
"epoch": 31.61,
"learning_rate": 6.205000000000001e-06,
"loss": 3.378,
"step": 350500
},
{
"epoch": 31.66,
"learning_rate": 6.1425e-06,
"loss": 3.3752,
"step": 351000
},
{
"epoch": 31.7,
"learning_rate": 6.08e-06,
"loss": 3.3774,
"step": 351500
},
{
"epoch": 31.75,
"learning_rate": 6.0175e-06,
"loss": 3.3778,
"step": 352000
},
{
"epoch": 31.79,
"learning_rate": 5.955000000000001e-06,
"loss": 3.3737,
"step": 352500
},
{
"epoch": 31.84,
"learning_rate": 5.892625e-06,
"loss": 3.3813,
"step": 353000
},
{
"epoch": 31.88,
"learning_rate": 5.830250000000001e-06,
"loss": 3.3745,
"step": 353500
},
{
"epoch": 31.93,
"learning_rate": 5.76775e-06,
"loss": 3.3786,
"step": 354000
},
{
"epoch": 31.97,
"learning_rate": 5.70525e-06,
"loss": 3.3741,
"step": 354500
},
{
"epoch": 32.02,
"learning_rate": 5.64275e-06,
"loss": 3.374,
"step": 355000
},
{
"epoch": 32.06,
"learning_rate": 5.58025e-06,
"loss": 3.3763,
"step": 355500
},
{
"epoch": 32.11,
"learning_rate": 5.51775e-06,
"loss": 3.3787,
"step": 356000
},
{
"epoch": 32.15,
"learning_rate": 5.45525e-06,
"loss": 3.3719,
"step": 356500
},
{
"epoch": 32.2,
"learning_rate": 5.39275e-06,
"loss": 3.3775,
"step": 357000
},
{
"epoch": 32.24,
"learning_rate": 5.33025e-06,
"loss": 3.3785,
"step": 357500
},
{
"epoch": 32.29,
"learning_rate": 5.2677500000000005e-06,
"loss": 3.3763,
"step": 358000
},
{
"epoch": 32.34,
"learning_rate": 5.205375e-06,
"loss": 3.3768,
"step": 358500
},
{
"epoch": 32.38,
"learning_rate": 5.142875e-06,
"loss": 3.3751,
"step": 359000
},
{
"epoch": 32.43,
"learning_rate": 5.0803750000000005e-06,
"loss": 3.3749,
"step": 359500
},
{
"epoch": 32.47,
"learning_rate": 5.017875000000001e-06,
"loss": 3.377,
"step": 360000
},
{
"epoch": 32.52,
"learning_rate": 4.9555e-06,
"loss": 3.3744,
"step": 360500
},
{
"epoch": 32.56,
"learning_rate": 4.893e-06,
"loss": 3.3732,
"step": 361000
},
{
"epoch": 32.61,
"learning_rate": 4.830500000000001e-06,
"loss": 3.374,
"step": 361500
},
{
"epoch": 32.65,
"learning_rate": 4.768e-06,
"loss": 3.3755,
"step": 362000
},
{
"epoch": 32.7,
"learning_rate": 4.7055e-06,
"loss": 3.3753,
"step": 362500
},
{
"epoch": 32.74,
"learning_rate": 4.643e-06,
"loss": 3.38,
"step": 363000
},
{
"epoch": 32.79,
"learning_rate": 4.5805000000000004e-06,
"loss": 3.3785,
"step": 363500
},
{
"epoch": 32.83,
"learning_rate": 4.518e-06,
"loss": 3.3712,
"step": 364000
},
{
"epoch": 32.88,
"learning_rate": 4.4555e-06,
"loss": 3.3703,
"step": 364500
},
{
"epoch": 32.92,
"learning_rate": 4.393125e-06,
"loss": 3.375,
"step": 365000
},
{
"epoch": 32.97,
"learning_rate": 4.330625e-06,
"loss": 3.3708,
"step": 365500
},
{
"epoch": 33.01,
"learning_rate": 4.26825e-06,
"loss": 3.3748,
"step": 366000
},
{
"epoch": 33.06,
"learning_rate": 4.20575e-06,
"loss": 3.3734,
"step": 366500
},
{
"epoch": 33.1,
"learning_rate": 4.14325e-06,
"loss": 3.3797,
"step": 367000
},
{
"epoch": 33.15,
"learning_rate": 4.08075e-06,
"loss": 3.3736,
"step": 367500
},
{
"epoch": 33.19,
"learning_rate": 4.018250000000001e-06,
"loss": 3.374,
"step": 368000
},
{
"epoch": 33.24,
"learning_rate": 3.95575e-06,
"loss": 3.374,
"step": 368500
},
{
"epoch": 33.28,
"learning_rate": 3.89325e-06,
"loss": 3.3729,
"step": 369000
},
{
"epoch": 33.33,
"learning_rate": 3.830875000000001e-06,
"loss": 3.3742,
"step": 369500
},
{
"epoch": 33.37,
"learning_rate": 3.7683750000000006e-06,
"loss": 3.3769,
"step": 370000
},
{
"epoch": 33.42,
"learning_rate": 3.7058750000000003e-06,
"loss": 3.3745,
"step": 370500
},
{
"epoch": 33.46,
"learning_rate": 3.643375e-06,
"loss": 3.3769,
"step": 371000
},
{
"epoch": 33.51,
"learning_rate": 3.5808750000000003e-06,
"loss": 3.3707,
"step": 371500
},
{
"epoch": 33.55,
"learning_rate": 3.518375e-06,
"loss": 3.3746,
"step": 372000
},
{
"epoch": 33.6,
"learning_rate": 3.455875e-06,
"loss": 3.3734,
"step": 372500
},
{
"epoch": 33.64,
"learning_rate": 3.3935e-06,
"loss": 3.3732,
"step": 373000
},
{
"epoch": 33.69,
"learning_rate": 3.331e-06,
"loss": 3.3715,
"step": 373500
},
{
"epoch": 33.73,
"learning_rate": 3.2685e-06,
"loss": 3.3767,
"step": 374000
},
{
"epoch": 33.78,
"learning_rate": 3.206e-06,
"loss": 3.3736,
"step": 374500
},
{
"epoch": 33.82,
"learning_rate": 3.1435e-06,
"loss": 3.3745,
"step": 375000
},
{
"epoch": 33.87,
"learning_rate": 3.0810000000000002e-06,
"loss": 3.3703,
"step": 375500
},
{
"epoch": 33.91,
"learning_rate": 3.0185e-06,
"loss": 3.379,
"step": 376000
},
{
"epoch": 33.96,
"learning_rate": 2.956e-06,
"loss": 3.3694,
"step": 376500
},
{
"epoch": 34.0,
"learning_rate": 2.8935e-06,
"loss": 3.3717,
"step": 377000
},
{
"epoch": 34.05,
"learning_rate": 2.831e-06,
"loss": 3.3719,
"step": 377500
},
{
"epoch": 34.09,
"learning_rate": 2.7685000000000003e-06,
"loss": 3.3733,
"step": 378000
},
{
"epoch": 34.14,
"learning_rate": 2.706e-06,
"loss": 3.3738,
"step": 378500
},
{
"epoch": 34.18,
"learning_rate": 2.6436250000000005e-06,
"loss": 3.3772,
"step": 379000
},
{
"epoch": 34.23,
"learning_rate": 2.581125e-06,
"loss": 3.368,
"step": 379500
},
{
"epoch": 34.27,
"learning_rate": 2.518625e-06,
"loss": 3.3728,
"step": 380000
},
{
"epoch": 34.32,
"learning_rate": 2.456125e-06,
"loss": 3.3764,
"step": 380500
},
{
"epoch": 34.36,
"learning_rate": 2.393625e-06,
"loss": 3.3724,
"step": 381000
},
{
"epoch": 34.41,
"learning_rate": 2.331125e-06,
"loss": 3.3744,
"step": 381500
},
{
"epoch": 34.45,
"learning_rate": 2.2686250000000002e-06,
"loss": 3.3719,
"step": 382000
},
{
"epoch": 34.5,
"learning_rate": 2.2061250000000004e-06,
"loss": 3.3702,
"step": 382500
},
{
"epoch": 34.54,
"learning_rate": 2.143875e-06,
"loss": 3.3728,
"step": 383000
},
{
"epoch": 34.59,
"learning_rate": 2.081375e-06,
"loss": 3.3709,
"step": 383500
},
{
"epoch": 34.64,
"learning_rate": 2.018875e-06,
"loss": 3.3701,
"step": 384000
},
{
"epoch": 34.68,
"learning_rate": 1.9563750000000004e-06,
"loss": 3.3747,
"step": 384500
},
{
"epoch": 34.73,
"learning_rate": 1.894e-06,
"loss": 3.373,
"step": 385000
},
{
"epoch": 34.77,
"learning_rate": 1.8315000000000002e-06,
"loss": 3.3739,
"step": 385500
},
{
"epoch": 34.82,
"learning_rate": 1.7690000000000001e-06,
"loss": 3.3696,
"step": 386000
},
{
"epoch": 34.86,
"learning_rate": 1.7065e-06,
"loss": 3.3722,
"step": 386500
},
{
"epoch": 34.91,
"learning_rate": 1.644e-06,
"loss": 3.3705,
"step": 387000
},
{
"epoch": 34.95,
"learning_rate": 1.5815e-06,
"loss": 3.3753,
"step": 387500
},
{
"epoch": 35.0,
"learning_rate": 1.5190000000000002e-06,
"loss": 3.3714,
"step": 388000
},
{
"epoch": 35.04,
"learning_rate": 1.4565e-06,
"loss": 3.3704,
"step": 388500
},
{
"epoch": 35.09,
"learning_rate": 1.394e-06,
"loss": 3.3735,
"step": 389000
},
{
"epoch": 35.13,
"learning_rate": 1.3315e-06,
"loss": 3.3706,
"step": 389500
},
{
"epoch": 35.18,
"learning_rate": 1.269e-06,
"loss": 3.3742,
"step": 390000
},
{
"epoch": 35.22,
"learning_rate": 1.2067500000000001e-06,
"loss": 3.3698,
"step": 390500
},
{
"epoch": 35.27,
"learning_rate": 1.14425e-06,
"loss": 3.3695,
"step": 391000
},
{
"epoch": 35.31,
"learning_rate": 1.08175e-06,
"loss": 3.3717,
"step": 391500
},
{
"epoch": 35.36,
"learning_rate": 1.01925e-06,
"loss": 3.375,
"step": 392000
},
{
"epoch": 35.4,
"learning_rate": 9.5675e-07,
"loss": 3.3746,
"step": 392500
},
{
"epoch": 35.45,
"learning_rate": 8.943750000000001e-07,
"loss": 3.3715,
"step": 393000
},
{
"epoch": 35.49,
"learning_rate": 8.31875e-07,
"loss": 3.3717,
"step": 393500
},
{
"epoch": 35.54,
"learning_rate": 7.69375e-07,
"loss": 3.3714,
"step": 394000
},
{
"epoch": 35.58,
"learning_rate": 7.068750000000001e-07,
"loss": 3.3704,
"step": 394500
},
{
"epoch": 35.63,
"learning_rate": 6.44375e-07,
"loss": 3.3733,
"step": 395000
},
{
"epoch": 35.67,
"learning_rate": 5.81875e-07,
"loss": 3.3687,
"step": 395500
},
{
"epoch": 35.72,
"learning_rate": 5.19375e-07,
"loss": 3.3722,
"step": 396000
},
{
"epoch": 35.76,
"learning_rate": 4.56875e-07,
"loss": 3.375,
"step": 396500
},
{
"epoch": 35.81,
"learning_rate": 3.9437500000000004e-07,
"loss": 3.3661,
"step": 397000
},
{
"epoch": 35.85,
"learning_rate": 3.31875e-07,
"loss": 3.3716,
"step": 397500
},
{
"epoch": 35.9,
"learning_rate": 2.695e-07,
"loss": 3.3689,
"step": 398000
},
{
"epoch": 35.94,
"learning_rate": 2.07e-07,
"loss": 3.3683,
"step": 398500
},
{
"epoch": 35.99,
"learning_rate": 1.4450000000000003e-07,
"loss": 3.3696,
"step": 399000
},
{
"epoch": 36.03,
"learning_rate": 8.2e-08,
"loss": 3.3741,
"step": 399500
},
{
"epoch": 36.08,
"learning_rate": 1.9625000000000002e-08,
"loss": 3.3696,
"step": 400000
},
{
"epoch": 36.08,
"step": 400000,
"total_flos": 2.3048832150195405e+17,
"train_loss": 3.451316015625,
"train_runtime": 66733.7733,
"train_samples_per_second": 239.759,
"train_steps_per_second": 5.994
}
],
"max_steps": 400000,
"num_train_epochs": 37,
"total_flos": 2.3048832150195405e+17,
"trial_name": null,
"trial_params": null
}