{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 3.0, |
|
"global_step": 273072, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.990844905372942e-05, |
|
"loss": 1.2008, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9816898107458844e-05, |
|
"loss": 0.904, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.972534716118826e-05, |
|
"loss": 0.9083, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.963379621491768e-05, |
|
"loss": 0.9092, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.9542245268647095e-05, |
|
"loss": 0.8994, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.945069432237652e-05, |
|
"loss": 0.9007, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.9359143376105936e-05, |
|
"loss": 0.906, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.926759242983536e-05, |
|
"loss": 0.9026, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.917604148356478e-05, |
|
"loss": 0.8885, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.9084490537294195e-05, |
|
"loss": 0.8877, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.899293959102362e-05, |
|
"loss": 0.8886, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.890138864475303e-05, |
|
"loss": 0.9007, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.880983769848245e-05, |
|
"loss": 0.8963, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.871828675221187e-05, |
|
"loss": 0.8941, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.8626735805941294e-05, |
|
"loss": 0.8834, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.853518485967071e-05, |
|
"loss": 0.8852, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.8443633913400135e-05, |
|
"loss": 0.8888, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.835208296712955e-05, |
|
"loss": 0.8922, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.826053202085897e-05, |
|
"loss": 0.8927, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.816898107458839e-05, |
|
"loss": 0.8933, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.807743012831781e-05, |
|
"loss": 0.8863, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.798587918204723e-05, |
|
"loss": 0.8867, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.7894328235776645e-05, |
|
"loss": 0.8905, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.780277728950607e-05, |
|
"loss": 0.8957, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.7711226343235486e-05, |
|
"loss": 0.8859, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.761967539696491e-05, |
|
"loss": 0.8905, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.752812445069432e-05, |
|
"loss": 0.8861, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.7436573504423745e-05, |
|
"loss": 0.8919, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.734502255815316e-05, |
|
"loss": 0.8914, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.7253471611882586e-05, |
|
"loss": 0.8973, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.7161920665612e-05, |
|
"loss": 0.893, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.707036971934142e-05, |
|
"loss": 0.8924, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.6978818773070844e-05, |
|
"loss": 0.8805, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.6887267826800255e-05, |
|
"loss": 0.8978, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.679571688052968e-05, |
|
"loss": 0.89, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.6704165934259096e-05, |
|
"loss": 0.8874, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.661261498798852e-05, |
|
"loss": 0.8893, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.652106404171794e-05, |
|
"loss": 0.8872, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.642951309544736e-05, |
|
"loss": 0.8905, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.633796214917678e-05, |
|
"loss": 0.8919, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.6246411202906195e-05, |
|
"loss": 0.8846, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.615486025663561e-05, |
|
"loss": 0.8914, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.606330931036503e-05, |
|
"loss": 0.8863, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.5971758364094454e-05, |
|
"loss": 0.8921, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.588020741782387e-05, |
|
"loss": 0.8903, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.5788656471553295e-05, |
|
"loss": 0.8882, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.569710552528271e-05, |
|
"loss": 0.8945, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.5605554579012136e-05, |
|
"loss": 0.8944, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.5514003632741546e-05, |
|
"loss": 0.8784, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.542245268647097e-05, |
|
"loss": 0.8843, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.533090174020039e-05, |
|
"loss": 0.8882, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.5239350793929805e-05, |
|
"loss": 0.8891, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.514779984765923e-05, |
|
"loss": 0.8896, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.5056248901388646e-05, |
|
"loss": 0.8924, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.496469795511807e-05, |
|
"loss": 0.8812, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.487314700884749e-05, |
|
"loss": 0.8935, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.4781596062576904e-05, |
|
"loss": 0.8907, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.469004511630632e-05, |
|
"loss": 0.8887, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.4598494170035745e-05, |
|
"loss": 0.8898, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.450694322376516e-05, |
|
"loss": 0.8846, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.441539227749458e-05, |
|
"loss": 0.8858, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.4323841331224004e-05, |
|
"loss": 0.8933, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.423229038495342e-05, |
|
"loss": 0.8854, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.414073943868284e-05, |
|
"loss": 0.8931, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.4049188492412255e-05, |
|
"loss": 0.8904, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.395763754614168e-05, |
|
"loss": 0.884, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.3866086599871097e-05, |
|
"loss": 0.8963, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.377453565360052e-05, |
|
"loss": 0.89, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.368298470732994e-05, |
|
"loss": 0.8852, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.3591433761059355e-05, |
|
"loss": 0.8831, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.349988281478878e-05, |
|
"loss": 0.895, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.3408331868518196e-05, |
|
"loss": 0.8907, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.331678092224761e-05, |
|
"loss": 0.8956, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.322522997597703e-05, |
|
"loss": 0.8895, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.3133679029706454e-05, |
|
"loss": 0.8941, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.304212808343587e-05, |
|
"loss": 0.8919, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.2950577137165296e-05, |
|
"loss": 0.8986, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.285902619089471e-05, |
|
"loss": 0.8929, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.276747524462413e-05, |
|
"loss": 0.8906, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.267592429835355e-05, |
|
"loss": 0.8919, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.258437335208297e-05, |
|
"loss": 0.8983, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.249282240581239e-05, |
|
"loss": 0.8801, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.2401271459541805e-05, |
|
"loss": 0.8857, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.230972051327123e-05, |
|
"loss": 0.8778, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.221816956700065e-05, |
|
"loss": 0.8971, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.212661862073007e-05, |
|
"loss": 0.8845, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.203506767445948e-05, |
|
"loss": 0.8927, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.1943516728188905e-05, |
|
"loss": 0.8932, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.185196578191832e-05, |
|
"loss": 0.8867, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.1760414835647746e-05, |
|
"loss": 0.8911, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.166886388937716e-05, |
|
"loss": 0.8915, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.157731294310658e-05, |
|
"loss": 0.8925, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.1485761996836004e-05, |
|
"loss": 0.8828, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.1394211050565415e-05, |
|
"loss": 0.8966, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.130266010429484e-05, |
|
"loss": 0.8939, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.1211109158024256e-05, |
|
"loss": 0.8884, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.111955821175368e-05, |
|
"loss": 0.8925, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.10280072654831e-05, |
|
"loss": 0.8884, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.093645631921252e-05, |
|
"loss": 0.8912, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.084490537294194e-05, |
|
"loss": 0.8882, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.0753354426671356e-05, |
|
"loss": 0.8879, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.066180348040077e-05, |
|
"loss": 0.8872, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.057025253413019e-05, |
|
"loss": 0.9009, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.0478701587859614e-05, |
|
"loss": 0.8885, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.038715064158903e-05, |
|
"loss": 0.8896, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.0295599695318455e-05, |
|
"loss": 0.8895, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.020404874904787e-05, |
|
"loss": 0.8945, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.0112497802777296e-05, |
|
"loss": 0.8949, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.002094685650671e-05, |
|
"loss": 0.8928, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.992939591023613e-05, |
|
"loss": 0.9009, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.983784496396555e-05, |
|
"loss": 0.8918, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.9746294017694965e-05, |
|
"loss": 0.8809, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.965474307142439e-05, |
|
"loss": 0.8929, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.9563192125153806e-05, |
|
"loss": 0.8911, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.947164117888323e-05, |
|
"loss": 0.8933, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.938009023261265e-05, |
|
"loss": 0.8925, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.9288539286342065e-05, |
|
"loss": 0.8909, |
|
"step": 58500 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.919698834007148e-05, |
|
"loss": 0.893, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.9105437393800906e-05, |
|
"loss": 0.8879, |
|
"step": 59500 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.901388644753032e-05, |
|
"loss": 0.8947, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.892233550125975e-05, |
|
"loss": 0.8921, |
|
"step": 60500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.8830784554989164e-05, |
|
"loss": 0.8891, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.873923360871858e-05, |
|
"loss": 0.8894, |
|
"step": 61500 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.8647682662448e-05, |
|
"loss": 0.8917, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.8556131716177416e-05, |
|
"loss": 0.8839, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.846458076990684e-05, |
|
"loss": 0.8944, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.837302982363626e-05, |
|
"loss": 0.8929, |
|
"step": 63500 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.828147887736568e-05, |
|
"loss": 0.8811, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.81899279310951e-05, |
|
"loss": 0.89, |
|
"step": 64500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.809837698482452e-05, |
|
"loss": 0.8904, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.800682603855394e-05, |
|
"loss": 0.8901, |
|
"step": 65500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.7915275092283356e-05, |
|
"loss": 0.8976, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.7823724146012773e-05, |
|
"loss": 0.892, |
|
"step": 66500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.773217319974219e-05, |
|
"loss": 0.8906, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.7640622253471615e-05, |
|
"loss": 0.8846, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.754907130720103e-05, |
|
"loss": 0.8861, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.7457520360930456e-05, |
|
"loss": 0.8873, |
|
"step": 68500 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.736596941465987e-05, |
|
"loss": 0.8897, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.727441846838929e-05, |
|
"loss": 0.8891, |
|
"step": 69500 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.718286752211871e-05, |
|
"loss": 0.8875, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.709131657584813e-05, |
|
"loss": 0.903, |
|
"step": 70500 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.699976562957755e-05, |
|
"loss": 0.9041, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.6908214683306966e-05, |
|
"loss": 0.9015, |
|
"step": 71500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.681666373703639e-05, |
|
"loss": 0.8953, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.672511279076581e-05, |
|
"loss": 0.8998, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.663356184449523e-05, |
|
"loss": 0.8869, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.654201089822464e-05, |
|
"loss": 0.8871, |
|
"step": 73500 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.6450459951954065e-05, |
|
"loss": 0.898, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.635890900568348e-05, |
|
"loss": 0.8971, |
|
"step": 74500 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.6267358059412906e-05, |
|
"loss": 0.8986, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.6175807113142324e-05, |
|
"loss": 0.8942, |
|
"step": 75500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.608425616687174e-05, |
|
"loss": 0.8951, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.5992705220601165e-05, |
|
"loss": 0.8935, |
|
"step": 76500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.5901154274330575e-05, |
|
"loss": 0.8812, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.580960332806e-05, |
|
"loss": 0.8954, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.5718052381789416e-05, |
|
"loss": 0.8942, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.562650143551884e-05, |
|
"loss": 0.8932, |
|
"step": 78500 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.553495048924826e-05, |
|
"loss": 0.8915, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.544339954297768e-05, |
|
"loss": 0.8794, |
|
"step": 79500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.53518485967071e-05, |
|
"loss": 0.8998, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.5260297650436516e-05, |
|
"loss": 0.8926, |
|
"step": 80500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.516874670416593e-05, |
|
"loss": 0.8897, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.507719575789536e-05, |
|
"loss": 0.8856, |
|
"step": 81500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.4985644811624774e-05, |
|
"loss": 0.8958, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.489409386535419e-05, |
|
"loss": 0.8901, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.4802542919083615e-05, |
|
"loss": 0.8843, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.471099197281303e-05, |
|
"loss": 0.8991, |
|
"step": 83500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.4619441026542456e-05, |
|
"loss": 0.8977, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.4527890080271874e-05, |
|
"loss": 0.8917, |
|
"step": 84500 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.443633913400129e-05, |
|
"loss": 0.9006, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.434478818773071e-05, |
|
"loss": 0.8889, |
|
"step": 85500 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.425323724146013e-05, |
|
"loss": 0.8955, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.416168629518955e-05, |
|
"loss": 0.898, |
|
"step": 86500 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.4070135348918966e-05, |
|
"loss": 0.8845, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.397858440264839e-05, |
|
"loss": 0.8898, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.388703345637781e-05, |
|
"loss": 0.8933, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.3795482510107225e-05, |
|
"loss": 0.8903, |
|
"step": 88500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.370393156383664e-05, |
|
"loss": 0.8931, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.3612380617566066e-05, |
|
"loss": 0.9005, |
|
"step": 89500 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.352082967129548e-05, |
|
"loss": 0.8839, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.342927872502491e-05, |
|
"loss": 0.898, |
|
"step": 90500 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.3337727778754324e-05, |
|
"loss": 0.8996, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.324617683248374e-05, |
|
"loss": 0.7676, |
|
"step": 91500 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.3154625886213165e-05, |
|
"loss": 0.7695, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.3063074939942576e-05, |
|
"loss": 0.7604, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.2971523993672e-05, |
|
"loss": 0.761, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.287997304740142e-05, |
|
"loss": 0.7576, |
|
"step": 93500 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.278842210113084e-05, |
|
"loss": 0.7618, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.269687115486026e-05, |
|
"loss": 0.7585, |
|
"step": 94500 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.260532020858968e-05, |
|
"loss": 0.7636, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.25137692623191e-05, |
|
"loss": 0.7609, |
|
"step": 95500 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.2422218316048517e-05, |
|
"loss": 0.7605, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.2330667369777934e-05, |
|
"loss": 0.7686, |
|
"step": 96500 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.223911642350735e-05, |
|
"loss": 0.7633, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.2147565477236775e-05, |
|
"loss": 0.7644, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.205601453096619e-05, |
|
"loss": 0.7687, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.1964463584695616e-05, |
|
"loss": 0.767, |
|
"step": 98500 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.187291263842503e-05, |
|
"loss": 0.7663, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.178136169215446e-05, |
|
"loss": 0.7657, |
|
"step": 99500 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.168981074588387e-05, |
|
"loss": 0.7682, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.159825979961329e-05, |
|
"loss": 0.7721, |
|
"step": 100500 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.150670885334271e-05, |
|
"loss": 0.7673, |
|
"step": 101000 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.1415157907072126e-05, |
|
"loss": 0.7649, |
|
"step": 101500 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.132360696080155e-05, |
|
"loss": 0.7737, |
|
"step": 102000 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.123205601453097e-05, |
|
"loss": 0.7688, |
|
"step": 102500 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.114050506826039e-05, |
|
"loss": 0.7705, |
|
"step": 103000 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.10489541219898e-05, |
|
"loss": 0.764, |
|
"step": 103500 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.0957403175719225e-05, |
|
"loss": 0.7654, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.086585222944864e-05, |
|
"loss": 0.7762, |
|
"step": 104500 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.0774301283178067e-05, |
|
"loss": 0.7708, |
|
"step": 105000 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.0682750336907484e-05, |
|
"loss": 0.778, |
|
"step": 105500 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.05911993906369e-05, |
|
"loss": 0.7741, |
|
"step": 106000 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.0499648444366325e-05, |
|
"loss": 0.7714, |
|
"step": 106500 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.0408097498095746e-05, |
|
"loss": 0.774, |
|
"step": 107000 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.031654655182516e-05, |
|
"loss": 0.7704, |
|
"step": 107500 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.022499560555458e-05, |
|
"loss": 0.7797, |
|
"step": 108000 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.0133444659284e-05, |
|
"loss": 0.7647, |
|
"step": 108500 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 3.0041893713013418e-05, |
|
"loss": 0.774, |
|
"step": 109000 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 2.9950342766742838e-05, |
|
"loss": 0.7714, |
|
"step": 109500 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 2.985879182047226e-05, |
|
"loss": 0.7638, |
|
"step": 110000 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 2.976724087420168e-05, |
|
"loss": 0.7724, |
|
"step": 110500 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 2.9675689927931093e-05, |
|
"loss": 0.7723, |
|
"step": 111000 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 2.9584138981660514e-05, |
|
"loss": 0.767, |
|
"step": 111500 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 2.9492588035389934e-05, |
|
"loss": 0.7745, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 2.9401037089119355e-05, |
|
"loss": 0.7657, |
|
"step": 112500 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 2.9309486142848776e-05, |
|
"loss": 0.7706, |
|
"step": 113000 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 2.9217935196578196e-05, |
|
"loss": 0.7767, |
|
"step": 113500 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 2.9126384250307613e-05, |
|
"loss": 0.7777, |
|
"step": 114000 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 2.9034833304037034e-05, |
|
"loss": 0.7705, |
|
"step": 114500 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 2.8943282357766448e-05, |
|
"loss": 0.7829, |
|
"step": 115000 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 2.8851731411495868e-05, |
|
"loss": 0.775, |
|
"step": 115500 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 2.876018046522529e-05, |
|
"loss": 0.7815, |
|
"step": 116000 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 2.866862951895471e-05, |
|
"loss": 0.7703, |
|
"step": 116500 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 2.857707857268413e-05, |
|
"loss": 0.7736, |
|
"step": 117000 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 2.848552762641355e-05, |
|
"loss": 0.7747, |
|
"step": 117500 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 2.839397668014297e-05, |
|
"loss": 0.7707, |
|
"step": 118000 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 2.8302425733872385e-05, |
|
"loss": 0.7666, |
|
"step": 118500 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.8210874787601806e-05, |
|
"loss": 0.7867, |
|
"step": 119000 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.8119323841331223e-05, |
|
"loss": 0.7759, |
|
"step": 119500 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 2.8027772895060643e-05, |
|
"loss": 0.7743, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 2.7936221948790064e-05, |
|
"loss": 0.7708, |
|
"step": 120500 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.7844671002519484e-05, |
|
"loss": 0.7725, |
|
"step": 121000 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.7753120056248905e-05, |
|
"loss": 0.7687, |
|
"step": 121500 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 2.7661569109978326e-05, |
|
"loss": 0.7779, |
|
"step": 122000 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.757001816370774e-05, |
|
"loss": 0.774, |
|
"step": 122500 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.747846721743716e-05, |
|
"loss": 0.7671, |
|
"step": 123000 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.738691627116658e-05, |
|
"loss": 0.7721, |
|
"step": 123500 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.7295365324896e-05, |
|
"loss": 0.7758, |
|
"step": 124000 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.720381437862542e-05, |
|
"loss": 0.7689, |
|
"step": 124500 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.711226343235484e-05, |
|
"loss": 0.773, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.702071248608426e-05, |
|
"loss": 0.7776, |
|
"step": 125500 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.6929161539813673e-05, |
|
"loss": 0.7724, |
|
"step": 126000 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.6837610593543094e-05, |
|
"loss": 0.7885, |
|
"step": 126500 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2.6746059647272515e-05, |
|
"loss": 0.7737, |
|
"step": 127000 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2.6654508701001935e-05, |
|
"loss": 0.7715, |
|
"step": 127500 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.6562957754731356e-05, |
|
"loss": 0.7709, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.6471406808460776e-05, |
|
"loss": 0.7732, |
|
"step": 128500 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 2.6379855862190193e-05, |
|
"loss": 0.7802, |
|
"step": 129000 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 2.6288304915919614e-05, |
|
"loss": 0.7736, |
|
"step": 129500 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 2.6196753969649028e-05, |
|
"loss": 0.782, |
|
"step": 130000 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 2.610520302337845e-05, |
|
"loss": 0.7728, |
|
"step": 130500 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 2.601365207710787e-05, |
|
"loss": 0.7773, |
|
"step": 131000 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 2.592210113083729e-05, |
|
"loss": 0.7658, |
|
"step": 131500 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 2.583055018456671e-05, |
|
"loss": 0.7782, |
|
"step": 132000 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 2.573899923829613e-05, |
|
"loss": 0.7794, |
|
"step": 132500 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 2.564744829202555e-05, |
|
"loss": 0.7806, |
|
"step": 133000 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 2.5555897345754965e-05, |
|
"loss": 0.7708, |
|
"step": 133500 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 2.5464346399484386e-05, |
|
"loss": 0.7712, |
|
"step": 134000 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 2.5372795453213806e-05, |
|
"loss": 0.781, |
|
"step": 134500 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 2.5281244506943223e-05, |
|
"loss": 0.7807, |
|
"step": 135000 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 2.5189693560672644e-05, |
|
"loss": 0.7862, |
|
"step": 135500 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 2.5098142614402065e-05, |
|
"loss": 0.7796, |
|
"step": 136000 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.5006591668131485e-05, |
|
"loss": 0.7842, |
|
"step": 136500 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.4915040721860902e-05, |
|
"loss": 0.782, |
|
"step": 137000 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.4823489775590323e-05, |
|
"loss": 0.7808, |
|
"step": 137500 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.4731938829319744e-05, |
|
"loss": 0.7748, |
|
"step": 138000 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.464038788304916e-05, |
|
"loss": 0.7783, |
|
"step": 138500 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.454883693677858e-05, |
|
"loss": 0.7832, |
|
"step": 139000 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.4457285990508e-05, |
|
"loss": 0.7795, |
|
"step": 139500 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.4365735044237416e-05, |
|
"loss": 0.7836, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.4274184097966836e-05, |
|
"loss": 0.7779, |
|
"step": 140500 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.4182633151696257e-05, |
|
"loss": 0.7813, |
|
"step": 141000 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.4091082205425677e-05, |
|
"loss": 0.7818, |
|
"step": 141500 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.3999531259155095e-05, |
|
"loss": 0.7813, |
|
"step": 142000 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.3907980312884515e-05, |
|
"loss": 0.7787, |
|
"step": 142500 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.3816429366613936e-05, |
|
"loss": 0.784, |
|
"step": 143000 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.3724878420343356e-05, |
|
"loss": 0.7923, |
|
"step": 143500 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.3633327474072774e-05, |
|
"loss": 0.7752, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.354177652780219e-05, |
|
"loss": 0.7755, |
|
"step": 144500 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.345022558153161e-05, |
|
"loss": 0.7859, |
|
"step": 145000 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.3358674635261032e-05, |
|
"loss": 0.7763, |
|
"step": 145500 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.326712368899045e-05, |
|
"loss": 0.7812, |
|
"step": 146000 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 2.317557274271987e-05, |
|
"loss": 0.7764, |
|
"step": 146500 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 2.308402179644929e-05, |
|
"loss": 0.7743, |
|
"step": 147000 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 2.2992470850178707e-05, |
|
"loss": 0.7861, |
|
"step": 147500 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 2.2900919903908128e-05, |
|
"loss": 0.7746, |
|
"step": 148000 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 2.280936895763755e-05, |
|
"loss": 0.7802, |
|
"step": 148500 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.271781801136697e-05, |
|
"loss": 0.7859, |
|
"step": 149000 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.2626267065096386e-05, |
|
"loss": 0.7774, |
|
"step": 149500 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 2.2534716118825804e-05, |
|
"loss": 0.779, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 2.2443165172555224e-05, |
|
"loss": 0.7815, |
|
"step": 150500 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.2351614226284645e-05, |
|
"loss": 0.7847, |
|
"step": 151000 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.2260063280014062e-05, |
|
"loss": 0.7717, |
|
"step": 151500 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 2.2168512333743483e-05, |
|
"loss": 0.7701, |
|
"step": 152000 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 2.2076961387472903e-05, |
|
"loss": 0.7807, |
|
"step": 152500 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 2.1985410441202324e-05, |
|
"loss": 0.7786, |
|
"step": 153000 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 2.189385949493174e-05, |
|
"loss": 0.7894, |
|
"step": 153500 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 2.180230854866116e-05, |
|
"loss": 0.7766, |
|
"step": 154000 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.171075760239058e-05, |
|
"loss": 0.7792, |
|
"step": 154500 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.1619206656119996e-05, |
|
"loss": 0.7785, |
|
"step": 155000 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.1527655709849416e-05, |
|
"loss": 0.7795, |
|
"step": 155500 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.1436104763578837e-05, |
|
"loss": 0.7835, |
|
"step": 156000 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.1344553817308258e-05, |
|
"loss": 0.7737, |
|
"step": 156500 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.1253002871037675e-05, |
|
"loss": 0.785, |
|
"step": 157000 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.1161451924767095e-05, |
|
"loss": 0.7859, |
|
"step": 157500 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.1069900978496516e-05, |
|
"loss": 0.7842, |
|
"step": 158000 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.0978350032225936e-05, |
|
"loss": 0.7808, |
|
"step": 158500 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.0886799085955354e-05, |
|
"loss": 0.7732, |
|
"step": 159000 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.0795248139684774e-05, |
|
"loss": 0.7785, |
|
"step": 159500 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 2.070369719341419e-05, |
|
"loss": 0.7794, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 2.0612146247143612e-05, |
|
"loss": 0.7893, |
|
"step": 160500 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 2.052059530087303e-05, |
|
"loss": 0.779, |
|
"step": 161000 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 2.042904435460245e-05, |
|
"loss": 0.7792, |
|
"step": 161500 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 2.033749340833187e-05, |
|
"loss": 0.7902, |
|
"step": 162000 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 2.0245942462061288e-05, |
|
"loss": 0.7835, |
|
"step": 162500 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 2.0154391515790708e-05, |
|
"loss": 0.7831, |
|
"step": 163000 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 2.006284056952013e-05, |
|
"loss": 0.7852, |
|
"step": 163500 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.997128962324955e-05, |
|
"loss": 0.7855, |
|
"step": 164000 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.9879738676978966e-05, |
|
"loss": 0.7821, |
|
"step": 164500 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.9788187730708384e-05, |
|
"loss": 0.7762, |
|
"step": 165000 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.9696636784437804e-05, |
|
"loss": 0.7852, |
|
"step": 165500 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.9605085838167225e-05, |
|
"loss": 0.7857, |
|
"step": 166000 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.9513534891896642e-05, |
|
"loss": 0.7872, |
|
"step": 166500 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.9421983945626063e-05, |
|
"loss": 0.7749, |
|
"step": 167000 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.9330432999355483e-05, |
|
"loss": 0.7867, |
|
"step": 167500 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.9238882053084904e-05, |
|
"loss": 0.7801, |
|
"step": 168000 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.914733110681432e-05, |
|
"loss": 0.7734, |
|
"step": 168500 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.905578016054374e-05, |
|
"loss": 0.7798, |
|
"step": 169000 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.896422921427316e-05, |
|
"loss": 0.7862, |
|
"step": 169500 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.887267826800258e-05, |
|
"loss": 0.788, |
|
"step": 170000 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.8781127321731997e-05, |
|
"loss": 0.7863, |
|
"step": 170500 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.8689576375461417e-05, |
|
"loss": 0.7865, |
|
"step": 171000 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.8598025429190838e-05, |
|
"loss": 0.79, |
|
"step": 171500 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.8506474482920255e-05, |
|
"loss": 0.7871, |
|
"step": 172000 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.8414923536649675e-05, |
|
"loss": 0.7882, |
|
"step": 172500 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.8323372590379096e-05, |
|
"loss": 0.7811, |
|
"step": 173000 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.8231821644108517e-05, |
|
"loss": 0.7827, |
|
"step": 173500 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.8140270697837934e-05, |
|
"loss": 0.7741, |
|
"step": 174000 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.8048719751567354e-05, |
|
"loss": 0.7743, |
|
"step": 174500 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.795716880529677e-05, |
|
"loss": 0.785, |
|
"step": 175000 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.7865617859026192e-05, |
|
"loss": 0.7774, |
|
"step": 175500 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.777406691275561e-05, |
|
"loss": 0.7921, |
|
"step": 176000 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.768251596648503e-05, |
|
"loss": 0.7827, |
|
"step": 176500 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.759096502021445e-05, |
|
"loss": 0.7891, |
|
"step": 177000 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.7499414073943868e-05, |
|
"loss": 0.7873, |
|
"step": 177500 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.7407863127673288e-05, |
|
"loss": 0.7817, |
|
"step": 178000 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.731631218140271e-05, |
|
"loss": 0.7802, |
|
"step": 178500 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.722476123513213e-05, |
|
"loss": 0.7864, |
|
"step": 179000 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.7133210288861547e-05, |
|
"loss": 0.7872, |
|
"step": 179500 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.7041659342590964e-05, |
|
"loss": 0.7952, |
|
"step": 180000 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.6950108396320384e-05, |
|
"loss": 0.7861, |
|
"step": 180500 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.6858557450049805e-05, |
|
"loss": 0.7835, |
|
"step": 181000 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.6767006503779222e-05, |
|
"loss": 0.7922, |
|
"step": 181500 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.6675455557508643e-05, |
|
"loss": 0.7884, |
|
"step": 182000 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.6583904611238063e-05, |
|
"loss": 0.6872, |
|
"step": 182500 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.6492353664967484e-05, |
|
"loss": 0.6674, |
|
"step": 183000 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.64008027186969e-05, |
|
"loss": 0.6726, |
|
"step": 183500 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.630925177242632e-05, |
|
"loss": 0.6642, |
|
"step": 184000 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.6217700826155742e-05, |
|
"loss": 0.6699, |
|
"step": 184500 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.612614987988516e-05, |
|
"loss": 0.674, |
|
"step": 185000 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.6034598933614577e-05, |
|
"loss": 0.6681, |
|
"step": 185500 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.5943047987343997e-05, |
|
"loss": 0.6729, |
|
"step": 186000 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.5851497041073418e-05, |
|
"loss": 0.671, |
|
"step": 186500 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.5759946094802835e-05, |
|
"loss": 0.6731, |
|
"step": 187000 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.5668395148532256e-05, |
|
"loss": 0.6765, |
|
"step": 187500 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.5576844202261676e-05, |
|
"loss": 0.6757, |
|
"step": 188000 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.5485293255991097e-05, |
|
"loss": 0.6647, |
|
"step": 188500 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.5393742309720514e-05, |
|
"loss": 0.6795, |
|
"step": 189000 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.5302191363449934e-05, |
|
"loss": 0.6673, |
|
"step": 189500 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.5210640417179353e-05, |
|
"loss": 0.6701, |
|
"step": 190000 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.5119089470908774e-05, |
|
"loss": 0.6726, |
|
"step": 190500 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.5027538524638191e-05, |
|
"loss": 0.6646, |
|
"step": 191000 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.493598757836761e-05, |
|
"loss": 0.6707, |
|
"step": 191500 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.484443663209703e-05, |
|
"loss": 0.6727, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.4752885685826448e-05, |
|
"loss": 0.6799, |
|
"step": 192500 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.4661334739555868e-05, |
|
"loss": 0.6755, |
|
"step": 193000 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.4569783793285289e-05, |
|
"loss": 0.6706, |
|
"step": 193500 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.4478232847014708e-05, |
|
"loss": 0.6699, |
|
"step": 194000 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.4386681900744125e-05, |
|
"loss": 0.6771, |
|
"step": 194500 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.4295130954473546e-05, |
|
"loss": 0.6832, |
|
"step": 195000 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.4203580008202966e-05, |
|
"loss": 0.6803, |
|
"step": 195500 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.4112029061932385e-05, |
|
"loss": 0.6646, |
|
"step": 196000 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.4020478115661804e-05, |
|
"loss": 0.6792, |
|
"step": 196500 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.3928927169391223e-05, |
|
"loss": 0.6656, |
|
"step": 197000 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.3837376223120643e-05, |
|
"loss": 0.6719, |
|
"step": 197500 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.3745825276850064e-05, |
|
"loss": 0.6716, |
|
"step": 198000 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.3654274330579481e-05, |
|
"loss": 0.6728, |
|
"step": 198500 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.35627233843089e-05, |
|
"loss": 0.6708, |
|
"step": 199000 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.347117243803832e-05, |
|
"loss": 0.6755, |
|
"step": 199500 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.3379621491767741e-05, |
|
"loss": 0.6762, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.3288070545497158e-05, |
|
"loss": 0.6811, |
|
"step": 200500 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.3196519599226579e-05, |
|
"loss": 0.676, |
|
"step": 201000 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.3104968652955998e-05, |
|
"loss": 0.6763, |
|
"step": 201500 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.3013417706685415e-05, |
|
"loss": 0.6701, |
|
"step": 202000 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.2921866760414836e-05, |
|
"loss": 0.6742, |
|
"step": 202500 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.2830315814144256e-05, |
|
"loss": 0.6745, |
|
"step": 203000 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.2738764867873675e-05, |
|
"loss": 0.6716, |
|
"step": 203500 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.2647213921603094e-05, |
|
"loss": 0.6737, |
|
"step": 204000 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.2555662975332513e-05, |
|
"loss": 0.6726, |
|
"step": 204500 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.2464112029061933e-05, |
|
"loss": 0.6713, |
|
"step": 205000 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.2372561082791352e-05, |
|
"loss": 0.6828, |
|
"step": 205500 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.2281010136520773e-05, |
|
"loss": 0.6741, |
|
"step": 206000 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.218945919025019e-05, |
|
"loss": 0.6757, |
|
"step": 206500 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.209790824397961e-05, |
|
"loss": 0.6788, |
|
"step": 207000 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.200635729770903e-05, |
|
"loss": 0.6765, |
|
"step": 207500 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.1914806351438449e-05, |
|
"loss": 0.6758, |
|
"step": 208000 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.1823255405167869e-05, |
|
"loss": 0.6792, |
|
"step": 208500 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.1731704458897288e-05, |
|
"loss": 0.6772, |
|
"step": 209000 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.1640153512626707e-05, |
|
"loss": 0.6787, |
|
"step": 209500 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.1548602566356126e-05, |
|
"loss": 0.6822, |
|
"step": 210000 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.1457051620085546e-05, |
|
"loss": 0.6723, |
|
"step": 210500 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.1365500673814965e-05, |
|
"loss": 0.6687, |
|
"step": 211000 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.1273949727544384e-05, |
|
"loss": 0.6849, |
|
"step": 211500 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.1182398781273803e-05, |
|
"loss": 0.6772, |
|
"step": 212000 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.1090847835003224e-05, |
|
"loss": 0.6738, |
|
"step": 212500 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.0999296888732642e-05, |
|
"loss": 0.6732, |
|
"step": 213000 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.0907745942462063e-05, |
|
"loss": 0.6776, |
|
"step": 213500 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.081619499619148e-05, |
|
"loss": 0.6744, |
|
"step": 214000 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.07246440499209e-05, |
|
"loss": 0.6792, |
|
"step": 214500 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.063309310365032e-05, |
|
"loss": 0.6808, |
|
"step": 215000 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.0541542157379739e-05, |
|
"loss": 0.6777, |
|
"step": 215500 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.0449991211109159e-05, |
|
"loss": 0.683, |
|
"step": 216000 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.0358440264838578e-05, |
|
"loss": 0.6767, |
|
"step": 216500 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.0266889318567997e-05, |
|
"loss": 0.6744, |
|
"step": 217000 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.0175338372297416e-05, |
|
"loss": 0.6777, |
|
"step": 217500 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.0083787426026836e-05, |
|
"loss": 0.6794, |
|
"step": 218000 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 9.992236479756255e-06, |
|
"loss": 0.6705, |
|
"step": 218500 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 9.900685533485674e-06, |
|
"loss": 0.6696, |
|
"step": 219000 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 9.809134587215093e-06, |
|
"loss": 0.6829, |
|
"step": 219500 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 9.717583640944514e-06, |
|
"loss": 0.6702, |
|
"step": 220000 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 9.626032694673932e-06, |
|
"loss": 0.6759, |
|
"step": 220500 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 9.534481748403353e-06, |
|
"loss": 0.6822, |
|
"step": 221000 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 9.442930802132772e-06, |
|
"loss": 0.6771, |
|
"step": 221500 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 9.35137985586219e-06, |
|
"loss": 0.6752, |
|
"step": 222000 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 9.25982890959161e-06, |
|
"loss": 0.6752, |
|
"step": 222500 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 9.16827796332103e-06, |
|
"loss": 0.6752, |
|
"step": 223000 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 9.07672701705045e-06, |
|
"loss": 0.6829, |
|
"step": 223500 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 8.985176070779868e-06, |
|
"loss": 0.6732, |
|
"step": 224000 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 8.893625124509287e-06, |
|
"loss": 0.683, |
|
"step": 224500 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 8.802074178238706e-06, |
|
"loss": 0.6717, |
|
"step": 225000 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 8.710523231968126e-06, |
|
"loss": 0.678, |
|
"step": 225500 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 8.618972285697545e-06, |
|
"loss": 0.6752, |
|
"step": 226000 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 8.527421339426964e-06, |
|
"loss": 0.6805, |
|
"step": 226500 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 8.435870393156383e-06, |
|
"loss": 0.6821, |
|
"step": 227000 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 8.344319446885804e-06, |
|
"loss": 0.6802, |
|
"step": 227500 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 8.252768500615223e-06, |
|
"loss": 0.6703, |
|
"step": 228000 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 8.161217554344643e-06, |
|
"loss": 0.6724, |
|
"step": 228500 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 8.069666608074062e-06, |
|
"loss": 0.6758, |
|
"step": 229000 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 7.978115661803481e-06, |
|
"loss": 0.6798, |
|
"step": 229500 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 7.8865647155329e-06, |
|
"loss": 0.6832, |
|
"step": 230000 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 7.79501376926232e-06, |
|
"loss": 0.6691, |
|
"step": 230500 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 7.70346282299174e-06, |
|
"loss": 0.6848, |
|
"step": 231000 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 7.611911876721157e-06, |
|
"loss": 0.6699, |
|
"step": 231500 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.520360930450578e-06, |
|
"loss": 0.682, |
|
"step": 232000 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.428809984179996e-06, |
|
"loss": 0.6789, |
|
"step": 232500 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 7.3372590379094165e-06, |
|
"loss": 0.6749, |
|
"step": 233000 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 7.245708091638835e-06, |
|
"loss": 0.6749, |
|
"step": 233500 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 7.154157145368255e-06, |
|
"loss": 0.6771, |
|
"step": 234000 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 7.062606199097674e-06, |
|
"loss": 0.6848, |
|
"step": 234500 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 6.971055252827094e-06, |
|
"loss": 0.677, |
|
"step": 235000 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 6.879504306556513e-06, |
|
"loss": 0.6817, |
|
"step": 235500 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 6.787953360285932e-06, |
|
"loss": 0.6776, |
|
"step": 236000 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 6.696402414015351e-06, |
|
"loss": 0.6794, |
|
"step": 236500 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 6.604851467744772e-06, |
|
"loss": 0.6825, |
|
"step": 237000 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 6.51330052147419e-06, |
|
"loss": 0.6717, |
|
"step": 237500 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 6.4217495752036104e-06, |
|
"loss": 0.6819, |
|
"step": 238000 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 6.330198628933029e-06, |
|
"loss": 0.674, |
|
"step": 238500 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 6.238647682662448e-06, |
|
"loss": 0.6757, |
|
"step": 239000 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 6.147096736391868e-06, |
|
"loss": 0.6771, |
|
"step": 239500 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 6.055545790121287e-06, |
|
"loss": 0.6848, |
|
"step": 240000 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 5.9639948438507066e-06, |
|
"loss": 0.6724, |
|
"step": 240500 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.872443897580126e-06, |
|
"loss": 0.6797, |
|
"step": 241000 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.780892951309545e-06, |
|
"loss": 0.676, |
|
"step": 241500 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 5.689342005038965e-06, |
|
"loss": 0.6777, |
|
"step": 242000 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 5.597791058768383e-06, |
|
"loss": 0.6707, |
|
"step": 242500 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.506240112497803e-06, |
|
"loss": 0.6837, |
|
"step": 243000 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.414689166227222e-06, |
|
"loss": 0.6732, |
|
"step": 243500 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.323138219956641e-06, |
|
"loss": 0.6745, |
|
"step": 244000 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.231587273686061e-06, |
|
"loss": 0.6766, |
|
"step": 244500 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.14003632741548e-06, |
|
"loss": 0.6726, |
|
"step": 245000 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.0484853811449e-06, |
|
"loss": 0.6821, |
|
"step": 245500 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.956934434874319e-06, |
|
"loss": 0.6708, |
|
"step": 246000 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.865383488603738e-06, |
|
"loss": 0.6837, |
|
"step": 246500 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.773832542333158e-06, |
|
"loss": 0.6752, |
|
"step": 247000 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.682281596062577e-06, |
|
"loss": 0.6718, |
|
"step": 247500 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.590730649791997e-06, |
|
"loss": 0.6812, |
|
"step": 248000 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.499179703521416e-06, |
|
"loss": 0.6803, |
|
"step": 248500 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.407628757250835e-06, |
|
"loss": 0.6803, |
|
"step": 249000 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.316077810980255e-06, |
|
"loss": 0.6784, |
|
"step": 249500 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.224526864709674e-06, |
|
"loss": 0.6765, |
|
"step": 250000 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.132975918439093e-06, |
|
"loss": 0.6792, |
|
"step": 250500 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.0414249721685125e-06, |
|
"loss": 0.6786, |
|
"step": 251000 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.949874025897931e-06, |
|
"loss": 0.6765, |
|
"step": 251500 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.858323079627351e-06, |
|
"loss": 0.673, |
|
"step": 252000 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.7667721333567704e-06, |
|
"loss": 0.6727, |
|
"step": 252500 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.6752211870861897e-06, |
|
"loss": 0.6751, |
|
"step": 253000 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.583670240815609e-06, |
|
"loss": 0.6832, |
|
"step": 253500 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.4921192945450287e-06, |
|
"loss": 0.6742, |
|
"step": 254000 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.400568348274448e-06, |
|
"loss": 0.6738, |
|
"step": 254500 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.3090174020038674e-06, |
|
"loss": 0.681, |
|
"step": 255000 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.2174664557332867e-06, |
|
"loss": 0.6724, |
|
"step": 255500 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.125915509462706e-06, |
|
"loss": 0.6734, |
|
"step": 256000 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 3.0343645631921253e-06, |
|
"loss": 0.6821, |
|
"step": 256500 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.9428136169215446e-06, |
|
"loss": 0.6754, |
|
"step": 257000 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.851262670650964e-06, |
|
"loss": 0.6805, |
|
"step": 257500 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.7597117243803832e-06, |
|
"loss": 0.6777, |
|
"step": 258000 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.668160778109803e-06, |
|
"loss": 0.6756, |
|
"step": 258500 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.5766098318392223e-06, |
|
"loss": 0.6835, |
|
"step": 259000 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.4850588855686416e-06, |
|
"loss": 0.675, |
|
"step": 259500 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.3935079392980604e-06, |
|
"loss": 0.6733, |
|
"step": 260000 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.3019569930274798e-06, |
|
"loss": 0.6761, |
|
"step": 260500 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.2104060467568995e-06, |
|
"loss": 0.6753, |
|
"step": 261000 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.118855100486319e-06, |
|
"loss": 0.679, |
|
"step": 261500 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 2.027304154215738e-06, |
|
"loss": 0.6707, |
|
"step": 262000 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.9357532079451574e-06, |
|
"loss": 0.6744, |
|
"step": 262500 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.844202261674577e-06, |
|
"loss": 0.678, |
|
"step": 263000 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.7526513154039962e-06, |
|
"loss": 0.6655, |
|
"step": 263500 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.6611003691334153e-06, |
|
"loss": 0.6812, |
|
"step": 264000 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.5695494228628347e-06, |
|
"loss": 0.6797, |
|
"step": 264500 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.4779984765922542e-06, |
|
"loss": 0.6727, |
|
"step": 265000 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.3864475303216735e-06, |
|
"loss": 0.6738, |
|
"step": 265500 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.2948965840510928e-06, |
|
"loss": 0.6799, |
|
"step": 266000 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.203345637780512e-06, |
|
"loss": 0.6779, |
|
"step": 266500 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.1117946915099314e-06, |
|
"loss": 0.68, |
|
"step": 267000 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.020243745239351e-06, |
|
"loss": 0.6783, |
|
"step": 267500 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 9.286927989687702e-07, |
|
"loss": 0.6851, |
|
"step": 268000 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 8.371418526981894e-07, |
|
"loss": 0.6734, |
|
"step": 268500 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 7.455909064276089e-07, |
|
"loss": 0.6796, |
|
"step": 269000 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 6.540399601570283e-07, |
|
"loss": 0.6754, |
|
"step": 269500 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.624890138864476e-07, |
|
"loss": 0.6813, |
|
"step": 270000 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.7093806761586695e-07, |
|
"loss": 0.6758, |
|
"step": 270500 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.7938712134528625e-07, |
|
"loss": 0.6788, |
|
"step": 271000 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.8783617507470556e-07, |
|
"loss": 0.6753, |
|
"step": 271500 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.962852288041249e-07, |
|
"loss": 0.6819, |
|
"step": 272000 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.0473428253354427e-07, |
|
"loss": 0.675, |
|
"step": 272500 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 1.3183336262963613e-08, |
|
"loss": 0.6794, |
|
"step": 273000 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 273072, |
|
"total_flos": 4122145169772675840, |
|
"train_runtime": 309389.2046, |
|
"train_samples_per_second": 0.883 |
|
} |
|
], |
|
"max_steps": 273072, |
|
"num_train_epochs": 3, |
|
"total_flos": 4122145169772675840, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |