{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 131.21924548933845,
  "global_step": 480000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.14,
      "learning_rate": 5e-05,
      "loss": 3.4081,
      "step": 500
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.999316473000684e-05,
      "loss": 3.3326,
      "step": 1000
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.998632946001368e-05,
      "loss": 3.3069,
      "step": 1500
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.997949419002051e-05,
      "loss": 3.2839,
      "step": 2000
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.997265892002734e-05,
      "loss": 3.2691,
      "step": 2500
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.9965823650034176e-05,
      "loss": 3.2595,
      "step": 3000
    },
    {
      "epoch": 0.96,
      "learning_rate": 4.995898838004102e-05,
      "loss": 3.2534,
      "step": 3500
    },
    {
      "epoch": 1.09,
      "learning_rate": 4.995215311004785e-05,
      "loss": 3.2124,
      "step": 4000
    },
    {
      "epoch": 1.23,
      "learning_rate": 4.994531784005468e-05,
      "loss": 3.1981,
      "step": 4500
    },
    {
      "epoch": 1.37,
      "learning_rate": 4.9938482570061516e-05,
      "loss": 3.193,
      "step": 5000
    },
    {
      "epoch": 1.5,
      "learning_rate": 4.993164730006836e-05,
      "loss": 3.1885,
      "step": 5500
    },
    {
      "epoch": 1.64,
      "learning_rate": 4.992481203007519e-05,
      "loss": 3.1881,
      "step": 6000
    },
    {
      "epoch": 1.78,
      "learning_rate": 4.991797676008203e-05,
      "loss": 3.192,
      "step": 6500
    },
    {
      "epoch": 1.91,
      "learning_rate": 4.991114149008886e-05,
      "loss": 3.181,
      "step": 7000
    },
    {
      "epoch": 2.05,
      "learning_rate": 4.9904306220095696e-05,
      "loss": 3.1629,
      "step": 7500
    },
    {
      "epoch": 2.19,
      "learning_rate": 4.989747095010253e-05,
      "loss": 3.139,
      "step": 8000
    },
    {
      "epoch": 2.32,
      "learning_rate": 4.9890635680109366e-05,
      "loss": 3.1355,
      "step": 8500
    },
    {
      "epoch": 2.46,
      "learning_rate": 4.98838004101162e-05,
      "loss": 3.1403,
      "step": 9000
    },
    {
      "epoch": 2.6,
      "learning_rate": 4.9876965140123035e-05,
      "loss": 3.1403,
      "step": 9500
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.987012987012987e-05,
      "loss": 3.1366,
      "step": 10000
    },
    {
      "epoch": 2.87,
      "learning_rate": 4.9863294600136705e-05,
      "loss": 3.1378,
      "step": 10500
    },
    {
      "epoch": 3.01,
      "learning_rate": 4.985645933014355e-05,
      "loss": 3.132,
      "step": 11000
    },
    {
      "epoch": 3.14,
      "learning_rate": 4.984962406015038e-05,
      "loss": 3.0939,
      "step": 11500
    },
    {
      "epoch": 3.28,
      "learning_rate": 4.9842788790157216e-05,
      "loss": 3.0984,
      "step": 12000
    },
    {
      "epoch": 3.42,
      "learning_rate": 4.9835953520164044e-05,
      "loss": 3.0947,
      "step": 12500
    },
    {
      "epoch": 3.55,
      "learning_rate": 4.9829118250170886e-05,
      "loss": 3.099,
      "step": 13000
    },
    {
      "epoch": 3.69,
      "learning_rate": 4.982228298017772e-05,
      "loss": 3.1002,
      "step": 13500
    },
    {
      "epoch": 3.83,
      "learning_rate": 4.9815447710184555e-05,
      "loss": 3.0929,
      "step": 14000
    },
    {
      "epoch": 3.96,
      "learning_rate": 4.980861244019139e-05,
      "loss": 3.1047,
      "step": 14500
    },
    {
      "epoch": 4.1,
      "learning_rate": 4.9801777170198225e-05,
      "loss": 3.0691,
      "step": 15000
    },
    {
      "epoch": 4.24,
      "learning_rate": 4.979494190020506e-05,
      "loss": 3.0629,
      "step": 15500
    },
    {
      "epoch": 4.37,
      "learning_rate": 4.9788106630211895e-05,
      "loss": 3.0655,
      "step": 16000
    },
    {
      "epoch": 4.51,
      "learning_rate": 4.978127136021873e-05,
      "loss": 3.0679,
      "step": 16500
    },
    {
      "epoch": 4.65,
      "learning_rate": 4.977443609022557e-05,
      "loss": 3.063,
      "step": 17000
    },
    {
      "epoch": 4.78,
      "learning_rate": 4.97676008202324e-05,
      "loss": 3.065,
      "step": 17500
    },
    {
      "epoch": 4.92,
      "learning_rate": 4.9760765550239234e-05,
      "loss": 3.0706,
      "step": 18000
    },
    {
      "epoch": 5.06,
      "learning_rate": 4.975393028024607e-05,
      "loss": 3.0469,
      "step": 18500
    },
    {
      "epoch": 5.19,
      "learning_rate": 4.974709501025291e-05,
      "loss": 3.0315,
      "step": 19000
    },
    {
      "epoch": 5.33,
      "learning_rate": 4.9740259740259745e-05,
      "loss": 3.0287,
      "step": 19500
    },
    {
      "epoch": 5.47,
      "learning_rate": 4.973342447026658e-05,
      "loss": 3.0319,
      "step": 20000
    },
    {
      "epoch": 5.6,
      "learning_rate": 4.9726589200273414e-05,
      "loss": 3.0376,
      "step": 20500
    },
    {
      "epoch": 5.74,
      "learning_rate": 4.971975393028025e-05,
      "loss": 3.0405,
      "step": 21000
    },
    {
      "epoch": 5.88,
      "learning_rate": 4.9712918660287084e-05,
      "loss": 3.0393,
      "step": 21500
    },
    {
      "epoch": 6.01,
      "learning_rate": 4.970608339029392e-05,
      "loss": 3.0358,
      "step": 22000
    },
    {
      "epoch": 6.15,
      "learning_rate": 4.9699248120300754e-05,
      "loss": 3.0,
      "step": 22500
    },
    {
      "epoch": 6.29,
      "learning_rate": 4.969241285030759e-05,
      "loss": 3.0069,
      "step": 23000
    },
    {
      "epoch": 6.42,
      "learning_rate": 4.968557758031442e-05,
      "loss": 3.0053,
      "step": 23500
    },
    {
      "epoch": 6.56,
      "learning_rate": 4.967874231032126e-05,
      "loss": 3.0082,
      "step": 24000
    },
    {
      "epoch": 6.7,
      "learning_rate": 4.96719070403281e-05,
      "loss": 3.0102,
      "step": 24500
    },
    {
      "epoch": 6.83,
      "learning_rate": 4.9665071770334934e-05,
      "loss": 3.017,
      "step": 25000
    },
    {
      "epoch": 6.97,
      "learning_rate": 4.965823650034176e-05,
      "loss": 3.0128,
      "step": 25500
    },
    {
      "epoch": 7.11,
      "learning_rate": 4.96514012303486e-05,
      "loss": 2.9722,
      "step": 26000
    },
    {
      "epoch": 7.24,
      "learning_rate": 4.964456596035544e-05,
      "loss": 2.9764,
      "step": 26500
    },
    {
      "epoch": 7.38,
      "learning_rate": 4.9637730690362274e-05,
      "loss": 2.9836,
      "step": 27000
    },
    {
      "epoch": 7.52,
      "learning_rate": 4.963089542036911e-05,
      "loss": 2.9837,
      "step": 27500
    },
    {
      "epoch": 7.65,
      "learning_rate": 4.9624060150375936e-05,
      "loss": 2.9803,
      "step": 28000
    },
    {
      "epoch": 7.79,
      "learning_rate": 4.961722488038278e-05,
      "loss": 2.9923,
      "step": 28500
    },
    {
      "epoch": 7.93,
      "learning_rate": 4.961038961038961e-05,
      "loss": 2.9973,
      "step": 29000
    },
    {
      "epoch": 8.06,
      "learning_rate": 4.960355434039645e-05,
      "loss": 2.9631,
      "step": 29500
    },
    {
      "epoch": 8.2,
      "learning_rate": 4.959671907040329e-05,
      "loss": 2.9481,
      "step": 30000
    },
    {
      "epoch": 8.34,
      "learning_rate": 4.958988380041012e-05,
      "loss": 2.9594,
      "step": 30500
    },
    {
      "epoch": 8.47,
      "learning_rate": 4.958304853041695e-05,
      "loss": 2.9552,
      "step": 31000
    },
    {
      "epoch": 8.61,
      "learning_rate": 4.957621326042379e-05,
      "loss": 2.9616,
      "step": 31500
    },
    {
      "epoch": 8.75,
      "learning_rate": 4.956937799043063e-05,
      "loss": 2.9692,
      "step": 32000
    },
    {
      "epoch": 8.88,
      "learning_rate": 4.956254272043746e-05,
      "loss": 2.9657,
      "step": 32500
    },
    {
      "epoch": 9.02,
      "learning_rate": 4.955570745044429e-05,
      "loss": 2.9617,
      "step": 33000
    },
    {
      "epoch": 9.16,
      "learning_rate": 4.9548872180451126e-05,
      "loss": 2.9246,
      "step": 33500
    },
    {
      "epoch": 9.29,
      "learning_rate": 4.954203691045797e-05,
      "loss": 2.9305,
      "step": 34000
    },
    {
      "epoch": 9.43,
      "learning_rate": 4.95352016404648e-05,
      "loss": 2.9364,
      "step": 34500
    },
    {
      "epoch": 9.57,
      "learning_rate": 4.952836637047164e-05,
      "loss": 2.9419,
      "step": 35000
    },
    {
      "epoch": 9.7,
      "learning_rate": 4.952153110047847e-05,
      "loss": 2.9498,
      "step": 35500
    },
    {
      "epoch": 9.84,
      "learning_rate": 4.951469583048531e-05,
      "loss": 2.9423,
      "step": 36000
    },
    {
      "epoch": 9.98,
      "learning_rate": 4.950786056049214e-05,
      "loss": 2.9388,
      "step": 36500
    },
    {
      "epoch": 10.11,
      "learning_rate": 4.9501025290498976e-05,
      "loss": 2.908,
      "step": 37000
    },
    {
      "epoch": 10.25,
      "learning_rate": 4.949419002050582e-05,
      "loss": 2.9081,
      "step": 37500
    },
    {
      "epoch": 10.39,
      "learning_rate": 4.9487354750512646e-05,
      "loss": 2.9072,
      "step": 38000
    },
    {
      "epoch": 10.52,
      "learning_rate": 4.948051948051948e-05,
      "loss": 2.9194,
      "step": 38500
    },
    {
      "epoch": 10.66,
      "learning_rate": 4.9473684210526315e-05,
      "loss": 2.9226,
      "step": 39000
    },
    {
      "epoch": 10.8,
      "learning_rate": 4.946684894053316e-05,
      "loss": 2.9242,
      "step": 39500
    },
    {
      "epoch": 10.93,
      "learning_rate": 4.946001367053999e-05,
      "loss": 2.9218,
      "step": 40000
    },
    {
      "epoch": 11.07,
      "learning_rate": 4.9453178400546827e-05,
      "loss": 2.9096,
      "step": 40500
    },
    {
      "epoch": 11.21,
      "learning_rate": 4.9446343130553655e-05,
      "loss": 2.8805,
      "step": 41000
    },
    {
      "epoch": 11.34,
      "learning_rate": 4.9439507860560496e-05,
      "loss": 2.8983,
      "step": 41500
    },
    {
      "epoch": 11.48,
      "learning_rate": 4.943267259056733e-05,
      "loss": 2.8895,
      "step": 42000
    },
    {
      "epoch": 11.62,
      "learning_rate": 4.9425837320574166e-05,
      "loss": 2.902,
      "step": 42500
    },
    {
      "epoch": 11.76,
      "learning_rate": 4.9419002050581e-05,
      "loss": 2.9048,
      "step": 43000
    },
    {
      "epoch": 11.89,
      "learning_rate": 4.9412166780587835e-05,
      "loss": 2.901,
      "step": 43500
    },
    {
      "epoch": 12.03,
      "learning_rate": 4.940533151059467e-05,
      "loss": 2.8931,
      "step": 44000
    },
    {
      "epoch": 12.17,
      "learning_rate": 4.9398496240601505e-05,
      "loss": 2.8609,
      "step": 44500
    },
    {
      "epoch": 12.3,
      "learning_rate": 4.939166097060834e-05,
      "loss": 2.8731,
      "step": 45000
    },
    {
      "epoch": 12.44,
      "learning_rate": 4.938482570061518e-05,
      "loss": 2.8695,
      "step": 45500
    },
    {
      "epoch": 12.58,
      "learning_rate": 4.937799043062201e-05,
      "loss": 2.8803,
      "step": 46000
    },
    {
      "epoch": 12.71,
      "learning_rate": 4.9371155160628844e-05,
      "loss": 2.8834,
      "step": 46500
    },
    {
      "epoch": 12.85,
      "learning_rate": 4.9364319890635686e-05,
      "loss": 2.8884,
      "step": 47000
    },
    {
      "epoch": 12.99,
      "learning_rate": 4.935748462064252e-05,
      "loss": 2.8881,
      "step": 47500
    },
    {
      "epoch": 13.12,
      "learning_rate": 4.9350649350649355e-05,
      "loss": 2.8503,
      "step": 48000
    },
    {
      "epoch": 13.26,
      "learning_rate": 4.934381408065618e-05,
      "loss": 2.85,
      "step": 48500
    },
    {
      "epoch": 13.4,
      "learning_rate": 4.9336978810663025e-05,
      "loss": 2.8592,
      "step": 49000
    },
    {
      "epoch": 13.53,
      "learning_rate": 4.933014354066986e-05,
      "loss": 2.8528,
      "step": 49500
    },
    {
      "epoch": 13.67,
      "learning_rate": 4.9323308270676694e-05,
      "loss": 2.864,
      "step": 50000
    },
    {
      "epoch": 13.81,
      "learning_rate": 4.931647300068353e-05,
      "loss": 2.8593,
      "step": 50500
    },
    {
      "epoch": 13.94,
      "learning_rate": 4.9309637730690364e-05,
      "loss": 2.8678,
      "step": 51000
    },
    {
      "epoch": 14.08,
      "learning_rate": 4.93028024606972e-05,
      "loss": 2.846,
      "step": 51500
    },
    {
      "epoch": 14.22,
      "learning_rate": 4.9295967190704034e-05,
      "loss": 2.827,
      "step": 52000
    },
    {
      "epoch": 14.35,
      "learning_rate": 4.928913192071087e-05,
      "loss": 2.8338,
      "step": 52500
    },
    {
      "epoch": 14.49,
      "learning_rate": 4.928229665071771e-05,
      "loss": 2.835,
      "step": 53000
    },
    {
      "epoch": 14.63,
      "learning_rate": 4.927546138072454e-05,
      "loss": 2.8436,
      "step": 53500
    },
    {
      "epoch": 14.76,
      "learning_rate": 4.926862611073137e-05,
      "loss": 2.8503,
      "step": 54000
    },
    {
      "epoch": 14.9,
      "learning_rate": 4.926179084073821e-05,
      "loss": 2.8494,
      "step": 54500
    },
    {
      "epoch": 15.04,
      "learning_rate": 4.925495557074505e-05,
      "loss": 2.8412,
      "step": 55000
    },
    {
      "epoch": 15.17,
      "learning_rate": 4.9248120300751884e-05,
      "loss": 2.8029,
      "step": 55500
    },
    {
      "epoch": 15.31,
      "learning_rate": 4.924128503075872e-05,
      "loss": 2.8143,
      "step": 56000
    },
    {
      "epoch": 15.45,
      "learning_rate": 4.9234449760765553e-05,
      "loss": 2.8192,
      "step": 56500
    },
    {
      "epoch": 15.58,
      "learning_rate": 4.922761449077239e-05,
      "loss": 2.827,
      "step": 57000
    },
    {
      "epoch": 15.72,
      "learning_rate": 4.922077922077922e-05,
      "loss": 2.8304,
      "step": 57500
    },
    {
      "epoch": 15.86,
      "learning_rate": 4.921394395078606e-05,
      "loss": 2.836,
      "step": 58000
    },
    {
      "epoch": 15.99,
      "learning_rate": 4.920710868079289e-05,
      "loss": 2.8319,
      "step": 58500
    },
    {
      "epoch": 16.13,
      "learning_rate": 4.920027341079973e-05,
      "loss": 2.7878,
      "step": 59000
    },
    {
      "epoch": 16.27,
      "learning_rate": 4.919343814080656e-05,
      "loss": 2.7968,
      "step": 59500
    },
    {
      "epoch": 16.4,
      "learning_rate": 4.91866028708134e-05,
      "loss": 2.8051,
      "step": 60000
    },
    {
      "epoch": 16.54,
      "learning_rate": 4.917976760082024e-05,
      "loss": 2.806,
      "step": 60500
    },
    {
      "epoch": 16.68,
      "learning_rate": 4.9172932330827073e-05,
      "loss": 2.8105,
      "step": 61000
    },
    {
      "epoch": 16.81,
      "learning_rate": 4.91660970608339e-05,
      "loss": 2.8127,
      "step": 61500
    },
    {
      "epoch": 16.95,
      "learning_rate": 4.9159261790840736e-05,
      "loss": 2.8156,
      "step": 62000
    },
    {
      "epoch": 17.09,
      "learning_rate": 4.915242652084758e-05,
      "loss": 2.7902,
      "step": 62500
    },
    {
      "epoch": 17.22,
      "learning_rate": 4.914559125085441e-05,
      "loss": 2.7727,
      "step": 63000
    },
    {
      "epoch": 17.36,
      "learning_rate": 4.913875598086125e-05,
      "loss": 2.7824,
      "step": 63500
    },
    {
      "epoch": 17.5,
      "learning_rate": 4.913192071086808e-05,
      "loss": 2.7957,
      "step": 64000
    },
    {
      "epoch": 17.63,
      "learning_rate": 4.912508544087492e-05,
      "loss": 2.7884,
      "step": 64500
    },
    {
      "epoch": 17.77,
      "learning_rate": 4.911825017088175e-05,
      "loss": 2.7978,
      "step": 65000
    },
    {
      "epoch": 17.91,
      "learning_rate": 4.9111414900888587e-05,
      "loss": 2.7943,
      "step": 65500
    },
    {
      "epoch": 18.04,
      "learning_rate": 4.910457963089543e-05,
      "loss": 2.7887,
      "step": 66000
    },
    {
      "epoch": 18.18,
      "learning_rate": 4.9097744360902256e-05,
      "loss": 2.7575,
      "step": 66500
    },
    {
      "epoch": 18.32,
      "learning_rate": 4.909090909090909e-05,
      "loss": 2.7736,
      "step": 67000
    },
    {
      "epoch": 18.45,
      "learning_rate": 4.9084073820915926e-05,
      "loss": 2.7681,
      "step": 67500
    },
    {
      "epoch": 18.59,
      "learning_rate": 4.907723855092277e-05,
      "loss": 2.7729,
      "step": 68000
    },
    {
      "epoch": 18.73,
      "learning_rate": 4.90704032809296e-05,
      "loss": 2.7782,
      "step": 68500
    },
    {
      "epoch": 18.86,
      "learning_rate": 4.906356801093644e-05,
      "loss": 2.7764,
      "step": 69000
    },
    {
      "epoch": 19.0,
      "learning_rate": 4.9056732740943265e-05,
      "loss": 2.7843,
      "step": 69500
    },
    {
      "epoch": 19.14,
      "learning_rate": 4.9049897470950106e-05,
      "loss": 2.7397,
      "step": 70000
    },
    {
      "epoch": 19.27,
      "learning_rate": 4.904306220095694e-05,
      "loss": 2.7439,
      "step": 70500
    },
    {
      "epoch": 19.41,
      "learning_rate": 4.9036226930963776e-05,
      "loss": 2.7504,
      "step": 71000
    },
    {
      "epoch": 19.55,
      "learning_rate": 4.902939166097061e-05,
      "loss": 2.7516,
      "step": 71500
    },
    {
      "epoch": 19.68,
      "learning_rate": 4.9022556390977446e-05,
      "loss": 2.7612,
      "step": 72000
    },
    {
      "epoch": 19.82,
      "learning_rate": 4.901572112098428e-05,
      "loss": 2.7767,
      "step": 72500
    },
    {
      "epoch": 19.96,
      "learning_rate": 4.9008885850991115e-05,
      "loss": 2.7686,
      "step": 73000
    },
    {
      "epoch": 20.09,
      "learning_rate": 4.900205058099795e-05,
      "loss": 2.7407,
      "step": 73500
    },
    {
      "epoch": 20.23,
      "learning_rate": 4.899521531100479e-05,
      "loss": 2.7337,
      "step": 74000
    },
    {
      "epoch": 20.37,
      "learning_rate": 4.898838004101162e-05,
      "loss": 2.7353,
      "step": 74500
    },
    {
      "epoch": 20.5,
      "learning_rate": 4.8981544771018454e-05,
      "loss": 2.7383,
      "step": 75000
    },
    {
      "epoch": 20.64,
      "learning_rate": 4.8974709501025296e-05,
      "loss": 2.7409,
      "step": 75500
    },
    {
      "epoch": 20.78,
      "learning_rate": 4.896787423103213e-05,
      "loss": 2.7497,
      "step": 76000
    },
    {
      "epoch": 20.91,
      "learning_rate": 4.8961038961038966e-05,
      "loss": 2.7512,
      "step": 76500
    },
    {
      "epoch": 21.05,
      "learning_rate": 4.8954203691045794e-05,
      "loss": 2.7381,
      "step": 77000
    },
    {
      "epoch": 21.19,
      "learning_rate": 4.8947368421052635e-05,
      "loss": 2.7097,
      "step": 77500
    },
    {
      "epoch": 21.32,
      "learning_rate": 4.894053315105947e-05,
      "loss": 2.7176,
      "step": 78000
    },
    {
      "epoch": 21.46,
      "learning_rate": 4.8933697881066305e-05,
      "loss": 2.7187,
      "step": 78500
    },
    {
      "epoch": 21.6,
      "learning_rate": 4.892686261107314e-05,
      "loss": 2.7352,
      "step": 79000
    },
    {
      "epoch": 21.73,
      "learning_rate": 4.8920027341079974e-05,
      "loss": 2.7303,
      "step": 79500
    },
    {
      "epoch": 21.87,
      "learning_rate": 4.891319207108681e-05,
      "loss": 2.7415,
      "step": 80000
    },
    {
      "epoch": 22.01,
      "learning_rate": 4.8906356801093644e-05,
      "loss": 2.7364,
      "step": 80500
    },
    {
      "epoch": 22.14,
      "learning_rate": 4.889952153110048e-05,
      "loss": 2.6864,
      "step": 81000
    },
    {
      "epoch": 22.28,
      "learning_rate": 4.889268626110732e-05,
      "loss": 2.7004,
      "step": 81500
    },
    {
      "epoch": 22.42,
      "learning_rate": 4.888585099111415e-05,
      "loss": 2.711,
      "step": 82000
    },
    {
      "epoch": 22.55,
      "learning_rate": 4.887901572112098e-05,
      "loss": 2.715,
      "step": 82500
    },
    {
      "epoch": 22.69,
      "learning_rate": 4.887218045112782e-05,
      "loss": 2.7135,
      "step": 83000
    },
    {
      "epoch": 22.83,
      "learning_rate": 4.886534518113466e-05,
      "loss": 2.725,
      "step": 83500
    },
    {
      "epoch": 22.96,
      "learning_rate": 4.8858509911141494e-05,
      "loss": 2.7299,
      "step": 84000
    },
    {
      "epoch": 23.1,
      "learning_rate": 4.885167464114833e-05,
      "loss": 2.693,
      "step": 84500
    },
    {
      "epoch": 23.24,
      "learning_rate": 4.8844839371155164e-05,
      "loss": 2.6783,
      "step": 85000
    },
    {
      "epoch": 23.37,
      "learning_rate": 4.8838004101162e-05,
      "loss": 2.6938,
      "step": 85500
    },
    {
      "epoch": 23.51,
      "learning_rate": 4.8831168831168833e-05,
      "loss": 2.6966,
      "step": 86000
    },
    {
      "epoch": 23.65,
      "learning_rate": 4.882433356117567e-05,
      "loss": 2.693,
      "step": 86500
    },
    {
      "epoch": 23.78,
      "learning_rate": 4.88174982911825e-05,
      "loss": 2.7088,
      "step": 87000
    },
    {
      "epoch": 23.92,
      "learning_rate": 4.881066302118934e-05,
      "loss": 2.7103,
      "step": 87500
    },
    {
      "epoch": 24.06,
      "learning_rate": 4.880382775119617e-05,
      "loss": 2.6963,
      "step": 88000
    },
    {
      "epoch": 24.19,
      "learning_rate": 4.879699248120301e-05,
      "loss": 2.6712,
      "step": 88500
    },
    {
      "epoch": 24.33,
      "learning_rate": 4.879015721120985e-05,
      "loss": 2.6773,
      "step": 89000
    },
    {
      "epoch": 24.47,
      "learning_rate": 4.8783321941216684e-05,
      "loss": 2.6801,
      "step": 89500
    },
    {
      "epoch": 24.6,
      "learning_rate": 4.877648667122351e-05,
      "loss": 2.6889,
      "step": 90000
    },
    {
      "epoch": 24.74,
      "learning_rate": 4.8769651401230347e-05,
      "loss": 2.6861,
      "step": 90500
    },
    {
      "epoch": 24.88,
      "learning_rate": 4.876281613123719e-05,
      "loss": 2.693,
      "step": 91000
    },
    {
      "epoch": 25.01,
      "learning_rate": 4.875598086124402e-05,
      "loss": 2.6889,
      "step": 91500
    },
    {
      "epoch": 25.15,
      "learning_rate": 4.874914559125086e-05,
      "loss": 2.6434,
      "step": 92000
    },
    {
      "epoch": 25.29,
      "learning_rate": 4.874231032125769e-05,
      "loss": 2.6558,
      "step": 92500
    },
    {
      "epoch": 25.42,
      "learning_rate": 4.873547505126453e-05,
      "loss": 2.6626,
      "step": 93000
    },
    {
      "epoch": 25.56,
      "learning_rate": 4.872863978127136e-05,
      "loss": 2.6769,
      "step": 93500
    },
    {
      "epoch": 25.7,
      "learning_rate": 4.87218045112782e-05,
      "loss": 2.6742,
      "step": 94000
    },
    {
      "epoch": 25.83,
      "learning_rate": 4.871496924128504e-05,
      "loss": 2.6876,
      "step": 94500
    },
    {
      "epoch": 25.97,
      "learning_rate": 4.8708133971291866e-05,
      "loss": 2.6842,
      "step": 95000
    },
    {
      "epoch": 26.11,
      "learning_rate": 4.87012987012987e-05,
      "loss": 2.6463,
      "step": 95500
    },
    {
      "epoch": 26.24,
      "learning_rate": 4.8694463431305536e-05,
      "loss": 2.6453,
      "step": 96000
    },
    {
      "epoch": 26.38,
      "learning_rate": 4.868762816131238e-05,
      "loss": 2.6473,
      "step": 96500
    },
    {
      "epoch": 26.52,
      "learning_rate": 4.868079289131921e-05,
      "loss": 2.657,
      "step": 97000
    },
    {
      "epoch": 26.65,
      "learning_rate": 4.867395762132605e-05,
      "loss": 2.6645,
      "step": 97500
    },
    {
      "epoch": 26.79,
      "learning_rate": 4.8667122351332875e-05,
      "loss": 2.6612,
      "step": 98000
    },
    {
      "epoch": 26.93,
      "learning_rate": 4.866028708133972e-05,
      "loss": 2.6681,
      "step": 98500
    },
    {
      "epoch": 27.06,
      "learning_rate": 4.865345181134655e-05,
      "loss": 2.6451,
      "step": 99000
    },
    {
      "epoch": 27.2,
      "learning_rate": 4.8646616541353386e-05,
      "loss": 2.6271,
      "step": 99500
    },
    {
      "epoch": 27.34,
      "learning_rate": 4.863978127136022e-05,
      "loss": 2.6303,
      "step": 100000
    },
    {
      "epoch": 27.47,
      "learning_rate": 4.8632946001367056e-05,
      "loss": 2.6439,
      "step": 100500
    },
    {
      "epoch": 27.61,
      "learning_rate": 4.862611073137389e-05,
      "loss": 2.6458,
      "step": 101000
    },
    {
      "epoch": 27.75,
      "learning_rate": 4.8619275461380726e-05,
      "loss": 2.6524,
      "step": 101500
    },
    {
      "epoch": 27.88,
      "learning_rate": 4.861244019138757e-05,
      "loss": 2.6561,
      "step": 102000
    },
    {
      "epoch": 28.02,
      "learning_rate": 4.86056049213944e-05,
      "loss": 2.6481,
      "step": 102500
    },
    {
      "epoch": 28.16,
      "learning_rate": 4.859876965140123e-05,
      "loss": 2.61,
      "step": 103000
    },
    {
      "epoch": 28.29,
      "learning_rate": 4.8591934381408065e-05,
      "loss": 2.6118,
      "step": 103500
    },
    {
      "epoch": 28.43,
      "learning_rate": 4.8585099111414906e-05,
      "loss": 2.6284,
      "step": 104000
    },
    {
      "epoch": 28.57,
      "learning_rate": 4.857826384142174e-05,
      "loss": 2.6274,
      "step": 104500
    },
    {
      "epoch": 28.7,
      "learning_rate": 4.8571428571428576e-05,
      "loss": 2.6392,
      "step": 105000
    },
    {
      "epoch": 28.84,
      "learning_rate": 4.8564593301435404e-05,
      "loss": 2.644,
      "step": 105500
    },
    {
      "epoch": 28.98,
      "learning_rate": 4.8557758031442245e-05,
      "loss": 2.6482,
      "step": 106000
    },
    {
      "epoch": 29.11,
      "learning_rate": 4.855092276144908e-05,
      "loss": 2.6076,
      "step": 106500
    },
    {
      "epoch": 29.25,
      "learning_rate": 4.8544087491455915e-05,
      "loss": 2.5969,
      "step": 107000
    },
    {
      "epoch": 29.39,
      "learning_rate": 4.853725222146275e-05,
      "loss": 2.6128,
      "step": 107500
    },
    {
      "epoch": 29.52,
      "learning_rate": 4.8530416951469585e-05,
      "loss": 2.62,
      "step": 108000
    },
    {
      "epoch": 29.66,
      "learning_rate": 4.852358168147642e-05,
      "loss": 2.6232,
      "step": 108500
    },
    {
      "epoch": 29.8,
      "learning_rate": 4.8516746411483254e-05,
      "loss": 2.6232,
      "step": 109000
    },
    {
      "epoch": 29.93,
      "learning_rate": 4.850991114149009e-05,
      "loss": 2.633,
      "step": 109500
    },
    {
      "epoch": 30.07,
      "learning_rate": 4.850307587149693e-05,
      "loss": 2.6035,
      "step": 110000
    },
    {
      "epoch": 30.21,
      "learning_rate": 4.849624060150376e-05,
      "loss": 2.5874,
      "step": 110500
    },
    {
      "epoch": 30.34,
      "learning_rate": 4.8489405331510593e-05,
      "loss": 2.5952,
      "step": 111000
    },
    {
      "epoch": 30.48,
      "learning_rate": 4.8482570061517435e-05,
      "loss": 2.6043,
      "step": 111500
    },
    {
      "epoch": 30.62,
      "learning_rate": 4.847573479152427e-05,
      "loss": 2.6067,
      "step": 112000
    },
    {
      "epoch": 30.75,
      "learning_rate": 4.8468899521531105e-05,
      "loss": 2.6119,
      "step": 112500
    },
    {
      "epoch": 30.89,
      "learning_rate": 4.846206425153794e-05,
      "loss": 2.6214,
      "step": 113000
    },
    {
      "epoch": 31.03,
      "learning_rate": 4.8455228981544774e-05,
      "loss": 2.6113,
      "step": 113500
    },
    {
      "epoch": 31.16,
      "learning_rate": 4.844839371155161e-05,
      "loss": 2.5724,
      "step": 114000
    },
    {
      "epoch": 31.3,
      "learning_rate": 4.8441558441558444e-05,
      "loss": 2.5825,
      "step": 114500
    },
    {
      "epoch": 31.44,
      "learning_rate": 4.843472317156528e-05,
      "loss": 2.5849,
      "step": 115000
    },
    {
      "epoch": 31.57,
      "learning_rate": 4.842788790157211e-05,
      "loss": 2.5958,
      "step": 115500
    },
    {
      "epoch": 31.71,
      "learning_rate": 4.842105263157895e-05,
      "loss": 2.5992,
      "step": 116000
    },
    {
      "epoch": 31.85,
      "learning_rate": 4.841421736158578e-05,
      "loss": 2.6051,
      "step": 116500
    },
    {
      "epoch": 31.98,
      "learning_rate": 4.840738209159262e-05,
      "loss": 2.6078,
      "step": 117000
    },
    {
      "epoch": 32.12,
      "learning_rate": 4.840054682159946e-05,
      "loss": 2.56,
      "step": 117500
    },
    {
      "epoch": 32.26,
      "learning_rate": 4.8393711551606294e-05,
      "loss": 2.5672,
      "step": 118000
    },
    {
      "epoch": 32.39,
      "learning_rate": 4.838687628161312e-05,
      "loss": 2.5767,
      "step": 118500
    },
    {
      "epoch": 32.53,
      "learning_rate": 4.838004101161996e-05,
      "loss": 2.5798,
      "step": 119000
    },
    {
      "epoch": 32.67,
      "learning_rate": 4.83732057416268e-05,
      "loss": 2.5858,
      "step": 119500
    },
    {
      "epoch": 32.8,
      "learning_rate": 4.836637047163363e-05,
      "loss": 2.5966,
      "step": 120000
    },
    {
      "epoch": 32.94,
      "learning_rate": 4.835953520164047e-05,
      "loss": 2.5949,
      "step": 120500
    },
    {
      "epoch": 33.08,
      "learning_rate": 4.83526999316473e-05,
      "loss": 2.5606,
      "step": 121000
    },
    {
      "epoch": 33.21,
      "learning_rate": 4.834586466165414e-05,
      "loss": 2.5511,
      "step": 121500
    },
    {
      "epoch": 33.35,
      "learning_rate": 4.833902939166097e-05,
      "loss": 2.5657,
      "step": 122000
    },
    {
      "epoch": 33.49,
      "learning_rate": 4.833219412166781e-05,
      "loss": 2.5705,
      "step": 122500
    },
    {
      "epoch": 33.62,
      "learning_rate": 4.832535885167465e-05,
      "loss": 2.5715,
      "step": 123000
    },
    {
      "epoch": 33.76,
      "learning_rate": 4.831852358168148e-05,
      "loss": 2.5787,
      "step": 123500
    },
    {
      "epoch": 33.9,
      "learning_rate": 4.831168831168831e-05,
      "loss": 2.5778,
      "step": 124000
    },
    {
      "epoch": 34.03,
      "learning_rate": 4.8304853041695146e-05,
      "loss": 2.573,
      "step": 124500
    },
    {
      "epoch": 34.17,
      "learning_rate": 4.829801777170199e-05,
      "loss": 2.5399,
      "step": 125000
    },
    {
      "epoch": 34.31,
      "learning_rate": 4.829118250170882e-05,
      "loss": 2.5387,
      "step": 125500
    },
    {
      "epoch": 34.45,
      "learning_rate": 4.828434723171566e-05,
      "loss": 2.5553,
      "step": 126000
    },
    {
      "epoch": 34.58,
      "learning_rate": 4.8277511961722486e-05,
      "loss": 2.5587,
      "step": 126500
    },
    {
      "epoch": 34.72,
      "learning_rate": 4.827067669172933e-05,
      "loss": 2.5641,
      "step": 127000
    },
    {
      "epoch": 34.86,
      "learning_rate": 4.826384142173616e-05,
      "loss": 2.5707,
      "step": 127500
    },
    {
      "epoch": 34.99,
      "learning_rate": 4.8257006151743e-05,
      "loss": 2.5761,
      "step": 128000
    },
    {
      "epoch": 35.13,
      "learning_rate": 4.825017088174983e-05,
      "loss": 2.5247,
      "step": 128500
    },
    {
      "epoch": 35.27,
      "learning_rate": 4.8243335611756666e-05,
      "loss": 2.5297,
      "step": 129000
    },
    {
      "epoch": 35.4,
      "learning_rate": 4.82365003417635e-05,
      "loss": 2.5373,
      "step": 129500
    },
    {
      "epoch": 35.54,
      "learning_rate": 4.8229665071770336e-05,
      "loss": 2.5436,
      "step": 130000
    },
    {
      "epoch": 35.68,
      "learning_rate": 4.822282980177718e-05,
      "loss": 2.5495,
      "step": 130500
    },
    {
      "epoch": 35.81,
      "learning_rate": 4.821599453178401e-05,
      "loss": 2.566,
      "step": 131000
    },
    {
      "epoch": 35.95,
      "learning_rate": 4.820915926179084e-05,
      "loss": 2.5613,
      "step": 131500
    },
    {
      "epoch": 36.09,
      "learning_rate": 4.8202323991797675e-05,
      "loss": 2.5275,
      "step": 132000
    },
    {
      "epoch": 36.22,
      "learning_rate": 4.819548872180452e-05,
      "loss": 2.5186,
      "step": 132500
    },
    {
      "epoch": 36.36,
      "learning_rate": 4.818865345181135e-05,
      "loss": 2.5293,
      "step": 133000
    },
    {
      "epoch": 36.5,
      "learning_rate": 4.8181818181818186e-05,
      "loss": 2.5351,
      "step": 133500
    },
    {
      "epoch": 36.63,
      "learning_rate": 4.8174982911825014e-05,
      "loss": 2.5375,
      "step": 134000
    },
    {
      "epoch": 36.77,
      "learning_rate": 4.8168147641831856e-05,
      "loss": 2.5438,
      "step": 134500
    },
    {
      "epoch": 36.91,
      "learning_rate": 4.816131237183869e-05,
      "loss": 2.5456,
      "step": 135000
    },
    {
      "epoch": 37.04,
      "learning_rate": 4.8154477101845525e-05,
      "loss": 2.5379,
      "step": 135500
    },
    {
      "epoch": 37.18,
      "learning_rate": 4.814764183185236e-05,
      "loss": 2.5043,
      "step": 136000
    },
    {
      "epoch": 37.32,
      "learning_rate": 4.8140806561859195e-05,
      "loss": 2.5121,
      "step": 136500
    },
    {
      "epoch": 37.45,
      "learning_rate": 4.813397129186603e-05,
      "loss": 2.5139,
      "step": 137000
    },
    {
      "epoch": 37.59,
      "learning_rate": 4.8127136021872865e-05,
      "loss": 2.5278,
      "step": 137500
    },
    {
      "epoch": 37.73,
      "learning_rate": 4.81203007518797e-05,
      "loss": 2.5329,
      "step": 138000
    },
    {
      "epoch": 37.86,
      "learning_rate": 4.811346548188654e-05,
      "loss": 2.5364,
      "step": 138500
    },
    {
      "epoch": 38.0,
      "learning_rate": 4.810663021189337e-05,
      "loss": 2.5463,
      "step": 139000
    },
    {
      "epoch": 38.14,
      "learning_rate": 4.8099794941900204e-05,
      "loss": 2.4857,
      "step": 139500
    },
    {
      "epoch": 38.27,
      "learning_rate": 4.8092959671907045e-05,
      "loss": 2.4959,
      "step": 140000
    },
    {
      "epoch": 38.41,
      "learning_rate": 4.808612440191388e-05,
      "loss": 2.507,
      "step": 140500
    },
    {
      "epoch": 38.55,
      "learning_rate": 4.8079289131920715e-05,
      "loss": 2.5137,
      "step": 141000
    },
    {
      "epoch": 38.68,
      "learning_rate": 4.807245386192755e-05,
      "loss": 2.5184,
      "step": 141500
    },
    {
      "epoch": 38.82,
      "learning_rate": 4.8065618591934384e-05,
      "loss": 2.5339,
      "step": 142000
    },
    {
      "epoch": 38.96,
      "learning_rate": 4.805878332194122e-05,
      "loss": 2.5319,
      "step": 142500
    },
    {
      "epoch": 39.09,
      "learning_rate": 4.8051948051948054e-05,
      "loss": 2.4922,
      "step": 143000
    },
    {
      "epoch": 39.23,
      "learning_rate": 4.804511278195489e-05,
      "loss": 2.4844,
      "step": 143500
    },
    {
      "epoch": 39.37,
      "learning_rate": 4.8038277511961724e-05,
      "loss": 2.4939,
      "step": 144000
    },
    {
      "epoch": 39.5,
      "learning_rate": 4.803144224196856e-05,
      "loss": 2.5038,
      "step": 144500
    },
    {
      "epoch": 39.64,
      "learning_rate": 4.802460697197539e-05,
      "loss": 2.5074,
      "step": 145000
    },
    {
      "epoch": 39.78,
      "learning_rate": 4.801777170198223e-05,
      "loss": 2.5117,
      "step": 145500
    },
    {
      "epoch": 39.91,
      "learning_rate": 4.801093643198907e-05,
      "loss": 2.5152,
      "step": 146000
    },
    {
      "epoch": 40.05,
      "learning_rate": 4.8004101161995904e-05,
      "loss": 2.4997,
      "step": 146500
    },
    {
      "epoch": 40.19,
      "learning_rate": 4.799726589200273e-05,
      "loss": 2.4738,
      "step": 147000
    },
    {
      "epoch": 40.32,
      "learning_rate": 4.799043062200957e-05,
      "loss": 2.4777,
      "step": 147500
    },
    {
      "epoch": 40.46,
      "learning_rate": 4.798359535201641e-05,
      "loss": 2.4953,
      "step": 148000
    },
    {
      "epoch": 40.6,
      "learning_rate": 4.7976760082023244e-05,
      "loss": 2.4929,
      "step": 148500
    },
    {
      "epoch": 40.73,
      "learning_rate": 4.796992481203008e-05,
      "loss": 2.5007,
      "step": 149000
    },
    {
      "epoch": 40.87,
      "learning_rate": 4.796308954203691e-05,
      "loss": 2.5026,
      "step": 149500
    },
    {
      "epoch": 41.01,
      "learning_rate": 4.795625427204375e-05,
      "loss": 2.5095,
      "step": 150000
    },
    {
      "epoch": 41.14,
      "learning_rate": 4.794941900205058e-05,
      "loss": 2.4572,
      "step": 150500
    },
    {
      "epoch": 41.28,
      "learning_rate": 4.794258373205742e-05,
      "loss": 2.4632,
      "step": 151000
    },
    {
      "epoch": 41.42,
      "learning_rate": 4.793574846206426e-05,
      "loss": 2.478,
      "step": 151500
    },
    {
      "epoch": 41.55,
      "learning_rate": 4.792891319207109e-05,
      "loss": 2.4912,
      "step": 152000
    },
    {
      "epoch": 41.69,
      "learning_rate": 4.792207792207792e-05,
      "loss": 2.4844,
      "step": 152500
    },
    {
      "epoch": 41.83,
      "learning_rate": 4.791524265208476e-05,
      "loss": 2.4967,
      "step": 153000
    },
    {
      "epoch": 41.96,
      "learning_rate": 4.79084073820916e-05,
      "loss": 2.4939,
      "step": 153500
    },
    {
      "epoch": 42.1,
      "learning_rate": 4.790157211209843e-05,
      "loss": 2.4654,
      "step": 154000
    },
    {
      "epoch": 42.24,
      "learning_rate": 4.789473684210526e-05,
      "loss": 2.4502,
      "step": 154500
    },
    {
      "epoch": 42.37,
      "learning_rate": 4.7887901572112096e-05,
      "loss": 2.4673,
      "step": 155000
    },
    {
      "epoch": 42.51,
      "learning_rate": 4.788106630211894e-05,
      "loss": 2.4697,
      "step": 155500
    },
    {
      "epoch": 42.65,
      "learning_rate": 4.787423103212577e-05,
      "loss": 2.4791,
      "step": 156000
    },
    {
      "epoch": 42.78,
      "learning_rate": 4.786739576213261e-05,
      "loss": 2.4783,
      "step": 156500
    },
    {
      "epoch": 42.92,
      "learning_rate": 4.786056049213944e-05,
      "loss": 2.4872,
      "step": 157000
    },
    {
      "epoch": 43.06,
      "learning_rate": 4.785372522214628e-05,
      "loss": 2.4711,
      "step": 157500
    },
    {
      "epoch": 43.19,
      "learning_rate": 4.784688995215311e-05,
      "loss": 2.443,
      "step": 158000
    },
    {
      "epoch": 43.33,
      "learning_rate": 4.7840054682159946e-05,
      "loss": 2.4447,
      "step": 158500
    },
    {
      "epoch": 43.47,
      "learning_rate": 4.783321941216679e-05,
      "loss": 2.4653,
      "step": 159000
    },
    {
      "epoch": 43.6,
      "learning_rate": 4.7826384142173616e-05,
      "loss": 2.4677,
      "step": 159500
    },
    {
      "epoch": 43.74,
      "learning_rate": 4.781954887218045e-05,
      "loss": 2.4703,
      "step": 160000
    },
    {
      "epoch": 43.88,
      "learning_rate": 4.7812713602187285e-05,
      "loss": 2.4723,
      "step": 160500
    },
    {
      "epoch": 44.01,
      "learning_rate": 4.780587833219413e-05,
      "loss": 2.4755,
      "step": 161000
    },
    {
      "epoch": 44.15,
      "learning_rate": 4.779904306220096e-05,
      "loss": 2.4305,
      "step": 161500
    },
    {
      "epoch": 44.29,
      "learning_rate": 4.7792207792207797e-05,
      "loss": 2.4368,
      "step": 162000
    },
    {
      "epoch": 44.42,
      "learning_rate": 4.7785372522214625e-05,
      "loss": 2.444,
      "step": 162500
    },
    {
      "epoch": 44.56,
      "learning_rate": 4.7778537252221466e-05,
      "loss": 2.4587,
      "step": 163000
    },
    {
      "epoch": 44.7,
      "learning_rate": 4.77717019822283e-05,
      "loss": 2.4599,
      "step": 163500
    },
    {
      "epoch": 44.83,
      "learning_rate": 4.7764866712235136e-05,
      "loss": 2.4643,
      "step": 164000
    },
    {
      "epoch": 44.97,
      "learning_rate": 4.775803144224197e-05,
      "loss": 2.4688,
      "step": 164500
    },
    {
      "epoch": 45.11,
      "learning_rate": 4.7751196172248805e-05,
      "loss": 2.4327,
      "step": 165000
    },
    {
      "epoch": 45.24,
      "learning_rate": 4.774436090225564e-05,
      "loss": 2.4213,
      "step": 165500
    },
    {
      "epoch": 45.38,
      "learning_rate": 4.7737525632262475e-05,
      "loss": 2.4372,
      "step": 166000
    },
    {
      "epoch": 45.52,
      "learning_rate": 4.7730690362269316e-05,
      "loss": 2.4385,
      "step": 166500
    },
    {
      "epoch": 45.65,
      "learning_rate": 4.772385509227615e-05,
      "loss": 2.4512,
      "step": 167000
    },
    {
      "epoch": 45.79,
      "learning_rate": 4.771701982228298e-05,
      "loss": 2.4543,
      "step": 167500
    },
    {
      "epoch": 45.93,
      "learning_rate": 4.7710184552289814e-05,
      "loss": 2.4567,
      "step": 168000
    },
    {
      "epoch": 46.06,
      "learning_rate": 4.7703349282296656e-05,
      "loss": 2.4389,
      "step": 168500
    },
    {
      "epoch": 46.2,
      "learning_rate": 4.769651401230349e-05,
      "loss": 2.4125,
      "step": 169000
    },
    {
      "epoch": 46.34,
      "learning_rate": 4.7689678742310325e-05,
      "loss": 2.4239,
      "step": 169500
    },
    {
      "epoch": 46.47,
      "learning_rate": 4.768284347231716e-05,
      "loss": 2.4283,
      "step": 170000
    },
    {
      "epoch": 46.61,
      "learning_rate": 4.7676008202323995e-05,
      "loss": 2.4372,
      "step": 170500
    },
    {
      "epoch": 46.75,
      "learning_rate": 4.766917293233083e-05,
      "loss": 2.4478,
      "step": 171000
    },
    {
      "epoch": 46.88,
      "learning_rate": 4.7662337662337664e-05,
      "loss": 2.445,
      "step": 171500
    },
    {
      "epoch": 47.02,
      "learning_rate": 4.76555023923445e-05,
      "loss": 2.4429,
      "step": 172000
    },
    {
      "epoch": 47.16,
      "learning_rate": 4.7648667122351334e-05,
      "loss": 2.397,
      "step": 172500
    },
    {
      "epoch": 47.29,
      "learning_rate": 4.764183185235817e-05,
      "loss": 2.4144,
      "step": 173000
    },
    {
      "epoch": 47.43,
      "learning_rate": 4.7634996582365004e-05,
      "loss": 2.4176,
      "step": 173500
    },
    {
      "epoch": 47.57,
      "learning_rate": 4.762816131237184e-05,
      "loss": 2.4262,
      "step": 174000
    },
    {
      "epoch": 47.7,
      "learning_rate": 4.762132604237868e-05,
      "loss": 2.4353,
      "step": 174500
    },
    {
      "epoch": 47.84,
      "learning_rate": 4.7614490772385515e-05,
      "loss": 2.4362,
      "step": 175000
    },
    {
      "epoch": 47.98,
      "learning_rate": 4.760765550239234e-05,
      "loss": 2.4417,
      "step": 175500
    },
    {
      "epoch": 48.11,
      "learning_rate": 4.7600820232399184e-05,
      "loss": 2.3966,
      "step": 176000
    },
    {
      "epoch": 48.25,
      "learning_rate": 4.759398496240602e-05,
      "loss": 2.3993,
      "step": 176500
    },
    {
      "epoch": 48.39,
      "learning_rate": 4.7587149692412854e-05,
      "loss": 2.4065,
      "step": 177000
    },
    {
      "epoch": 48.52,
      "learning_rate": 4.758031442241969e-05,
      "loss": 2.414,
      "step": 177500
    },
    {
      "epoch": 48.66,
      "learning_rate": 4.7573479152426524e-05,
      "loss": 2.4234,
      "step": 178000
    },
    {
      "epoch": 48.8,
      "learning_rate": 4.756664388243336e-05,
      "loss": 2.4262,
      "step": 178500
    },
    {
      "epoch": 48.93,
      "learning_rate": 4.755980861244019e-05,
      "loss": 2.4329,
      "step": 179000
    },
    {
      "epoch": 49.07,
      "learning_rate": 4.755297334244703e-05,
      "loss": 2.4114,
      "step": 179500
    },
    {
      "epoch": 49.21,
      "learning_rate": 4.754613807245387e-05,
      "loss": 2.3889,
      "step": 180000
    },
    {
      "epoch": 49.34,
      "learning_rate": 4.75393028024607e-05,
      "loss": 0.0066,
      "step": 180500
    },
    {
      "epoch": 49.48,
      "learning_rate": 4.753246753246753e-05,
      "loss": 2.4065,
      "step": 181000
    },
    {
      "epoch": 49.62,
      "learning_rate": 4.752563226247437e-05,
      "loss": 2.4052,
      "step": 181500
    },
    {
      "epoch": 49.75,
      "learning_rate": 4.751879699248121e-05,
      "loss": 2.4143,
      "step": 182000
    },
    {
      "epoch": 49.89,
      "learning_rate": 4.7511961722488043e-05,
      "loss": 2.4229,
      "step": 182500
    },
    {
      "epoch": 50.03,
      "learning_rate": 4.750512645249487e-05,
      "loss": 2.4113,
      "step": 183000
    },
    {
      "epoch": 50.16,
      "learning_rate": 4.7498291182501706e-05,
      "loss": 2.3709,
      "step": 183500
    },
    {
      "epoch": 50.3,
      "learning_rate": 4.749145591250855e-05,
      "loss": 2.3825,
      "step": 184000
    },
    {
      "epoch": 50.44,
      "learning_rate": 4.748462064251538e-05,
      "loss": 2.3969,
      "step": 184500
    },
    {
      "epoch": 50.57,
      "learning_rate": 4.747778537252222e-05,
      "loss": 2.4013,
      "step": 185000
    },
    {
      "epoch": 50.71,
      "learning_rate": 4.747095010252905e-05,
      "loss": 2.4049,
      "step": 185500
    },
    {
      "epoch": 50.85,
      "learning_rate": 4.746411483253589e-05,
      "loss": 2.4081,
      "step": 186000
    },
    {
      "epoch": 50.98,
      "learning_rate": 4.745727956254272e-05,
      "loss": 2.416,
      "step": 186500
    },
    {
      "epoch": 51.12,
      "learning_rate": 4.7450444292549557e-05,
      "loss": 2.3682,
      "step": 187000
    },
    {
      "epoch": 51.26,
      "learning_rate": 4.74436090225564e-05,
      "loss": 2.3701,
      "step": 187500
    },
    {
      "epoch": 51.39,
      "learning_rate": 4.7436773752563226e-05,
      "loss": 2.3815,
      "step": 188000
    },
    {
      "epoch": 51.53,
      "learning_rate": 4.742993848257006e-05,
      "loss": 2.3927,
      "step": 188500
    },
    {
      "epoch": 51.67,
      "learning_rate": 4.7423103212576896e-05,
      "loss": 2.391,
      "step": 189000
    },
    {
      "epoch": 51.8,
      "learning_rate": 4.741626794258374e-05,
      "loss": 2.4008,
      "step": 189500
    },
    {
      "epoch": 51.94,
      "learning_rate": 4.740943267259057e-05,
      "loss": 2.4082,
      "step": 190000
    },
    {
      "epoch": 52.08,
      "learning_rate": 4.740259740259741e-05,
      "loss": 2.3833,
      "step": 190500
    },
    {
      "epoch": 52.21,
      "learning_rate": 4.7395762132604235e-05,
      "loss": 2.3605,
      "step": 191000
    },
    {
      "epoch": 52.35,
      "learning_rate": 4.7388926862611076e-05,
      "loss": 2.3671,
      "step": 191500
    },
    {
      "epoch": 52.49,
      "learning_rate": 4.738209159261791e-05,
      "loss": 2.3795,
      "step": 192000
    },
    {
      "epoch": 52.62,
      "learning_rate": 4.7375256322624746e-05,
      "loss": 2.3847,
      "step": 192500
    },
    {
      "epoch": 52.76,
      "learning_rate": 4.736842105263158e-05,
      "loss": 2.3837,
      "step": 193000
    },
    {
      "epoch": 52.9,
      "learning_rate": 4.7361585782638416e-05,
      "loss": 2.402,
      "step": 193500
    },
    {
      "epoch": 53.03,
      "learning_rate": 4.735475051264525e-05,
      "loss": 2.3843,
      "step": 194000
    },
    {
      "epoch": 53.17,
      "learning_rate": 4.7347915242652085e-05,
      "loss": 2.3485,
      "step": 194500
    },
    {
      "epoch": 53.31,
      "learning_rate": 4.734107997265893e-05,
      "loss": 2.359,
      "step": 195000
    },
    {
      "epoch": 53.44,
      "learning_rate": 4.733424470266576e-05,
      "loss": 2.3675,
      "step": 195500
    },
    {
      "epoch": 53.58,
      "learning_rate": 4.732740943267259e-05,
      "loss": 2.3771,
      "step": 196000
    },
    {
      "epoch": 53.72,
      "learning_rate": 4.7320574162679424e-05,
      "loss": 2.3825,
      "step": 196500
    },
    {
      "epoch": 53.85,
      "learning_rate": 4.7313738892686266e-05,
      "loss": 2.3819,
      "step": 197000
    },
    {
      "epoch": 53.99,
      "learning_rate": 4.73069036226931e-05,
      "loss": 2.3902,
      "step": 197500
    },
    {
      "epoch": 54.13,
      "learning_rate": 4.7300068352699936e-05,
      "loss": 2.3445,
      "step": 198000
    },
    {
      "epoch": 54.26,
      "learning_rate": 4.729323308270677e-05,
      "loss": 2.3435,
      "step": 198500
    },
    {
      "epoch": 54.4,
      "learning_rate": 4.7286397812713605e-05,
      "loss": 2.3632,
      "step": 199000
    },
    {
      "epoch": 54.54,
      "learning_rate": 4.727956254272044e-05,
      "loss": 2.3605,
      "step": 199500
    },
    {
      "epoch": 54.67,
      "learning_rate": 4.7272727272727275e-05,
      "loss": 2.3713,
      "step": 200000
    },
    {
      "epoch": 54.81,
      "learning_rate": 4.726589200273411e-05,
      "loss": 2.3734,
      "step": 200500
    },
    {
      "epoch": 54.95,
      "learning_rate": 4.7259056732740944e-05,
      "loss": 2.3805,
      "step": 201000
    },
    {
      "epoch": 55.08,
      "learning_rate": 4.725222146274778e-05,
      "loss": 2.3473,
      "step": 201500
    },
    {
      "epoch": 55.22,
      "learning_rate": 4.7245386192754614e-05,
      "loss": 2.3306,
      "step": 202000
    },
    {
      "epoch": 55.36,
      "learning_rate": 4.723855092276145e-05,
      "loss": 2.348,
      "step": 202500
    },
    {
      "epoch": 55.49,
      "learning_rate": 4.723171565276829e-05,
      "loss": 2.3537,
      "step": 203000
    },
    {
      "epoch": 55.63,
      "learning_rate": 4.7224880382775125e-05,
      "loss": 2.3617,
      "step": 203500
    },
    {
      "epoch": 55.77,
      "learning_rate": 4.721804511278195e-05,
      "loss": 2.3665,
      "step": 204000
    },
    {
      "epoch": 55.9,
      "learning_rate": 4.7211209842788795e-05,
      "loss": 2.3715,
      "step": 204500
    },
    {
      "epoch": 56.04,
      "learning_rate": 4.720437457279563e-05,
      "loss": 2.3597,
      "step": 205000
    },
    {
      "epoch": 56.18,
      "learning_rate": 4.7197539302802464e-05,
      "loss": 2.3231,
      "step": 205500
    },
    {
      "epoch": 56.31,
      "learning_rate": 4.71907040328093e-05,
      "loss": 2.335,
      "step": 206000
    },
    {
      "epoch": 56.45,
      "learning_rate": 4.7183868762816134e-05,
      "loss": 2.3418,
      "step": 206500
    },
    {
      "epoch": 56.59,
      "learning_rate": 4.717703349282297e-05,
      "loss": 2.3458,
      "step": 207000
    },
    {
      "epoch": 56.72,
      "learning_rate": 4.7170198222829803e-05,
      "loss": 2.3612,
      "step": 207500
    },
    {
      "epoch": 56.86,
      "learning_rate": 4.716336295283664e-05,
      "loss": 2.3632,
      "step": 208000
    },
    {
      "epoch": 57.0,
      "learning_rate": 4.715652768284348e-05,
      "loss": 2.3657,
      "step": 208500
    },
    {
      "epoch": 57.14,
      "learning_rate": 4.714969241285031e-05,
      "loss": 2.3137,
      "step": 209000
    },
    {
      "epoch": 57.27,
      "learning_rate": 4.714285714285714e-05,
      "loss": 2.3256,
      "step": 209500
    },
    {
      "epoch": 57.41,
      "learning_rate": 4.713602187286398e-05,
      "loss": 2.33,
      "step": 210000
    },
    {
      "epoch": 57.55,
      "learning_rate": 4.712918660287082e-05,
      "loss": 2.3427,
      "step": 210500
    },
    {
      "epoch": 57.68,
      "learning_rate": 4.7122351332877654e-05,
      "loss": 2.346,
      "step": 211000
    },
    {
      "epoch": 57.82,
      "learning_rate": 4.711551606288448e-05,
      "loss": 2.3537,
      "step": 211500
    },
    {
      "epoch": 57.96,
      "learning_rate": 4.7108680792891317e-05,
      "loss": 2.3487,
      "step": 212000
    },
    {
      "epoch": 58.09,
      "learning_rate": 4.710184552289816e-05,
      "loss": 2.3228,
      "step": 212500
    },
    {
      "epoch": 58.23,
      "learning_rate": 4.709501025290499e-05,
      "loss": 2.3111,
      "step": 213000
    },
    {
      "epoch": 58.37,
      "learning_rate": 4.708817498291183e-05,
      "loss": 2.3207,
      "step": 213500
    },
    {
      "epoch": 58.5,
      "learning_rate": 4.708133971291866e-05,
      "loss": 2.3281,
      "step": 214000
    },
    {
      "epoch": 58.64,
      "learning_rate": 4.70745044429255e-05,
      "loss": 2.3396,
      "step": 214500
    },
    {
      "epoch": 58.78,
      "learning_rate": 4.706766917293233e-05,
      "loss": 2.3445,
      "step": 215000
    },
    {
      "epoch": 58.91,
      "learning_rate": 4.706083390293917e-05,
      "loss": 2.3502,
      "step": 215500
    },
    {
      "epoch": 59.05,
      "learning_rate": 4.705399863294601e-05,
      "loss": 2.3343,
      "step": 216000
    },
    {
      "epoch": 59.19,
      "learning_rate": 4.7047163362952836e-05,
      "loss": 2.3013,
      "step": 216500
    },
    {
      "epoch": 59.32,
      "learning_rate": 4.704032809295967e-05,
      "loss": 2.3135,
      "step": 217000
    },
    {
      "epoch": 59.46,
      "learning_rate": 4.7033492822966506e-05,
      "loss": 2.3157,
      "step": 217500
    },
    {
      "epoch": 59.6,
      "learning_rate": 4.702665755297335e-05,
      "loss": 2.329,
      "step": 218000
    },
    {
      "epoch": 59.73,
      "learning_rate": 4.701982228298018e-05,
      "loss": 2.3323,
      "step": 218500
    },
    {
      "epoch": 59.87,
      "learning_rate": 4.701298701298702e-05,
      "loss": 2.3365,
      "step": 219000
    },
    {
      "epoch": 60.01,
      "learning_rate": 4.7006151742993845e-05,
      "loss": 2.3396,
      "step": 219500
    },
    {
      "epoch": 60.14,
      "learning_rate": 4.699931647300069e-05,
      "loss": 2.2899,
      "step": 220000
    },
    {
      "epoch": 60.28,
      "learning_rate": 4.699248120300752e-05,
      "loss": 2.2987,
      "step": 220500
    },
    {
      "epoch": 60.42,
      "learning_rate": 4.6985645933014356e-05,
      "loss": 2.3154,
      "step": 221000
    },
    {
      "epoch": 60.55,
      "learning_rate": 4.697881066302119e-05,
      "loss": 2.3181,
      "step": 221500
    },
    {
      "epoch": 60.69,
      "learning_rate": 4.6971975393028026e-05,
      "loss": 2.3242,
      "step": 222000
    },
    {
      "epoch": 60.83,
      "learning_rate": 4.696514012303486e-05,
      "loss": 2.3311,
      "step": 222500
    },
    {
      "epoch": 60.96,
      "learning_rate": 4.6958304853041696e-05,
      "loss": 2.3326,
      "step": 223000
    },
    {
      "epoch": 61.1,
      "learning_rate": 4.695146958304854e-05,
      "loss": 2.2986,
      "step": 223500
    },
    {
      "epoch": 61.24,
      "learning_rate": 4.694463431305537e-05,
      "loss": 2.2872,
      "step": 224000
    },
    {
      "epoch": 61.37,
      "learning_rate": 4.69377990430622e-05,
      "loss": 2.3003,
      "step": 224500
    },
    {
      "epoch": 61.51,
      "learning_rate": 4.6930963773069035e-05,
      "loss": 2.3074,
      "step": 225000
    },
    {
      "epoch": 61.65,
      "learning_rate": 4.6924128503075876e-05,
      "loss": 2.3136,
      "step": 225500
    },
    {
      "epoch": 61.78,
      "learning_rate": 4.691729323308271e-05,
      "loss": 2.3163,
      "step": 226000
    },
    {
      "epoch": 61.92,
      "learning_rate": 4.6910457963089546e-05,
      "loss": 2.3282,
      "step": 226500
    },
    {
      "epoch": 62.06,
      "learning_rate": 4.690362269309638e-05,
      "loss": 2.309,
      "step": 227000
    },
    {
      "epoch": 62.19,
      "learning_rate": 4.6896787423103215e-05,
      "loss": 2.2813,
      "step": 227500
    },
    {
      "epoch": 62.33,
      "learning_rate": 4.688995215311005e-05,
      "loss": 2.2888,
      "step": 228000
    },
    {
      "epoch": 62.47,
      "learning_rate": 4.6883116883116885e-05,
      "loss": 2.2994,
      "step": 228500
    },
    {
      "epoch": 62.6,
      "learning_rate": 4.687628161312372e-05,
      "loss": 2.3015,
      "step": 229000
    },
    {
      "epoch": 62.74,
      "learning_rate": 4.6869446343130555e-05,
      "loss": 2.3094,
      "step": 229500
    },
    {
      "epoch": 62.88,
      "learning_rate": 4.686261107313739e-05,
      "loss": 2.3217,
      "step": 230000
    },
    {
      "epoch": 63.01,
      "learning_rate": 4.6855775803144224e-05,
      "loss": 2.3126,
      "step": 230500
    },
    {
      "epoch": 63.15,
      "learning_rate": 4.6848940533151066e-05,
      "loss": 2.2696,
      "step": 231000
    },
    {
      "epoch": 63.29,
      "learning_rate": 4.68421052631579e-05,
      "loss": 2.2826,
      "step": 231500
    },
    {
      "epoch": 63.42,
      "learning_rate": 4.6835269993164735e-05,
      "loss": 2.2899,
      "step": 232000
    },
    {
      "epoch": 63.56,
      "learning_rate": 4.6828434723171563e-05,
      "loss": 2.2887,
      "step": 232500
    },
    {
      "epoch": 63.7,
      "learning_rate": 4.6821599453178405e-05,
      "loss": 2.3065,
      "step": 233000
    },
    {
      "epoch": 63.83,
      "learning_rate": 4.681476418318524e-05,
      "loss": 2.3073,
      "step": 233500
    },
    {
      "epoch": 63.97,
      "learning_rate": 4.6807928913192075e-05,
      "loss": 2.3111,
      "step": 234000
    },
    {
      "epoch": 64.11,
      "learning_rate": 4.680109364319891e-05,
      "loss": 2.2723,
      "step": 234500
    },
    {
      "epoch": 64.24,
      "learning_rate": 4.6794258373205744e-05,
      "loss": 2.2645,
      "step": 235000
    },
    {
      "epoch": 64.38,
      "learning_rate": 4.678742310321258e-05,
      "loss": 2.2742,
      "step": 235500
    },
    {
      "epoch": 64.52,
      "learning_rate": 4.6780587833219414e-05,
      "loss": 2.2892,
      "step": 236000
    },
    {
      "epoch": 64.65,
      "learning_rate": 4.677375256322625e-05,
      "loss": 2.2938,
      "step": 236500
    },
    {
      "epoch": 64.79,
      "learning_rate": 4.676691729323309e-05,
      "loss": 2.2964,
      "step": 237000
    },
    {
      "epoch": 64.93,
      "learning_rate": 4.676008202323992e-05,
      "loss": 2.3037,
      "step": 237500
    },
    {
      "epoch": 65.06,
      "learning_rate": 4.675324675324675e-05,
      "loss": 2.2862,
      "step": 238000
    },
    {
      "epoch": 65.2,
      "learning_rate": 4.674641148325359e-05,
      "loss": 2.2633,
      "step": 238500
    },
    {
      "epoch": 65.34,
      "learning_rate": 4.673957621326043e-05,
      "loss": 2.2678,
      "step": 239000
    },
    {
      "epoch": 65.47,
      "learning_rate": 4.6732740943267264e-05,
      "loss": 2.2798,
      "step": 239500
    },
    {
      "epoch": 65.61,
      "learning_rate": 4.672590567327409e-05,
      "loss": 2.2782,
      "step": 240000
    },
    {
      "epoch": 65.75,
      "learning_rate": 4.6719070403280934e-05,
      "loss": 2.288,
      "step": 240500
    },
    {
      "epoch": 65.88,
      "learning_rate": 4.671223513328777e-05,
      "loss": 2.2962,
      "step": 241000
    },
    {
      "epoch": 66.02,
      "learning_rate": 4.67053998632946e-05,
      "loss": 2.2894,
      "step": 241500
    },
    {
      "epoch": 66.16,
      "learning_rate": 4.669856459330144e-05,
      "loss": 2.251,
      "step": 242000
    },
    {
      "epoch": 66.29,
      "learning_rate": 4.669172932330827e-05,
      "loss": 2.2583,
      "step": 242500
    },
    {
      "epoch": 66.43,
      "learning_rate": 4.668489405331511e-05,
      "loss": 2.2658,
      "step": 243000
    },
    {
      "epoch": 66.57,
      "learning_rate": 4.667805878332194e-05,
      "loss": 2.2738,
      "step": 243500
    },
    {
      "epoch": 66.7,
      "learning_rate": 4.667122351332878e-05,
      "loss": 2.2824,
      "step": 244000
    },
    {
      "epoch": 66.84,
      "learning_rate": 4.666438824333562e-05,
      "loss": 2.2872,
      "step": 244500
    },
    {
      "epoch": 66.98,
      "learning_rate": 4.665755297334245e-05,
      "loss": 2.2883,
      "step": 245000
    },
    {
      "epoch": 67.11,
      "learning_rate": 4.665071770334928e-05,
      "loss": 2.2555,
      "step": 245500
    },
    {
      "epoch": 67.25,
      "learning_rate": 4.6643882433356116e-05,
      "loss": 2.2512,
      "step": 246000
    },
    {
      "epoch": 67.39,
      "learning_rate": 4.663704716336296e-05,
      "loss": 2.2566,
      "step": 246500
    },
    {
      "epoch": 67.52,
      "learning_rate": 4.663021189336979e-05,
      "loss": 2.268,
      "step": 247000
    },
    {
      "epoch": 67.66,
      "learning_rate": 4.662337662337663e-05,
      "loss": 2.2692,
      "step": 247500
    },
    {
      "epoch": 67.8,
      "learning_rate": 4.6616541353383456e-05,
      "loss": 2.2753,
      "step": 248000
    },
    {
      "epoch": 67.93,
      "learning_rate": 4.66097060833903e-05,
      "loss": 2.2813,
      "step": 248500
    },
    {
      "epoch": 68.07,
      "learning_rate": 4.660287081339713e-05,
      "loss": 2.2562,
      "step": 249000
    },
    {
      "epoch": 68.21,
      "learning_rate": 4.659603554340397e-05,
      "loss": 2.2335,
      "step": 249500
    },
    {
      "epoch": 68.34,
      "learning_rate": 4.65892002734108e-05,
      "loss": 2.2489,
      "step": 250000
    },
    {
      "epoch": 68.48,
      "learning_rate": 4.6582365003417636e-05,
      "loss": 2.2543,
      "step": 250500
    },
    {
      "epoch": 68.62,
      "learning_rate": 4.657552973342447e-05,
      "loss": 2.2639,
      "step": 251000
    },
    {
      "epoch": 68.75,
      "learning_rate": 4.6568694463431306e-05,
      "loss": 2.2737,
      "step": 251500
    },
    {
      "epoch": 68.89,
      "learning_rate": 4.656185919343815e-05,
      "loss": 2.2805,
      "step": 252000
    },
    {
      "epoch": 69.03,
      "learning_rate": 4.655502392344498e-05,
      "loss": 2.2629,
      "step": 252500
    },
    {
      "epoch": 69.16,
      "learning_rate": 4.654818865345181e-05,
      "loss": 2.2303,
      "step": 253000
    },
    {
      "epoch": 69.3,
      "learning_rate": 4.6541353383458645e-05,
      "loss": 2.2406,
      "step": 253500
    },
    {
      "epoch": 69.44,
      "learning_rate": 4.653451811346549e-05,
      "loss": 2.2444,
      "step": 254000
    },
    {
      "epoch": 69.57,
      "learning_rate": 4.652768284347232e-05,
      "loss": 2.2566,
      "step": 254500
    },
    {
      "epoch": 69.71,
      "learning_rate": 4.6520847573479156e-05,
      "loss": 2.2571,
      "step": 255000
    },
    {
      "epoch": 69.85,
      "learning_rate": 4.6514012303485984e-05,
      "loss": 2.2681,
      "step": 255500
    },
    {
      "epoch": 69.98,
      "learning_rate": 4.6507177033492826e-05,
      "loss": 2.272,
      "step": 256000
    },
    {
      "epoch": 70.12,
      "learning_rate": 4.650034176349966e-05,
      "loss": 2.2186,
      "step": 256500
    },
    {
      "epoch": 70.26,
      "learning_rate": 4.6493506493506495e-05,
      "loss": 2.2257,
      "step": 257000
    },
    {
      "epoch": 70.39,
      "learning_rate": 4.648667122351333e-05,
      "loss": 2.2402,
      "step": 257500
    },
    {
      "epoch": 70.53,
      "learning_rate": 4.6479835953520165e-05,
      "loss": 2.2501,
      "step": 258000
    },
    {
      "epoch": 70.67,
      "learning_rate": 4.6473000683527e-05,
      "loss": 2.2504,
      "step": 258500
    },
    {
      "epoch": 70.8,
      "learning_rate": 4.6466165413533835e-05,
      "loss": 2.2584,
      "step": 259000
    },
    {
      "epoch": 70.94,
      "learning_rate": 4.6459330143540676e-05,
      "loss": 2.2668,
      "step": 259500
    },
    {
      "epoch": 71.08,
      "learning_rate": 4.645249487354751e-05,
      "loss": 2.2337,
      "step": 260000
    },
    {
      "epoch": 71.21,
      "learning_rate": 4.644565960355434e-05,
      "loss": 2.2181,
      "step": 260500
    },
    {
      "epoch": 71.35,
      "learning_rate": 4.6438824333561174e-05,
|
"loss": 2.2308, |
|
"step": 261000 |
|
}, |
|
{ |
|
"epoch": 71.49, |
|
"learning_rate": 4.6431989063568015e-05, |
|
"loss": 2.2326, |
|
"step": 261500 |
|
}, |
|
{ |
|
"epoch": 71.62, |
|
"learning_rate": 4.642515379357485e-05, |
|
"loss": 2.245, |
|
"step": 262000 |
|
}, |
|
{ |
|
"epoch": 71.76, |
|
"learning_rate": 4.6418318523581685e-05, |
|
"loss": 2.2485, |
|
"step": 262500 |
|
}, |
|
{ |
|
"epoch": 71.9, |
|
"learning_rate": 4.641148325358852e-05, |
|
"loss": 2.2613, |
|
"step": 263000 |
|
}, |
|
{ |
|
"epoch": 72.03, |
|
"learning_rate": 4.6404647983595355e-05, |
|
"loss": 2.2447, |
|
"step": 263500 |
|
}, |
|
{ |
|
"epoch": 72.17, |
|
"learning_rate": 4.639781271360219e-05, |
|
"loss": 2.2129, |
|
"step": 264000 |
|
}, |
|
{ |
|
"epoch": 72.31, |
|
"learning_rate": 4.6390977443609024e-05, |
|
"loss": 2.221, |
|
"step": 264500 |
|
}, |
|
{ |
|
"epoch": 72.44, |
|
"learning_rate": 4.638414217361586e-05, |
|
"loss": 2.2273, |
|
"step": 265000 |
|
}, |
|
{ |
|
"epoch": 72.58, |
|
"learning_rate": 4.6377306903622694e-05, |
|
"loss": 2.2365, |
|
"step": 265500 |
|
}, |
|
{ |
|
"epoch": 72.72, |
|
"learning_rate": 4.637047163362953e-05, |
|
"loss": 2.2332, |
|
"step": 266000 |
|
}, |
|
{ |
|
"epoch": 72.85, |
|
"learning_rate": 4.636363636363636e-05, |
|
"loss": 2.2526, |
|
"step": 266500 |
|
}, |
|
{ |
|
"epoch": 72.99, |
|
"learning_rate": 4.63568010936432e-05, |
|
"loss": 2.2523, |
|
"step": 267000 |
|
}, |
|
{ |
|
"epoch": 73.13, |
|
"learning_rate": 4.634996582365004e-05, |
|
"loss": 2.2042, |
|
"step": 267500 |
|
}, |
|
{ |
|
"epoch": 73.26, |
|
"learning_rate": 4.6343130553656874e-05, |
|
"loss": 2.2098, |
|
"step": 268000 |
|
}, |
|
{ |
|
"epoch": 73.4, |
|
"learning_rate": 4.63362952836637e-05, |
|
"loss": 2.2214, |
|
"step": 268500 |
|
}, |
|
{ |
|
"epoch": 73.54, |
|
"learning_rate": 4.6329460013670544e-05, |
|
"loss": 2.2239, |
|
"step": 269000 |
|
}, |
|
{ |
|
"epoch": 73.67, |
|
"learning_rate": 4.632262474367738e-05, |
|
"loss": 2.2377, |
|
"step": 269500 |
|
}, |
|
{ |
|
"epoch": 73.81, |
|
"learning_rate": 4.6315789473684214e-05, |
|
"loss": 2.2381, |
|
"step": 270000 |
|
}, |
|
{ |
|
"epoch": 73.95, |
|
"learning_rate": 4.630895420369105e-05, |
|
"loss": 2.2397, |
|
"step": 270500 |
|
}, |
|
{ |
|
"epoch": 74.08, |
|
"learning_rate": 4.630211893369788e-05, |
|
"loss": 2.2159, |
|
"step": 271000 |
|
}, |
|
{ |
|
"epoch": 74.22, |
|
"learning_rate": 4.629528366370472e-05, |
|
"loss": 2.1986, |
|
"step": 271500 |
|
}, |
|
{ |
|
"epoch": 74.36, |
|
"learning_rate": 4.628844839371155e-05, |
|
"loss": 2.2109, |
|
"step": 272000 |
|
}, |
|
{ |
|
"epoch": 74.49, |
|
"learning_rate": 4.628161312371839e-05, |
|
"loss": 2.2197, |
|
"step": 272500 |
|
}, |
|
{ |
|
"epoch": 74.63, |
|
"learning_rate": 4.627477785372523e-05, |
|
"loss": 2.2228, |
|
"step": 273000 |
|
}, |
|
{ |
|
"epoch": 74.77, |
|
"learning_rate": 4.626794258373206e-05, |
|
"loss": 2.2331, |
|
"step": 273500 |
|
}, |
|
{ |
|
"epoch": 74.9, |
|
"learning_rate": 4.626110731373889e-05, |
|
"loss": 2.2348, |
|
"step": 274000 |
|
}, |
|
{ |
|
"epoch": 75.04, |
|
"learning_rate": 4.625427204374573e-05, |
|
"loss": 2.2251, |
|
"step": 274500 |
|
}, |
|
{ |
|
"epoch": 75.18, |
|
"learning_rate": 4.624743677375257e-05, |
|
"loss": 2.1874, |
|
"step": 275000 |
|
}, |
|
{ |
|
"epoch": 75.31, |
|
"learning_rate": 4.62406015037594e-05, |
|
"loss": 2.2042, |
|
"step": 275500 |
|
}, |
|
{ |
|
"epoch": 75.45, |
|
"learning_rate": 4.623376623376624e-05, |
|
"loss": 2.2069, |
|
"step": 276000 |
|
}, |
|
{ |
|
"epoch": 75.59, |
|
"learning_rate": 4.6226930963773066e-05, |
|
"loss": 2.2117, |
|
"step": 276500 |
|
}, |
|
{ |
|
"epoch": 75.72, |
|
"learning_rate": 4.622009569377991e-05, |
|
"loss": 2.2256, |
|
"step": 277000 |
|
}, |
|
{ |
|
"epoch": 75.86, |
|
"learning_rate": 4.621326042378674e-05, |
|
"loss": 2.2325, |
|
"step": 277500 |
|
}, |
|
{ |
|
"epoch": 76.0, |
|
"learning_rate": 4.620642515379358e-05, |
|
"loss": 2.2365, |
|
"step": 278000 |
|
}, |
|
{ |
|
"epoch": 76.13, |
|
"learning_rate": 4.619958988380041e-05, |
|
"loss": 2.182, |
|
"step": 278500 |
|
}, |
|
{ |
|
"epoch": 76.27, |
|
"learning_rate": 4.619275461380725e-05, |
|
"loss": 2.1906, |
|
"step": 279000 |
|
}, |
|
{ |
|
"epoch": 76.41, |
|
"learning_rate": 4.618591934381408e-05, |
|
"loss": 2.2033, |
|
"step": 279500 |
|
}, |
|
{ |
|
"epoch": 76.54, |
|
"learning_rate": 4.6179084073820916e-05, |
|
"loss": 2.2111, |
|
"step": 280000 |
|
}, |
|
{ |
|
"epoch": 76.68, |
|
"learning_rate": 4.617224880382776e-05, |
|
"loss": 2.2147, |
|
"step": 280500 |
|
}, |
|
{ |
|
"epoch": 76.82, |
|
"learning_rate": 4.616541353383459e-05, |
|
"loss": 2.2171, |
|
"step": 281000 |
|
}, |
|
{ |
|
"epoch": 76.95, |
|
"learning_rate": 4.615857826384142e-05, |
|
"loss": 2.2259, |
|
"step": 281500 |
|
}, |
|
{ |
|
"epoch": 77.09, |
|
"learning_rate": 4.6151742993848255e-05, |
|
"loss": 2.1938, |
|
"step": 282000 |
|
}, |
|
{ |
|
"epoch": 77.23, |
|
"learning_rate": 4.61449077238551e-05, |
|
"loss": 2.1838, |
|
"step": 282500 |
|
}, |
|
{ |
|
"epoch": 77.36, |
|
"learning_rate": 4.613807245386193e-05, |
|
"loss": 2.1927, |
|
"step": 283000 |
|
}, |
|
{ |
|
"epoch": 77.5, |
|
"learning_rate": 4.6131237183868767e-05, |
|
"loss": 2.198, |
|
"step": 283500 |
|
}, |
|
{ |
|
"epoch": 77.64, |
|
"learning_rate": 4.6124401913875595e-05, |
|
"loss": 2.2002, |
|
"step": 284000 |
|
}, |
|
{ |
|
"epoch": 77.77, |
|
"learning_rate": 4.6117566643882436e-05, |
|
"loss": 2.2115, |
|
"step": 284500 |
|
}, |
|
{ |
|
"epoch": 77.91, |
|
"learning_rate": 4.611073137388927e-05, |
|
"loss": 2.2224, |
|
"step": 285000 |
|
}, |
|
{ |
|
"epoch": 78.05, |
|
"learning_rate": 4.6103896103896106e-05, |
|
"loss": 2.2074, |
|
"step": 285500 |
|
}, |
|
{ |
|
"epoch": 78.18, |
|
"learning_rate": 4.609706083390294e-05, |
|
"loss": 2.1668, |
|
"step": 286000 |
|
}, |
|
{ |
|
"epoch": 78.32, |
|
"learning_rate": 4.6090225563909775e-05, |
|
"loss": 2.1842, |
|
"step": 286500 |
|
}, |
|
{ |
|
"epoch": 78.46, |
|
"learning_rate": 4.608339029391661e-05, |
|
"loss": 2.1886, |
|
"step": 287000 |
|
}, |
|
{ |
|
"epoch": 78.59, |
|
"learning_rate": 4.6076555023923445e-05, |
|
"loss": 2.1941, |
|
"step": 287500 |
|
}, |
|
{ |
|
"epoch": 78.73, |
|
"learning_rate": 4.6069719753930286e-05, |
|
"loss": 2.2041, |
|
"step": 288000 |
|
}, |
|
{ |
|
"epoch": 78.87, |
|
"learning_rate": 4.606288448393712e-05, |
|
"loss": 2.2174, |
|
"step": 288500 |
|
}, |
|
{ |
|
"epoch": 79.0, |
|
"learning_rate": 4.605604921394395e-05, |
|
"loss": 2.2196, |
|
"step": 289000 |
|
}, |
|
{ |
|
"epoch": 79.14, |
|
"learning_rate": 4.6049213943950784e-05, |
|
"loss": 2.162, |
|
"step": 289500 |
|
}, |
|
{ |
|
"epoch": 79.28, |
|
"learning_rate": 4.6042378673957626e-05, |
|
"loss": 2.1761, |
|
"step": 290000 |
|
}, |
|
{ |
|
"epoch": 79.41, |
|
"learning_rate": 4.603554340396446e-05, |
|
"loss": 2.1889, |
|
"step": 290500 |
|
}, |
|
{ |
|
"epoch": 79.55, |
|
"learning_rate": 4.6028708133971295e-05, |
|
"loss": 2.1878, |
|
"step": 291000 |
|
}, |
|
{ |
|
"epoch": 79.69, |
|
"learning_rate": 4.602187286397813e-05, |
|
"loss": 2.1939, |
|
"step": 291500 |
|
}, |
|
{ |
|
"epoch": 79.83, |
|
"learning_rate": 4.6015037593984965e-05, |
|
"loss": 2.2074, |
|
"step": 292000 |
|
}, |
|
{ |
|
"epoch": 79.96, |
|
"learning_rate": 4.60082023239918e-05, |
|
"loss": 2.2052, |
|
"step": 292500 |
|
}, |
|
{ |
|
"epoch": 80.1, |
|
"learning_rate": 4.6001367053998634e-05, |
|
"loss": 2.1769, |
|
"step": 293000 |
|
}, |
|
{ |
|
"epoch": 80.24, |
|
"learning_rate": 4.599453178400547e-05, |
|
"loss": 2.1675, |
|
"step": 293500 |
|
}, |
|
{ |
|
"epoch": 80.37, |
|
"learning_rate": 4.5987696514012304e-05, |
|
"loss": 2.1722, |
|
"step": 294000 |
|
}, |
|
{ |
|
"epoch": 80.51, |
|
"learning_rate": 4.598086124401914e-05, |
|
"loss": 2.1814, |
|
"step": 294500 |
|
}, |
|
{ |
|
"epoch": 80.65, |
|
"learning_rate": 4.5974025974025974e-05, |
|
"loss": 2.183, |
|
"step": 295000 |
|
}, |
|
{ |
|
"epoch": 80.78, |
|
"learning_rate": 4.5967190704032815e-05, |
|
"loss": 2.1974, |
|
"step": 295500 |
|
}, |
|
{ |
|
"epoch": 80.92, |
|
"learning_rate": 4.596035543403965e-05, |
|
"loss": 2.1974, |
|
"step": 296000 |
|
}, |
|
{ |
|
"epoch": 81.06, |
|
"learning_rate": 4.5953520164046485e-05, |
|
"loss": 2.1853, |
|
"step": 296500 |
|
}, |
|
{ |
|
"epoch": 81.19, |
|
"learning_rate": 4.594668489405331e-05, |
|
"loss": 2.1593, |
|
"step": 297000 |
|
}, |
|
{ |
|
"epoch": 81.33, |
|
"learning_rate": 4.5939849624060154e-05, |
|
"loss": 2.1672, |
|
"step": 297500 |
|
}, |
|
{ |
|
"epoch": 81.47, |
|
"learning_rate": 4.593301435406699e-05, |
|
"loss": 2.1748, |
|
"step": 298000 |
|
}, |
|
{ |
|
"epoch": 81.6, |
|
"learning_rate": 4.5926179084073824e-05, |
|
"loss": 2.1794, |
|
"step": 298500 |
|
}, |
|
{ |
|
"epoch": 81.74, |
|
"learning_rate": 4.591934381408066e-05, |
|
"loss": 2.1888, |
|
"step": 299000 |
|
}, |
|
{ |
|
"epoch": 81.88, |
|
"learning_rate": 4.5912508544087494e-05, |
|
"loss": 2.1932, |
|
"step": 299500 |
|
}, |
|
{ |
|
"epoch": 82.01, |
|
"learning_rate": 4.590567327409433e-05, |
|
"loss": 2.1958, |
|
"step": 300000 |
|
}, |
|
{ |
|
"epoch": 82.15, |
|
"learning_rate": 4.589883800410116e-05, |
|
"loss": 2.1528, |
|
"step": 300500 |
|
}, |
|
{ |
|
"epoch": 82.29, |
|
"learning_rate": 4.5892002734108e-05, |
|
"loss": 2.1538, |
|
"step": 301000 |
|
}, |
|
{ |
|
"epoch": 82.42, |
|
"learning_rate": 4.588516746411484e-05, |
|
"loss": 2.1697, |
|
"step": 301500 |
|
}, |
|
{ |
|
"epoch": 82.56, |
|
"learning_rate": 4.587833219412167e-05, |
|
"loss": 2.1753, |
|
"step": 302000 |
|
}, |
|
{ |
|
"epoch": 82.7, |
|
"learning_rate": 4.58714969241285e-05, |
|
"loss": 2.1771, |
|
"step": 302500 |
|
}, |
|
{ |
|
"epoch": 82.83, |
|
"learning_rate": 4.586466165413534e-05, |
|
"loss": 2.1842, |
|
"step": 303000 |
|
}, |
|
{ |
|
"epoch": 82.97, |
|
"learning_rate": 4.585782638414218e-05, |
|
"loss": 2.1916, |
|
"step": 303500 |
|
}, |
|
{ |
|
"epoch": 83.11, |
|
"learning_rate": 4.5850991114149013e-05, |
|
"loss": 2.1436, |
|
"step": 304000 |
|
}, |
|
{ |
|
"epoch": 83.24, |
|
"learning_rate": 4.584415584415585e-05, |
|
"loss": 2.148, |
|
"step": 304500 |
|
}, |
|
{ |
|
"epoch": 83.38, |
|
"learning_rate": 4.583732057416268e-05, |
|
"loss": 2.1559, |
|
"step": 305000 |
|
}, |
|
{ |
|
"epoch": 83.52, |
|
"learning_rate": 4.583048530416952e-05, |
|
"loss": 2.1686, |
|
"step": 305500 |
|
}, |
|
{ |
|
"epoch": 83.65, |
|
"learning_rate": 4.582365003417635e-05, |
|
"loss": 2.1744, |
|
"step": 306000 |
|
}, |
|
{ |
|
"epoch": 83.79, |
|
"learning_rate": 4.581681476418319e-05, |
|
"loss": 2.1742, |
|
"step": 306500 |
|
}, |
|
{ |
|
"epoch": 83.93, |
|
"learning_rate": 4.580997949419002e-05, |
|
"loss": 2.1881, |
|
"step": 307000 |
|
}, |
|
{ |
|
"epoch": 84.06, |
|
"learning_rate": 4.580314422419686e-05, |
|
"loss": 2.1643, |
|
"step": 307500 |
|
}, |
|
{ |
|
"epoch": 84.2, |
|
"learning_rate": 4.579630895420369e-05, |
|
"loss": 2.1451, |
|
"step": 308000 |
|
}, |
|
{ |
|
"epoch": 84.34, |
|
"learning_rate": 4.5789473684210527e-05, |
|
"loss": 2.1479, |
|
"step": 308500 |
|
}, |
|
{ |
|
"epoch": 84.47, |
|
"learning_rate": 4.578263841421737e-05, |
|
"loss": 2.1579, |
|
"step": 309000 |
|
}, |
|
{ |
|
"epoch": 84.61, |
|
"learning_rate": 4.57758031442242e-05, |
|
"loss": 2.1608, |
|
"step": 309500 |
|
}, |
|
{ |
|
"epoch": 84.75, |
|
"learning_rate": 4.576896787423103e-05, |
|
"loss": 2.1697, |
|
"step": 310000 |
|
}, |
|
{ |
|
"epoch": 84.88, |
|
"learning_rate": 4.5762132604237866e-05, |
|
"loss": 2.1809, |
|
"step": 310500 |
|
}, |
|
{ |
|
"epoch": 85.02, |
|
"learning_rate": 4.575529733424471e-05, |
|
"loss": 2.1723, |
|
"step": 311000 |
|
}, |
|
{ |
|
"epoch": 85.16, |
|
"learning_rate": 4.574846206425154e-05, |
|
"loss": 2.1286, |
|
"step": 311500 |
|
}, |
|
{ |
|
"epoch": 85.29, |
|
"learning_rate": 4.574162679425838e-05, |
|
"loss": 2.1458, |
|
"step": 312000 |
|
}, |
|
{ |
|
"epoch": 85.43, |
|
"learning_rate": 4.5734791524265205e-05, |
|
"loss": 2.1496, |
|
"step": 312500 |
|
}, |
|
{ |
|
"epoch": 85.57, |
|
"learning_rate": 4.5727956254272047e-05, |
|
"loss": 2.1572, |
|
"step": 313000 |
|
}, |
|
{ |
|
"epoch": 85.7, |
|
"learning_rate": 4.572112098427888e-05, |
|
"loss": 2.1648, |
|
"step": 313500 |
|
}, |
|
{ |
|
"epoch": 85.84, |
|
"learning_rate": 4.5714285714285716e-05, |
|
"loss": 2.1736, |
|
"step": 314000 |
|
}, |
|
{ |
|
"epoch": 85.98, |
|
"learning_rate": 4.570745044429256e-05, |
|
"loss": 2.1776, |
|
"step": 314500 |
|
}, |
|
{ |
|
"epoch": 86.11, |
|
"learning_rate": 4.5700615174299386e-05, |
|
"loss": 2.1304, |
|
"step": 315000 |
|
}, |
|
{ |
|
"epoch": 86.25, |
|
"learning_rate": 4.569377990430622e-05, |
|
"loss": 2.1302, |
|
"step": 315500 |
|
}, |
|
{ |
|
"epoch": 86.39, |
|
"learning_rate": 4.5686944634313055e-05, |
|
"loss": 2.1441, |
|
"step": 316000 |
|
}, |
|
{ |
|
"epoch": 86.52, |
|
"learning_rate": 4.56801093643199e-05, |
|
"loss": 2.1551, |
|
"step": 316500 |
|
}, |
|
{ |
|
"epoch": 86.66, |
|
"learning_rate": 4.567327409432673e-05, |
|
"loss": 2.1582, |
|
"step": 317000 |
|
}, |
|
{ |
|
"epoch": 86.8, |
|
"learning_rate": 4.566643882433356e-05, |
|
"loss": 2.1666, |
|
"step": 317500 |
|
}, |
|
{ |
|
"epoch": 86.93, |
|
"learning_rate": 4.5659603554340394e-05, |
|
"loss": 2.1599, |
|
"step": 318000 |
|
}, |
|
{ |
|
"epoch": 87.07, |
|
"learning_rate": 4.5652768284347236e-05, |
|
"loss": 2.1394, |
|
"step": 318500 |
|
}, |
|
{ |
|
"epoch": 87.21, |
|
"learning_rate": 4.564593301435407e-05, |
|
"loss": 2.1233, |
|
"step": 319000 |
|
}, |
|
{ |
|
"epoch": 87.34, |
|
"learning_rate": 4.5639097744360906e-05, |
|
"loss": 2.1365, |
|
"step": 319500 |
|
}, |
|
{ |
|
"epoch": 87.48, |
|
"learning_rate": 4.563226247436774e-05, |
|
"loss": 2.1441, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 87.62, |
|
"learning_rate": 4.5625427204374575e-05, |
|
"loss": 2.1482, |
|
"step": 320500 |
|
}, |
|
{ |
|
"epoch": 87.75, |
|
"learning_rate": 4.561859193438141e-05, |
|
"loss": 2.1599, |
|
"step": 321000 |
|
}, |
|
{ |
|
"epoch": 87.89, |
|
"learning_rate": 4.5611756664388245e-05, |
|
"loss": 2.1559, |
|
"step": 321500 |
|
}, |
|
{ |
|
"epoch": 88.03, |
|
"learning_rate": 4.560492139439508e-05, |
|
"loss": 2.1513, |
|
"step": 322000 |
|
}, |
|
{ |
|
"epoch": 88.16, |
|
"learning_rate": 4.5598086124401914e-05, |
|
"loss": 2.115, |
|
"step": 322500 |
|
}, |
|
{ |
|
"epoch": 88.3, |
|
"learning_rate": 4.559125085440875e-05, |
|
"loss": 2.1336, |
|
"step": 323000 |
|
}, |
|
{ |
|
"epoch": 88.44, |
|
"learning_rate": 4.5584415584415584e-05, |
|
"loss": 2.1394, |
|
"step": 323500 |
|
}, |
|
{ |
|
"epoch": 88.57, |
|
"learning_rate": 4.5577580314422426e-05, |
|
"loss": 2.1366, |
|
"step": 324000 |
|
}, |
|
{ |
|
"epoch": 88.71, |
|
"learning_rate": 4.557074504442926e-05, |
|
"loss": 2.1424, |
|
"step": 324500 |
|
}, |
|
{ |
|
"epoch": 88.85, |
|
"learning_rate": 4.5563909774436095e-05, |
|
"loss": 2.151, |
|
"step": 325000 |
|
}, |
|
{ |
|
"epoch": 88.98, |
|
"learning_rate": 4.555707450444292e-05, |
|
"loss": 2.1611, |
|
"step": 325500 |
|
}, |
|
{ |
|
"epoch": 89.12, |
|
"learning_rate": 4.5550239234449765e-05, |
|
"loss": 2.1216, |
|
"step": 326000 |
|
}, |
|
{ |
|
"epoch": 89.26, |
|
"learning_rate": 4.55434039644566e-05, |
|
"loss": 2.1112, |
|
"step": 326500 |
|
}, |
|
{ |
|
"epoch": 89.39, |
|
"learning_rate": 4.5536568694463434e-05, |
|
"loss": 2.1246, |
|
"step": 327000 |
|
}, |
|
{ |
|
"epoch": 89.53, |
|
"learning_rate": 4.552973342447027e-05, |
|
"loss": 2.1361, |
|
"step": 327500 |
|
}, |
|
{ |
|
"epoch": 89.67, |
|
"learning_rate": 4.5522898154477104e-05, |
|
"loss": 2.1375, |
|
"step": 328000 |
|
}, |
|
{ |
|
"epoch": 89.8, |
|
"learning_rate": 4.551606288448394e-05, |
|
"loss": 2.1438, |
|
"step": 328500 |
|
}, |
|
{ |
|
"epoch": 89.94, |
|
"learning_rate": 4.5509227614490773e-05, |
|
"loss": 2.1525, |
|
"step": 329000 |
|
}, |
|
{ |
|
"epoch": 90.08, |
|
"learning_rate": 4.550239234449761e-05, |
|
"loss": 2.127, |
|
"step": 329500 |
|
}, |
|
{ |
|
"epoch": 90.21, |
|
"learning_rate": 4.549555707450445e-05, |
|
"loss": 2.1043, |
|
"step": 330000 |
|
}, |
|
{ |
|
"epoch": 90.35, |
|
"learning_rate": 4.548872180451128e-05, |
|
"loss": 2.1148, |
|
"step": 330500 |
|
}, |
|
{ |
|
"epoch": 90.49, |
|
"learning_rate": 4.548188653451811e-05, |
|
"loss": 2.1282, |
|
"step": 331000 |
|
}, |
|
{ |
|
"epoch": 90.62, |
|
"learning_rate": 4.547505126452495e-05, |
|
"loss": 2.1347, |
|
"step": 331500 |
|
}, |
|
{ |
|
"epoch": 90.76, |
|
"learning_rate": 4.546821599453179e-05, |
|
"loss": 2.1465, |
|
"step": 332000 |
|
}, |
|
{ |
|
"epoch": 90.9, |
|
"learning_rate": 4.5461380724538624e-05, |
|
"loss": 2.1457, |
|
"step": 332500 |
|
}, |
|
{ |
|
"epoch": 91.03, |
|
"learning_rate": 4.545454545454546e-05, |
|
"loss": 2.1369, |
|
"step": 333000 |
|
}, |
|
{ |
|
"epoch": 91.17, |
|
"learning_rate": 4.544771018455229e-05, |
|
"loss": 2.0965, |
|
"step": 333500 |
|
}, |
|
{ |
|
"epoch": 91.31, |
|
"learning_rate": 4.544087491455913e-05, |
|
"loss": 2.1165, |
|
"step": 334000 |
|
}, |
|
{ |
|
"epoch": 91.44, |
|
"learning_rate": 4.543403964456596e-05, |
|
"loss": 2.1213, |
|
"step": 334500 |
|
}, |
|
{ |
|
"epoch": 91.58, |
|
"learning_rate": 4.54272043745728e-05, |
|
"loss": 2.1286, |
|
"step": 335000 |
|
}, |
|
{ |
|
"epoch": 91.72, |
|
"learning_rate": 4.542036910457963e-05, |
|
"loss": 2.1284, |
|
"step": 335500 |
|
}, |
|
{ |
|
"epoch": 91.85, |
|
"learning_rate": 4.541353383458647e-05, |
|
"loss": 2.1407, |
|
"step": 336000 |
|
}, |
|
{ |
|
"epoch": 91.99, |
|
"learning_rate": 4.54066985645933e-05, |
|
"loss": 2.1417, |
|
"step": 336500 |
|
}, |
|
{ |
|
"epoch": 92.13, |
|
"learning_rate": 4.539986329460014e-05, |
|
"loss": 2.0955, |
|
"step": 337000 |
|
}, |
|
{ |
|
"epoch": 92.26, |
|
"learning_rate": 4.539302802460698e-05, |
|
"loss": 2.0994, |
|
"step": 337500 |
|
}, |
|
{ |
|
"epoch": 92.4, |
|
"learning_rate": 4.538619275461381e-05, |
|
"loss": 2.1162, |
|
"step": 338000 |
|
}, |
|
{ |
|
"epoch": 92.54, |
|
"learning_rate": 4.537935748462064e-05, |
|
"loss": 2.1158, |
|
"step": 338500 |
|
}, |
|
{ |
|
"epoch": 92.67, |
|
"learning_rate": 4.5372522214627476e-05, |
|
"loss": 2.1289, |
|
"step": 339000 |
|
}, |
|
{ |
|
"epoch": 92.81, |
|
"learning_rate": 4.536568694463432e-05, |
|
"loss": 2.1288, |
|
"step": 339500 |
|
}, |
|
{ |
|
"epoch": 92.95, |
|
"learning_rate": 4.535885167464115e-05, |
|
"loss": 2.1417, |
|
"step": 340000 |
|
}, |
|
{ |
|
"epoch": 93.08, |
|
"learning_rate": 4.535201640464799e-05, |
|
"loss": 2.1089, |
|
"step": 340500 |
|
}, |
|
{ |
|
"epoch": 93.22, |
|
"learning_rate": 4.5345181134654815e-05, |
|
"loss": 2.0924, |
|
"step": 341000 |
|
}, |
|
{ |
|
"epoch": 93.36, |
|
"learning_rate": 4.533834586466166e-05, |
|
"loss": 2.1037, |
|
"step": 341500 |
|
}, |
|
{ |
|
"epoch": 93.49, |
|
"learning_rate": 4.533151059466849e-05, |
|
"loss": 2.1097, |
|
"step": 342000 |
|
}, |
|
{ |
|
"epoch": 93.63, |
|
"learning_rate": 4.5324675324675326e-05, |
|
"loss": 2.1164, |
|
"step": 342500 |
|
}, |
|
{ |
|
"epoch": 93.77, |
|
"learning_rate": 4.531784005468217e-05, |
|
"loss": 2.1277, |
|
"step": 343000 |
|
}, |
|
{ |
|
"epoch": 93.9, |
|
"learning_rate": 4.5311004784688996e-05, |
|
"loss": 2.1281, |
|
"step": 343500 |
|
}, |
|
{ |
|
"epoch": 94.04, |
|
"learning_rate": 4.530416951469583e-05, |
|
"loss": 2.1238, |
|
"step": 344000 |
|
}, |
|
{ |
|
"epoch": 94.18, |
|
"learning_rate": 4.5297334244702666e-05, |
|
"loss": 2.0879, |
|
"step": 344500 |
|
}, |
|
{ |
|
"epoch": 94.31, |
|
"learning_rate": 4.529049897470951e-05, |
|
"loss": 2.0903, |
|
"step": 345000 |
|
}, |
|
{ |
|
"epoch": 94.45, |
|
"learning_rate": 4.528366370471634e-05, |
|
"loss": 2.1058, |
|
"step": 345500 |
|
}, |
|
{ |
|
"epoch": 94.59, |
|
"learning_rate": 4.527682843472317e-05, |
|
"loss": 2.1162, |
|
"step": 346000 |
|
}, |
|
{ |
|
"epoch": 94.72, |
|
"learning_rate": 4.5269993164730005e-05, |
|
"loss": 2.1162, |
|
"step": 346500 |
|
}, |
|
{ |
|
"epoch": 94.86, |
|
"learning_rate": 4.5263157894736846e-05, |
|
"loss": 2.1276, |
|
"step": 347000 |
|
}, |
|
{ |
|
"epoch": 95.0, |
|
"learning_rate": 4.525632262474368e-05, |
|
"loss": 2.1259, |
|
"step": 347500 |
|
}, |
|
{ |
|
"epoch": 95.13, |
|
"learning_rate": 4.5249487354750516e-05, |
|
"loss": 2.0818, |
|
"step": 348000 |
|
}, |
|
{ |
|
"epoch": 95.27, |
|
"learning_rate": 4.524265208475735e-05, |
|
"loss": 2.0907, |
|
"step": 348500 |
|
}, |
|
{ |
|
"epoch": 95.41, |
|
"learning_rate": 4.5235816814764186e-05, |
|
"loss": 2.0926, |
|
"step": 349000 |
|
}, |
|
{ |
|
"epoch": 95.54, |
|
"learning_rate": 4.522898154477102e-05, |
|
"loss": 2.1014, |
|
"step": 349500 |
|
}, |
|
{ |
|
"epoch": 95.68, |
|
"learning_rate": 4.5222146274777855e-05, |
|
"loss": 2.1133, |
|
"step": 350000 |
|
}, |
|
{ |
|
"epoch": 95.82, |
|
"learning_rate": 4.521531100478469e-05, |
|
"loss": 2.1189, |
|
"step": 350500 |
|
}, |
|
{ |
|
"epoch": 95.95, |
|
"learning_rate": 4.5208475734791525e-05, |
|
"loss": 2.1202, |
|
"step": 351000 |
|
}, |
|
{ |
|
"epoch": 96.09, |
|
"learning_rate": 4.520164046479836e-05, |
|
"loss": 2.0911, |
|
"step": 351500 |
|
}, |
|
{ |
|
"epoch": 96.23, |
|
"learning_rate": 4.5194805194805194e-05, |
|
"loss": 2.0824, |
|
"step": 352000 |
|
}, |
|
{ |
|
"epoch": 96.36, |
|
"learning_rate": 4.5187969924812036e-05, |
|
"loss": 2.0898, |
|
"step": 352500 |
|
}, |
|
{ |
|
"epoch": 96.5, |
|
"learning_rate": 4.518113465481887e-05, |
|
"loss": 2.0946, |
|
"step": 353000 |
|
}, |
|
{ |
|
"epoch": 96.64, |
|
"learning_rate": 4.5174299384825705e-05, |
|
"loss": 2.1039, |
|
"step": 353500 |
|
}, |
|
{ |
|
"epoch": 96.77, |
|
"learning_rate": 4.5167464114832533e-05, |
|
"loss": 2.1076, |
|
"step": 354000 |
|
}, |
|
{ |
|
"epoch": 96.91, |
|
"learning_rate": 4.5160628844839375e-05, |
|
"loss": 2.1183, |
|
"step": 354500 |
|
}, |
|
{ |
|
"epoch": 97.05, |
|
"learning_rate": 4.515379357484621e-05, |
|
"loss": 2.1028, |
|
"step": 355000 |
|
}, |
|
{ |
|
"epoch": 97.18, |
|
"learning_rate": 4.5146958304853045e-05, |
|
"loss": 2.0748, |
|
"step": 355500 |
|
}, |
|
{ |
|
"epoch": 97.32, |
|
"learning_rate": 4.514012303485988e-05, |
|
"loss": 2.082, |
|
"step": 356000 |
|
}, |
|
{ |
|
"epoch": 97.46, |
|
"learning_rate": 4.5133287764866714e-05, |
|
"loss": 2.0929, |
|
"step": 356500 |
|
}, |
|
{ |
|
"epoch": 97.59, |
|
"learning_rate": 4.512645249487355e-05, |
|
"loss": 2.0978, |
|
"step": 357000 |
|
}, |
|
{ |
|
"epoch": 97.73, |
|
"learning_rate": 4.5119617224880384e-05, |
|
"loss": 2.1015, |
|
"step": 357500 |
|
}, |
|
{ |
|
"epoch": 97.87, |
|
"learning_rate": 4.511278195488722e-05, |
|
"loss": 2.1031, |
|
"step": 358000 |
|
}, |
|
{ |
|
"epoch": 98.0, |
|
"learning_rate": 4.510594668489406e-05, |
|
"loss": 2.1172, |
|
"step": 358500 |
|
}, |
|
{ |
|
"epoch": 98.14, |
|
"learning_rate": 4.509911141490089e-05, |
|
"loss": 2.0695, |
|
"step": 359000 |
|
}, |
|
{ |
|
"epoch": 98.28, |
|
"learning_rate": 4.509227614490772e-05, |
|
"loss": 2.0733, |
|
"step": 359500 |
|
}, |
|
{ |
|
"epoch": 98.41, |
|
"learning_rate": 4.5085440874914565e-05, |
|
"loss": 2.0832, |
|
"step": 360000 |
|
}, |
|
{ |
|
"epoch": 98.55, |
|
"learning_rate": 4.50786056049214e-05, |
|
"loss": 0.0029, |
|
"step": 360500 |
|
}, |
|
{ |
|
"epoch": 98.69, |
|
"learning_rate": 4.5071770334928234e-05, |
|
"loss": 2.0982, |
|
"step": 361000 |
|
}, |
|
{ |
|
"epoch": 98.82, |
|
"learning_rate": 4.506493506493506e-05, |
|
"loss": 2.0996, |
|
"step": 361500 |
|
}, |
|
{ |
|
"epoch": 98.96, |
|
"learning_rate": 4.5058099794941904e-05, |
|
"loss": 2.1084, |
|
"step": 362000 |
|
}, |
|
{ |
|
"epoch": 99.1, |
|
"learning_rate": 4.505126452494874e-05, |
|
"loss": 2.0753, |
|
"step": 362500 |
|
}, |
|
{ |
|
"epoch": 99.23, |
|
"learning_rate": 4.504442925495557e-05, |
|
"loss": 2.0645, |
|
"step": 363000 |
|
}, |
|
{ |
|
"epoch": 99.37, |
|
"learning_rate": 4.503759398496241e-05, |
|
"loss": 2.0722, |
|
"step": 363500 |
|
}, |
|
{ |
|
"epoch": 99.51, |
|
"learning_rate": 4.503075871496924e-05, |
|
"loss": 2.0849, |
|
"step": 364000 |
|
}, |
|
{ |
|
"epoch": 99.64, |
|
"learning_rate": 4.502392344497608e-05, |
|
"loss": 2.0903, |
|
"step": 364500 |
|
}, |
|
{ |
|
"epoch": 99.78, |
|
"learning_rate": 4.501708817498291e-05, |
|
"loss": 2.0973, |
|
"step": 365000 |
|
}, |
|
{ |
|
"epoch": 99.92, |
|
"learning_rate": 4.501025290498975e-05, |
|
"loss": 2.1043, |
|
"step": 365500 |
|
}, |
|
{ |
|
"epoch": 100.05, |
|
"learning_rate": 4.500341763499659e-05, |
|
"loss": 2.0809, |
|
"step": 366000 |
|
}, |
|
{ |
|
"epoch": 100.19, |
|
"learning_rate": 4.499658236500342e-05, |
|
"loss": 2.0633, |
|
"step": 366500 |
|
}, |
|
{ |
|
"epoch": 100.33, |
|
"learning_rate": 4.498974709501025e-05, |
|
"loss": 2.0694, |
|
"step": 367000 |
|
}, |
|
{ |
|
"epoch": 100.46, |
|
"learning_rate": 4.4982911825017086e-05, |
|
"loss": 2.0763, |
|
"step": 367500 |
|
}, |
|
{ |
|
"epoch": 100.6, |
|
"learning_rate": 4.497607655502393e-05, |
|
"loss": 2.0849, |
|
"step": 368000 |
|
}, |
|
{ |
|
"epoch": 100.74, |
|
"learning_rate": 4.496924128503076e-05, |
|
"loss": 2.0855, |
|
"step": 368500 |
|
}, |
|
{ |
|
"epoch": 100.87, |
|
"learning_rate": 4.49624060150376e-05, |
|
"loss": 2.0935, |
|
"step": 369000 |
|
}, |
|
{ |
|
"epoch": 101.01, |
|
"learning_rate": 4.495557074504443e-05, |
|
"loss": 2.1002, |
|
"step": 369500 |
|
}, |
|
{ |
|
"epoch": 101.15, |
|
"learning_rate": 4.494873547505127e-05, |
|
"loss": 2.0488, |
|
"step": 370000 |
|
}, |
|
{ |
|
"epoch": 101.28, |
|
"learning_rate": 4.49419002050581e-05, |
|
"loss": 2.0613, |
|
"step": 370500 |
|
}, |
|
{ |
|
"epoch": 101.42, |
|
"learning_rate": 4.493506493506494e-05, |
|
"loss": 2.0637, |
|
"step": 371000 |
|
}, |
|
{ |
|
"epoch": 101.56, |
|
"learning_rate": 4.492822966507177e-05, |
|
"loss": 2.0769, |
|
"step": 371500 |
|
}, |
|
{ |
|
"epoch": 101.69, |
|
"learning_rate": 4.4921394395078606e-05, |
|
"loss": 2.086, |
|
"step": 372000 |
|
}, |
|
{ |
|
"epoch": 101.83, |
|
"learning_rate": 4.491455912508544e-05, |
|
"loss": 2.0914, |
|
"step": 372500 |
|
}, |
|
{ |
|
"epoch": 101.97, |
|
"learning_rate": 4.4907723855092276e-05, |
|
"loss": 2.0974, |
|
"step": 373000 |
|
}, |
|
{ |
|
"epoch": 102.1, |
|
"learning_rate": 4.490088858509912e-05, |
|
"loss": 2.0534, |
|
"step": 373500 |
|
}, |
|
{ |
|
"epoch": 102.24, |
|
"learning_rate": 4.489405331510595e-05, |
|
"loss": 2.0527, |
|
"step": 374000 |
|
}, |
|
{ |
|
"epoch": 102.38, |
|
"learning_rate": 4.488721804511278e-05, |
|
"loss": 2.0637, |
|
"step": 374500 |
|
}, |
|
{ |
|
"epoch": 102.52, |
|
"learning_rate": 4.4880382775119615e-05, |
|
"loss": 2.0692, |
|
"step": 375000 |
|
}, |
|
{ |
|
"epoch": 102.65, |
|
"learning_rate": 4.487354750512646e-05, |
|
"loss": 2.078, |
|
"step": 375500 |
|
}, |
|
{ |
|
"epoch": 102.79, |
|
"learning_rate": 4.486671223513329e-05, |
|
"loss": 2.0788, |
|
"step": 376000 |
|
}, |
|
{ |
|
"epoch": 102.93, |
|
"learning_rate": 4.4859876965140126e-05, |
|
"loss": 2.089, |
|
"step": 376500 |
|
}, |
|
{ |
|
"epoch": 103.06, |
|
"learning_rate": 4.485304169514696e-05, |
|
"loss": 2.0704, |
|
"step": 377000 |
|
}, |
|
{ |
|
"epoch": 103.2, |
|
"learning_rate": 4.4846206425153796e-05, |
|
"loss": 2.0524, |
|
"step": 377500 |
|
}, |
|
{ |
|
"epoch": 103.34, |
|
"learning_rate": 4.483937115516063e-05, |
|
"loss": 2.0481, |
|
"step": 378000 |
|
}, |
|
{ |
|
"epoch": 103.47, |
|
"learning_rate": 4.4832535885167465e-05, |
|
"loss": 2.0607, |
|
"step": 378500 |
|
}, |
|
{ |
|
"epoch": 103.61, |
|
"learning_rate": 4.482570061517431e-05, |
|
"loss": 2.0742, |
|
"step": 379000 |
|
}, |
|
{ |
|
"epoch": 103.75, |
|
"learning_rate": 4.4818865345181135e-05, |
|
"loss": 2.0698, |
|
"step": 379500 |
|
}, |
|
{ |
|
"epoch": 103.88, |
|
"learning_rate": 4.481203007518797e-05, |
|
"loss": 2.0826, |
|
"step": 380000 |
|
}, |
|
{ |
|
"epoch": 104.02, |
|
"learning_rate": 4.4805194805194805e-05, |
|
"loss": 2.0789, |
|
"step": 380500 |
|
}, |
|
{ |
|
"epoch": 104.16, |
|
"learning_rate": 4.4798359535201646e-05, |
|
"loss": 2.0342, |
|
"step": 381000 |
|
}, |
|
{ |
|
"epoch": 104.29, |
|
"learning_rate": 4.479152426520848e-05, |
|
"loss": 2.0422, |
|
"step": 381500 |
|
}, |
|
{ |
|
"epoch": 104.43, |
|
"learning_rate": 4.4784688995215316e-05, |
|
"loss": 2.0582, |
|
"step": 382000 |
|
}, |
|
{ |
|
"epoch": 104.57, |
|
"learning_rate": 4.4777853725222144e-05, |
|
"loss": 2.0651, |
|
"step": 382500 |
|
}, |
|
{ |
|
"epoch": 104.7, |
|
"learning_rate": 4.4771018455228985e-05, |
|
"loss": 2.0687, |
|
"step": 383000 |
|
}, |
|
{ |
|
"epoch": 104.84, |
|
"learning_rate": 4.476418318523582e-05, |
|
"loss": 2.0763, |
|
"step": 383500 |
|
}, |
|
{ |
|
"epoch": 104.98, |
|
"learning_rate": 4.4757347915242655e-05, |
|
"loss": 2.0866, |
|
"step": 384000 |
|
}, |
|
{ |
|
"epoch": 105.11, |
|
"learning_rate": 4.475051264524949e-05, |
|
"loss": 2.0387, |
|
"step": 384500 |
|
}, |
|
{ |
|
"epoch": 105.25, |
|
"learning_rate": 4.4743677375256325e-05, |
|
"loss": 2.0398, |
|
"step": 385000 |
|
}, |
|
{ |
|
"epoch": 105.39, |
|
"learning_rate": 4.473684210526316e-05, |
|
"loss": 2.0542, |
|
"step": 385500 |
|
}, |
|
{ |
|
"epoch": 105.52, |
|
"learning_rate": 4.4730006835269994e-05, |
|
"loss": 2.0551, |
|
"step": 386000 |
|
}, |
|
{ |
|
"epoch": 105.66, |
|
"learning_rate": 4.472317156527683e-05, |
|
"loss": 2.0639, |
|
"step": 386500 |
|
}, |
|
{ |
|
"epoch": 105.8, |
|
"learning_rate": 4.471633629528367e-05, |
|
"loss": 2.0706, |
|
"step": 387000 |
|
}, |
|
{ |
|
"epoch": 105.93, |
|
"learning_rate": 4.47095010252905e-05, |
|
"loss": 2.0742, |
|
"step": 387500 |
|
}, |
|
{ |
|
"epoch": 106.07, |
|
"learning_rate": 4.470266575529733e-05, |
|
"loss": 2.0517, |
|
"step": 388000 |
|
}, |
|
{ |
|
"epoch": 106.21, |
|
"learning_rate": 4.4695830485304175e-05, |
|
"loss": 2.0298, |
|
"step": 388500 |
|
}, |
|
{ |
|
"epoch": 106.34, |
|
"learning_rate": 4.468899521531101e-05, |
|
"loss": 2.0385, |
|
"step": 389000 |
|
}, |
|
{ |
|
"epoch": 106.48, |
|
"learning_rate": 4.4682159945317844e-05, |
|
"loss": 2.051, |
|
"step": 389500 |
|
}, |
|
{ |
|
"epoch": 106.62, |
|
"learning_rate": 4.467532467532467e-05, |
|
"loss": 2.0592, |
|
"step": 390000 |
|
}, |
|
{ |
|
"epoch": 106.75, |
|
"learning_rate": 4.4668489405331514e-05, |
|
"loss": 2.0676, |
|
"step": 390500 |
|
}, |
|
{ |
|
"epoch": 106.89, |
|
"learning_rate": 4.466165413533835e-05, |
|
"loss": 2.0695, |
|
"step": 391000 |
|
}, |
|
{ |
|
"epoch": 107.03, |
|
"learning_rate": 4.4654818865345184e-05, |
|
"loss": 2.0598, |
|
"step": 391500 |
|
}, |
|
{ |
|
"epoch": 107.16, |
|
"learning_rate": 4.464798359535202e-05, |
|
"loss": 2.024, |
|
"step": 392000 |
|
}, |
|
{ |
|
"epoch": 107.3, |
|
"learning_rate": 4.464114832535885e-05, |
|
"loss": 2.0372, |
|
"step": 392500 |
|
}, |
|
{ |
|
"epoch": 107.44, |
|
"learning_rate": 4.463431305536569e-05, |
|
"loss": 2.0433, |
|
"step": 393000 |
|
}, |
|
{ |
|
"epoch": 107.57, |
|
"learning_rate": 4.462747778537252e-05, |
|
"loss": 2.0472, |
|
"step": 393500 |
|
}, |
|
{ |
|
"epoch": 107.71, |
|
"learning_rate": 4.462064251537936e-05, |
|
"loss": 2.0579, |
|
"step": 394000 |
|
}, |
|
{ |
|
"epoch": 107.85, |
|
"learning_rate": 4.46138072453862e-05, |
|
"loss": 2.0605, |
|
"step": 394500 |
|
}, |
|
{ |
|
"epoch": 107.98, |
|
"learning_rate": 4.460697197539303e-05, |
|
"loss": 2.0745, |
|
"step": 395000 |
|
}, |
|
{ |
|
"epoch": 108.12, |
|
"learning_rate": 4.460013670539986e-05, |
|
"loss": 2.026, |
|
"step": 395500 |
|
}, |
|
{ |
|
"epoch": 108.26, |
|
"learning_rate": 4.45933014354067e-05, |
|
"loss": 2.0251, |
|
"step": 396000 |
|
}, |
|
{ |
|
"epoch": 108.39, |
|
"learning_rate": 4.458646616541354e-05, |
|
"loss": 2.0438, |
|
"step": 396500 |
|
}, |
|
{ |
|
"epoch": 108.53, |
|
"learning_rate": 4.457963089542037e-05, |
|
"loss": 2.0407, |
|
"step": 397000 |
|
}, |
|
{ |
|
"epoch": 108.67, |
|
"learning_rate": 4.457279562542721e-05, |
|
"loss": 2.0477, |
|
"step": 397500 |
|
}, |
|
{ |
|
"epoch": 108.8, |
|
"learning_rate": 4.456596035543404e-05, |
|
"loss": 2.0536, |
|
"step": 398000 |
|
}, |
|
{ |
|
"epoch": 108.94, |
|
"learning_rate": 4.455912508544088e-05, |
|
"loss": 2.065, |
|
"step": 398500 |
|
}, |
|
{ |
|
"epoch": 109.08, |
|
"learning_rate": 4.455228981544771e-05, |
|
"loss": 2.0385, |
|
"step": 399000 |
|
}, |
|
{ |
|
"epoch": 109.21, |
|
"learning_rate": 4.454545454545455e-05, |
|
"loss": 2.0245, |
|
"step": 399500 |
|
}, |
|
{ |
|
"epoch": 109.35, |
|
"learning_rate": 4.453861927546138e-05, |
|
"loss": 2.0304, |
|
"step": 400000 |
|
}, |
|
{ |
|
"epoch": 109.49, |
|
"learning_rate": 4.453178400546822e-05, |
|
"loss": 2.0395, |
|
"step": 400500 |
|
}, |
|
{ |
|
"epoch": 109.62, |
|
"learning_rate": 4.452494873547505e-05, |
|
"loss": 2.0405, |
|
"step": 401000 |
|
}, |
|
{ |
|
"epoch": 109.76, |
|
"learning_rate": 4.4518113465481886e-05, |
|
"loss": 2.0475, |
|
"step": 401500 |
|
}, |
|
{ |
|
"epoch": 109.9, |
|
"learning_rate": 4.451127819548873e-05, |
|
"loss": 2.0538, |
|
"step": 402000 |
|
}, |
|
{ |
|
"epoch": 110.03, |
|
"learning_rate": 4.450444292549556e-05, |
|
"loss": 2.0518, |
|
"step": 402500 |
|
}, |
|
{ |
|
"epoch": 110.17, |
|
"learning_rate": 4.449760765550239e-05, |
|
"loss": 2.0132, |
|
"step": 403000 |
|
}, |
|
{ |
|
"epoch": 110.31, |
|
"learning_rate": 4.4490772385509225e-05, |
|
"loss": 2.0259, |
|
"step": 403500 |
|
}, |
|
{ |
|
"epoch": 110.44, |
|
"learning_rate": 4.448393711551607e-05, |
|
"loss": 2.0302, |
|
"step": 404000 |
|
}, |
|
{ |
|
"epoch": 110.58, |
|
"learning_rate": 4.44771018455229e-05, |
|
"loss": 2.0392, |
|
"step": 404500 |
|
}, |
|
{ |
|
"epoch": 110.72, |
|
"learning_rate": 4.4470266575529737e-05, |
|
"loss": 2.0429, |
|
"step": 405000 |
|
}, |
|
{ |
|
"epoch": 110.85, |
|
"learning_rate": 4.446343130553657e-05, |
|
"loss": 2.0444, |
|
"step": 405500 |
|
}, |
|
{ |
|
"epoch": 110.99, |
|
"learning_rate": 4.4456596035543406e-05, |
|
"loss": 2.0542, |
|
"step": 406000 |
|
}, |
|
{ |
|
"epoch": 111.13, |
|
"learning_rate": 4.444976076555024e-05, |
|
"loss": 2.0086, |
|
"step": 406500 |
|
}, |
|
{ |
|
"epoch": 111.26, |
|
"learning_rate": 4.4442925495557076e-05, |
|
"loss": 2.0216, |
|
"step": 407000 |
|
}, |
|
{ |
|
"epoch": 111.4, |
|
"learning_rate": 4.443609022556392e-05, |
|
"loss": 2.0197, |
|
"step": 407500 |
|
}, |
|
{ |
|
"epoch": 111.54, |
|
"learning_rate": 4.4429254955570745e-05, |
|
"loss": 2.032, |
|
"step": 408000 |
|
}, |
|
{ |
|
"epoch": 111.67, |
|
"learning_rate": 4.442241968557758e-05, |
|
"loss": 2.0413, |
|
"step": 408500 |
|
}, |
|
{ |
|
"epoch": 111.81, |
|
"learning_rate": 4.4415584415584415e-05, |
|
"loss": 2.0456, |
|
"step": 409000 |
|
}, |
|
{ |
|
"epoch": 111.95, |
|
"learning_rate": 4.4408749145591257e-05, |
|
"loss": 2.047, |
|
"step": 409500 |
|
}, |
|
{ |
|
"epoch": 112.08, |
|
"learning_rate": 4.440191387559809e-05, |
|
"loss": 2.0248, |
|
"step": 410000 |
|
}, |
|
{ |
|
"epoch": 112.22, |
|
"learning_rate": 4.4395078605604926e-05, |
|
"loss": 2.0108, |
|
"step": 410500 |
|
}, |
|
{ |
|
"epoch": 112.36, |
|
"learning_rate": 4.4388243335611754e-05, |
|
"loss": 2.0189, |
|
"step": 411000 |
|
}, |
|
{ |
|
"epoch": 112.49, |
|
"learning_rate": 4.4381408065618596e-05, |
|
"loss": 2.0227, |
|
"step": 411500 |
|
}, |
|
{ |
|
"epoch": 112.63, |
|
"learning_rate": 4.437457279562543e-05, |
|
"loss": 2.0325, |
|
"step": 412000 |
|
}, |
|
{ |
|
"epoch": 112.77, |
|
"learning_rate": 4.4367737525632265e-05, |
|
"loss": 2.041, |
|
"step": 412500 |
|
}, |
|
{ |
|
"epoch": 112.9, |
|
"learning_rate": 4.43609022556391e-05, |
|
"loss": 2.0367, |
|
"step": 413000 |
|
}, |
|
{ |
|
"epoch": 113.04, |
|
"learning_rate": 4.4354066985645935e-05, |
|
"loss": 2.0309, |
|
"step": 413500 |
|
}, |
|
{ |
|
"epoch": 113.18, |
|
"learning_rate": 4.434723171565277e-05, |
|
"loss": 1.9949, |
|
"step": 414000 |
|
}, |
|
{ |
|
"epoch": 113.31, |
|
"learning_rate": 4.4340396445659604e-05, |
|
"loss": 2.0091, |
|
"step": 414500 |
|
}, |
|
{ |
|
"epoch": 113.45, |
|
"learning_rate": 4.433356117566644e-05, |
|
"loss": 2.0185, |
|
"step": 415000 |
|
}, |
|
{ |
|
"epoch": 113.59, |
|
"learning_rate": 4.432672590567328e-05, |
|
"loss": 2.0322, |
|
"step": 415500 |
|
}, |
|
{ |
|
"epoch": 113.72, |
|
"learning_rate": 4.431989063568011e-05, |
|
"loss": 2.0378, |
|
"step": 416000 |
|
}, |
|
{ |
|
"epoch": 113.86, |
|
"learning_rate": 4.4313055365686944e-05, |
|
"loss": 2.0344, |
|
"step": 416500 |
|
}, |
|
{ |
|
"epoch": 114.0, |
|
"learning_rate": 4.4306220095693785e-05, |
|
"loss": 2.0438, |
|
"step": 417000 |
|
}, |
|
{ |
|
"epoch": 114.13, |
|
"learning_rate": 4.429938482570062e-05, |
|
"loss": 1.9927, |
|
"step": 417500 |
|
}, |
|
{ |
|
"epoch": 114.27, |
|
"learning_rate": 4.4292549555707455e-05, |
|
"loss": 1.9996, |
|
"step": 418000 |
|
}, |
|
{ |
|
"epoch": 114.41, |
|
"learning_rate": 4.428571428571428e-05, |
|
"loss": 2.0126, |
|
"step": 418500 |
|
}, |
|
{ |
|
"epoch": 114.54, |
|
"learning_rate": 4.4278879015721124e-05, |
|
"loss": 2.0162, |
|
"step": 419000 |
|
}, |
|
{ |
|
"epoch": 114.68, |
|
"learning_rate": 4.427204374572796e-05, |
|
"loss": 2.0288, |
|
"step": 419500 |
|
}, |
|
{ |
|
"epoch": 114.82, |
|
"learning_rate": 4.4265208475734794e-05, |
|
"loss": 2.035, |
|
"step": 420000 |
|
}, |
|
{ |
|
"epoch": 114.95, |
|
"learning_rate": 4.425837320574163e-05, |
|
"loss": 2.0395, |
|
"step": 420500 |
|
}, |
|
{ |
|
"epoch": 115.09, |
|
"learning_rate": 4.4251537935748464e-05, |
|
"loss": 2.0078, |
|
"step": 421000 |
|
}, |
|
{ |
|
"epoch": 115.23, |
|
"learning_rate": 4.42447026657553e-05, |
|
"loss": 2.0002, |
|
"step": 421500 |
|
}, |
|
{ |
|
"epoch": 115.36, |
|
"learning_rate": 4.423786739576213e-05, |
|
"loss": 2.0034, |
|
"step": 422000 |
|
}, |
|
{ |
|
"epoch": 115.5, |
|
"learning_rate": 4.423103212576897e-05, |
|
"loss": 2.0088, |
|
"step": 422500 |
|
}, |
|
{ |
|
"epoch": 115.64, |
|
"learning_rate": 4.422419685577581e-05, |
|
"loss": 2.0203, |
|
"step": 423000 |
|
}, |
|
{ |
|
"epoch": 115.77, |
|
"learning_rate": 4.421736158578264e-05, |
|
"loss": 2.0238, |
|
"step": 423500 |
|
}, |
|
{ |
|
"epoch": 115.91, |
|
"learning_rate": 4.421052631578947e-05, |
|
"loss": 2.0346, |
|
"step": 424000 |
|
}, |
|
{ |
|
"epoch": 116.05, |
|
"learning_rate": 4.4203691045796314e-05, |
|
"loss": 2.0185, |
|
"step": 424500 |
|
}, |
|
{ |
|
"epoch": 116.18, |
|
"learning_rate": 4.419685577580315e-05, |
|
"loss": 1.9889, |
|
"step": 425000 |
|
}, |
|
{ |
|
"epoch": 116.32, |
|
"learning_rate": 4.4190020505809983e-05, |
|
"loss": 1.9968, |
|
"step": 425500 |
|
}, |
|
{ |
|
"epoch": 116.46, |
|
"learning_rate": 4.418318523581682e-05, |
|
"loss": 2.0024, |
|
"step": 426000 |
|
}, |
|
{ |
|
"epoch": 116.59, |
|
"learning_rate": 4.417634996582365e-05, |
|
"loss": 2.0146, |
|
"step": 426500 |
|
}, |
|
{ |
|
"epoch": 116.73, |
|
"learning_rate": 4.416951469583049e-05, |
|
"loss": 2.0248, |
|
"step": 427000 |
|
}, |
|
{ |
|
"epoch": 116.87, |
|
"learning_rate": 4.416267942583732e-05, |
|
"loss": 2.0304, |
|
"step": 427500 |
|
}, |
|
{ |
|
"epoch": 117.0, |
|
"learning_rate": 4.415584415584416e-05, |
|
"loss": 2.027, |
|
"step": 428000 |
|
}, |
|
{ |
|
"epoch": 117.14, |
|
"learning_rate": 4.414900888585099e-05, |
|
"loss": 1.9796, |
|
"step": 428500 |
|
}, |
|
{ |
|
"epoch": 117.28, |
|
"learning_rate": 4.414217361585783e-05, |
|
"loss": 1.9886, |
|
"step": 429000 |
|
}, |
|
{ |
|
"epoch": 117.41, |
|
"learning_rate": 4.413533834586466e-05, |
|
"loss": 2.0039, |
|
"step": 429500 |
|
}, |
|
{ |
|
"epoch": 117.55, |
|
"learning_rate": 4.4128503075871497e-05, |
|
"loss": 2.0054, |
|
"step": 430000 |
|
}, |
|
{ |
|
"epoch": 117.69, |
|
"learning_rate": 4.412166780587834e-05, |
|
"loss": 2.0173, |
|
"step": 430500 |
|
}, |
|
{ |
|
"epoch": 117.82, |
|
"learning_rate": 4.411483253588517e-05, |
|
"loss": 2.0238, |
|
"step": 431000 |
|
}, |
|
{ |
|
"epoch": 117.96, |
|
"learning_rate": 4.4107997265892e-05, |
|
"loss": 2.0268, |
|
"step": 431500 |
|
}, |
|
{ |
|
"epoch": 118.1, |
|
"learning_rate": 4.4101161995898836e-05, |
|
"loss": 1.9865, |
|
"step": 432000 |
|
}, |
|
{ |
|
"epoch": 118.23, |
|
"learning_rate": 4.409432672590568e-05, |
|
"loss": 1.9889, |
|
"step": 432500 |
|
}, |
|
{ |
|
"epoch": 118.37, |
|
"learning_rate": 4.408749145591251e-05, |
|
"loss": 1.9964, |
|
"step": 433000 |
|
}, |
|
{ |
|
"epoch": 118.51, |
|
"learning_rate": 4.408065618591935e-05, |
|
"loss": 2.0049, |
|
"step": 433500 |
|
}, |
|
{ |
|
"epoch": 118.64, |
|
"learning_rate": 4.407382091592618e-05, |
|
"loss": 2.0078, |
|
"step": 434000 |
|
}, |
|
{ |
|
"epoch": 118.78, |
|
"learning_rate": 4.4066985645933017e-05, |
|
"loss": 2.0146, |
|
"step": 434500 |
|
}, |
|
{ |
|
"epoch": 118.92, |
|
"learning_rate": 4.406015037593985e-05, |
|
"loss": 2.016, |
|
"step": 435000 |
|
}, |
|
{ |
|
"epoch": 119.05, |
|
"learning_rate": 4.4053315105946686e-05, |
|
"loss": 2.0019, |
|
"step": 435500 |
|
}, |
|
{ |
|
"epoch": 119.19, |
|
"learning_rate": 4.404647983595353e-05, |
|
"loss": 1.9757, |
|
"step": 436000 |
|
}, |
|
{ |
|
"epoch": 119.33, |
|
"learning_rate": 4.4039644565960356e-05, |
|
"loss": 1.9861, |
|
"step": 436500 |
|
}, |
|
{ |
|
"epoch": 119.46, |
|
"learning_rate": 4.403280929596719e-05, |
|
"loss": 1.9953, |
|
"step": 437000 |
|
}, |
|
{ |
|
"epoch": 119.6, |
|
"learning_rate": 4.4025974025974025e-05, |
|
"loss": 2.005, |
|
"step": 437500 |
|
}, |
|
{ |
|
"epoch": 119.74, |
|
"learning_rate": 4.401913875598087e-05, |
|
"loss": 2.0055, |
|
"step": 438000 |
|
}, |
|
{ |
|
"epoch": 119.87, |
|
"learning_rate": 4.40123034859877e-05, |
|
"loss": 2.0174, |
|
"step": 438500 |
|
}, |
|
{ |
|
"epoch": 120.01, |
|
"learning_rate": 4.4005468215994536e-05, |
|
"loss": 2.0204, |
|
"step": 439000 |
|
}, |
|
{ |
|
"epoch": 120.15, |
|
"learning_rate": 4.3998632946001364e-05, |
|
"loss": 1.9663, |
|
"step": 439500 |
|
}, |
|
{ |
|
"epoch": 120.28, |
|
"learning_rate": 4.3991797676008206e-05, |
|
"loss": 1.9818, |
|
"step": 440000 |
|
}, |
|
{ |
|
"epoch": 120.42, |
|
"learning_rate": 4.398496240601504e-05, |
|
"loss": 1.9914, |
|
"step": 440500 |
|
}, |
|
{ |
|
"epoch": 120.56, |
|
"learning_rate": 4.3978127136021876e-05, |
|
"loss": 1.9949, |
|
"step": 441000 |
|
}, |
|
{ |
|
"epoch": 120.69, |
|
"learning_rate": 4.397129186602871e-05, |
|
"loss": 2.0031, |
|
"step": 441500 |
|
}, |
|
{ |
|
"epoch": 120.83, |
|
"learning_rate": 4.3964456596035545e-05, |
|
"loss": 2.0048, |
|
"step": 442000 |
|
}, |
|
{ |
|
"epoch": 120.97, |
|
"learning_rate": 4.395762132604238e-05, |
|
"loss": 2.018, |
|
"step": 442500 |
|
}, |
|
{ |
|
"epoch": 121.1, |
|
"learning_rate": 4.3950786056049215e-05, |
|
"loss": 1.9803, |
|
"step": 443000 |
|
}, |
|
{ |
|
"epoch": 121.24, |
|
"learning_rate": 4.3943950786056056e-05, |
|
"loss": 1.9769, |
|
"step": 443500 |
|
}, |
|
{ |
|
"epoch": 121.38, |
|
"learning_rate": 4.393711551606289e-05, |
|
"loss": 1.9786, |
|
"step": 444000 |
|
}, |
|
{ |
|
"epoch": 121.51, |
|
"learning_rate": 4.393028024606972e-05, |
|
"loss": 1.99, |
|
"step": 444500 |
|
}, |
|
{ |
|
"epoch": 121.65, |
|
"learning_rate": 4.3923444976076554e-05, |
|
"loss": 1.9929, |
|
"step": 445000 |
|
}, |
|
{ |
|
"epoch": 121.79, |
|
"learning_rate": 4.3916609706083396e-05, |
|
"loss": 2.0034, |
|
"step": 445500 |
|
}, |
|
{ |
|
"epoch": 121.92, |
|
"learning_rate": 4.390977443609023e-05, |
|
"loss": 2.0121, |
|
"step": 446000 |
|
}, |
|
{ |
|
"epoch": 122.06, |
|
"learning_rate": 4.3902939166097065e-05, |
|
"loss": 1.9899, |
|
"step": 446500 |
|
}, |
|
{ |
|
"epoch": 122.2, |
|
"learning_rate": 4.389610389610389e-05, |
|
"loss": 1.9649, |
|
"step": 447000 |
|
}, |
|
{ |
|
"epoch": 122.33, |
|
"learning_rate": 4.3889268626110735e-05, |
|
"loss": 1.9764, |
|
"step": 447500 |
|
}, |
|
{ |
|
"epoch": 122.47, |
|
"learning_rate": 4.388243335611757e-05, |
|
"loss": 1.9795, |
|
"step": 448000 |
|
}, |
|
{ |
|
"epoch": 122.61, |
|
"learning_rate": 4.3875598086124404e-05, |
|
"loss": 1.9988, |
|
"step": 448500 |
|
}, |
|
{ |
|
"epoch": 122.74, |
|
"learning_rate": 4.386876281613124e-05, |
|
"loss": 1.9975, |
|
"step": 449000 |
|
}, |
|
{ |
|
"epoch": 122.88, |
|
"learning_rate": 4.3861927546138074e-05, |
|
"loss": 2.007, |
|
"step": 449500 |
|
}, |
|
{ |
|
"epoch": 123.02, |
|
"learning_rate": 4.385509227614491e-05, |
|
"loss": 2.0015, |
|
"step": 450000 |
|
}, |
|
{ |
|
"epoch": 123.15, |
|
"learning_rate": 4.3848257006151743e-05, |
|
"loss": 1.9606, |
|
"step": 450500 |
|
}, |
|
{ |
|
"epoch": 123.29, |
|
"learning_rate": 4.384142173615858e-05, |
|
"loss": 1.9646, |
|
"step": 451000 |
|
}, |
|
{ |
|
"epoch": 123.43, |
|
"learning_rate": 4.383458646616542e-05, |
|
"loss": 1.9758, |
|
"step": 451500 |
|
}, |
|
{ |
|
"epoch": 123.56, |
|
"learning_rate": 4.382775119617225e-05, |
|
"loss": 1.9893, |
|
"step": 452000 |
|
}, |
|
{ |
|
"epoch": 123.7, |
|
"learning_rate": 4.382091592617908e-05, |
|
"loss": 1.9943, |
|
"step": 452500 |
|
}, |
|
{ |
|
"epoch": 123.84, |
|
"learning_rate": 4.3814080656185924e-05, |
|
"loss": 1.9997, |
|
"step": 453000 |
|
}, |
|
{ |
|
"epoch": 123.97, |
|
"learning_rate": 4.380724538619276e-05, |
|
"loss": 2.0071, |
|
"step": 453500 |
|
}, |
|
{ |
|
"epoch": 124.11, |
|
"learning_rate": 4.3800410116199594e-05, |
|
"loss": 1.9643, |
|
"step": 454000 |
|
}, |
|
{ |
|
"epoch": 124.25, |
|
"learning_rate": 4.379357484620643e-05, |
|
"loss": 1.9644, |
|
"step": 454500 |
|
}, |
|
{ |
|
"epoch": 124.38, |
|
"learning_rate": 4.378673957621326e-05, |
|
"loss": 1.9718, |
|
"step": 455000 |
|
}, |
|
{ |
|
"epoch": 124.52, |
|
"learning_rate": 4.37799043062201e-05, |
|
"loss": 1.9801, |
|
"step": 455500 |
|
}, |
|
{ |
|
"epoch": 124.66, |
|
"learning_rate": 4.377306903622693e-05, |
|
"loss": 1.9836, |
|
"step": 456000 |
|
}, |
|
{ |
|
"epoch": 124.79, |
|
"learning_rate": 4.376623376623377e-05, |
|
"loss": 1.9961, |
|
"step": 456500 |
|
}, |
|
{ |
|
"epoch": 124.93, |
|
"learning_rate": 4.37593984962406e-05, |
|
"loss": 1.9996, |
|
"step": 457000 |
|
}, |
|
{ |
|
"epoch": 125.07, |
|
"learning_rate": 4.375256322624744e-05, |
|
"loss": 1.9744, |
|
"step": 457500 |
|
}, |
|
{ |
|
"epoch": 125.21, |
|
"learning_rate": 4.374572795625427e-05, |
|
"loss": 1.9558, |
|
"step": 458000 |
|
}, |
|
{ |
|
"epoch": 125.34, |
|
"learning_rate": 4.373889268626111e-05, |
|
"loss": 1.9701, |
|
"step": 458500 |
|
}, |
|
{ |
|
"epoch": 125.48, |
|
"learning_rate": 4.373205741626795e-05, |
|
"loss": 1.971, |
|
"step": 459000 |
|
}, |
|
{ |
|
"epoch": 125.62, |
|
"learning_rate": 4.372522214627478e-05, |
|
"loss": 1.9868, |
|
"step": 459500 |
|
}, |
|
{ |
|
"epoch": 125.75, |
|
"learning_rate": 4.371838687628161e-05, |
|
"loss": 1.9827, |
|
"step": 460000 |
|
}, |
|
{ |
|
"epoch": 125.89, |
|
"learning_rate": 4.3711551606288446e-05, |
|
"loss": 1.9944, |
|
"step": 460500 |
|
}, |
|
{ |
|
"epoch": 126.03, |
|
"learning_rate": 4.370471633629529e-05, |
|
"loss": 1.9864, |
|
"step": 461000 |
|
}, |
|
{ |
|
"epoch": 126.16, |
|
"learning_rate": 4.369788106630212e-05, |
|
"loss": 1.9455, |
|
"step": 461500 |
|
}, |
|
{ |
|
"epoch": 126.3, |
|
"learning_rate": 4.369104579630896e-05, |
|
"loss": 1.9598, |
|
"step": 462000 |
|
}, |
|
{ |
|
"epoch": 126.44, |
|
"learning_rate": 4.368421052631579e-05, |
|
"loss": 1.9695, |
|
"step": 462500 |
|
}, |
|
{ |
|
"epoch": 126.57, |
|
"learning_rate": 4.367737525632263e-05, |
|
"loss": 1.9791, |
|
"step": 463000 |
|
}, |
|
{ |
|
"epoch": 126.71, |
|
"learning_rate": 4.367053998632946e-05, |
|
"loss": 1.9733, |
|
"step": 463500 |
|
}, |
|
{ |
|
"epoch": 126.85, |
|
"learning_rate": 4.3663704716336296e-05, |
|
"loss": 1.9899, |
|
"step": 464000 |
|
}, |
|
{ |
|
"epoch": 126.98, |
|
"learning_rate": 4.365686944634314e-05, |
|
"loss": 1.9975, |
|
"step": 464500 |
|
}, |
|
{ |
|
"epoch": 127.12, |
|
"learning_rate": 4.3650034176349966e-05, |
|
"loss": 1.9522, |
|
"step": 465000 |
|
}, |
|
{ |
|
"epoch": 127.26, |
|
"learning_rate": 4.36431989063568e-05, |
|
"loss": 1.9489, |
|
"step": 465500 |
|
}, |
|
{ |
|
"epoch": 127.39, |
|
"learning_rate": 4.3636363636363636e-05, |
|
"loss": 1.961, |
|
"step": 466000 |
|
}, |
|
{ |
|
"epoch": 127.53, |
|
"learning_rate": 4.362952836637048e-05, |
|
"loss": 1.9685, |
|
"step": 466500 |
|
}, |
|
{ |
|
"epoch": 127.67, |
|
"learning_rate": 4.362269309637731e-05, |
|
"loss": 1.9775, |
|
"step": 467000 |
|
}, |
|
{ |
|
"epoch": 127.8, |
|
"learning_rate": 4.361585782638414e-05, |
|
"loss": 1.983, |
|
"step": 467500 |
|
}, |
|
{ |
|
"epoch": 127.94, |
|
"learning_rate": 4.3609022556390975e-05, |
|
"loss": 1.9919, |
|
"step": 468000 |
|
}, |
|
{ |
|
"epoch": 128.08, |
|
"learning_rate": 4.3602187286397816e-05, |
|
"loss": 1.9609, |
|
"step": 468500 |
|
}, |
|
{ |
|
"epoch": 128.21, |
|
"learning_rate": 4.359535201640465e-05, |
|
"loss": 1.9508, |
|
"step": 469000 |
|
}, |
|
{ |
|
"epoch": 128.35, |
|
"learning_rate": 4.3588516746411486e-05, |
|
"loss": 1.9594, |
|
"step": 469500 |
|
}, |
|
{ |
|
"epoch": 128.49, |
|
"learning_rate": 4.358168147641832e-05, |
|
"loss": 1.9627, |
|
"step": 470000 |
|
}, |
|
{ |
|
"epoch": 128.62, |
|
"learning_rate": 4.3574846206425156e-05, |
|
"loss": 1.9675, |
|
"step": 470500 |
|
}, |
|
{ |
|
"epoch": 128.76, |
|
"learning_rate": 4.356801093643199e-05, |
|
"loss": 1.9777, |
|
"step": 471000 |
|
}, |
|
{ |
|
"epoch": 128.9, |
|
"learning_rate": 4.3561175666438825e-05, |
|
"loss": 1.9811, |
|
"step": 471500 |
|
}, |
|
{ |
|
"epoch": 129.03, |
|
"learning_rate": 4.355434039644567e-05, |
|
"loss": 1.975, |
|
"step": 472000 |
|
}, |
|
{ |
|
"epoch": 129.17, |
|
"learning_rate": 4.3547505126452495e-05, |
|
"loss": 1.9393, |
|
"step": 472500 |
|
}, |
|
{ |
|
"epoch": 129.31, |
|
"learning_rate": 4.354066985645933e-05, |
|
"loss": 1.9465, |
|
"step": 473000 |
|
}, |
|
{ |
|
"epoch": 129.44, |
|
"learning_rate": 4.3533834586466164e-05, |
|
"loss": 1.9567, |
|
"step": 473500 |
|
}, |
|
{ |
|
"epoch": 129.58, |
|
"learning_rate": 4.3526999316473006e-05, |
|
"loss": 1.963, |
|
"step": 474000 |
|
}, |
|
{ |
|
"epoch": 129.72, |
|
"learning_rate": 4.352016404647984e-05, |
|
"loss": 1.9731, |
|
"step": 474500 |
|
}, |
|
{ |
|
"epoch": 129.85, |
|
"learning_rate": 4.3513328776486675e-05, |
|
"loss": 1.9787, |
|
"step": 475000 |
|
}, |
|
{ |
|
"epoch": 129.99, |
|
"learning_rate": 4.3506493506493503e-05, |
|
"loss": 1.9779, |
|
"step": 475500 |
|
}, |
|
{ |
|
"epoch": 130.13, |
|
"learning_rate": 4.3499658236500345e-05, |
|
"loss": 1.9351, |
|
"step": 476000 |
|
}, |
|
{ |
|
"epoch": 130.26, |
|
"learning_rate": 4.349282296650718e-05, |
|
"loss": 1.9439, |
|
"step": 476500 |
|
}, |
|
{ |
|
"epoch": 130.4, |
|
"learning_rate": 4.3485987696514015e-05, |
|
"loss": 1.953, |
|
"step": 477000 |
|
}, |
|
{ |
|
"epoch": 130.54, |
|
"learning_rate": 4.347915242652085e-05, |
|
"loss": 1.9584, |
|
"step": 477500 |
|
}, |
|
{ |
|
"epoch": 130.67, |
|
"learning_rate": 4.3472317156527684e-05, |
|
"loss": 1.9635, |
|
"step": 478000 |
|
}, |
|
{ |
|
"epoch": 130.81, |
|
"learning_rate": 4.346548188653452e-05, |
|
"loss": 1.9722, |
|
"step": 478500 |
|
}, |
|
{ |
|
"epoch": 130.95, |
|
"learning_rate": 4.3458646616541354e-05, |
|
"loss": 1.9806, |
|
"step": 479000 |
|
}, |
|
{ |
|
"epoch": 131.08, |
|
"learning_rate": 4.345181134654819e-05, |
|
"loss": 1.9505, |
|
"step": 479500 |
|
}, |
|
{ |
|
"epoch": 131.22, |
|
"learning_rate": 4.344497607655503e-05, |
|
"loss": 1.9355, |
|
"step": 480000 |
|
} |
|
], |
|
"max_steps": 3658000, |
|
"num_train_epochs": 1000, |
|
"total_flos": 966230618169802752, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|