{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 399903,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "learning_rate": 4.993748484007372e-05, "loss": 9.307, "step": 500},
    {"epoch": 0.01, "learning_rate": 4.987496968014744e-05, "loss": 7.1579, "step": 1000},
    {"epoch": 0.01, "learning_rate": 4.981245452022116e-05, "loss": 6.1011, "step": 1500},
    {"epoch": 0.02, "learning_rate": 4.974993936029487e-05, "loss": 5.6504, "step": 2000},
    {"epoch": 0.02, "learning_rate": 4.968742420036859e-05, "loss": 5.2222, "step": 2500},
    {"epoch": 0.02, "learning_rate": 4.962490904044231e-05, "loss": 4.8515, "step": 3000},
    {"epoch": 0.03, "learning_rate": 4.956239388051603e-05, "loss": 4.5954, "step": 3500},
    {"epoch": 0.03, "learning_rate": 4.949987872058974e-05, "loss": 4.4224, "step": 4000},
    {"epoch": 0.03, "learning_rate": 4.943736356066346e-05, "loss": 4.3024, "step": 4500},
    {"epoch": 0.04, "learning_rate": 4.937484840073718e-05, "loss": 4.2033, "step": 5000},
    {"epoch": 0.04, "learning_rate": 4.93123332408109e-05, "loss": 4.1213, "step": 5500},
    {"epoch": 0.05, "learning_rate": 4.924981808088462e-05, "loss": 4.0447, "step": 6000},
    {"epoch": 0.05, "learning_rate": 4.918730292095833e-05, "loss": 3.9873, "step": 6500},
    {"epoch": 0.05, "learning_rate": 4.912478776103205e-05, "loss": 3.9163, "step": 7000},
    {"epoch": 0.06, "learning_rate": 4.906227260110577e-05, "loss": 3.866, "step": 7500},
    {"epoch": 0.06, "learning_rate": 4.899975744117949e-05, "loss": 3.8165, "step": 8000},
    {"epoch": 0.06, "learning_rate": 4.89372422812532e-05, "loss": 3.7825, "step": 8500},
    {"epoch": 0.07, "learning_rate": 4.887472712132692e-05, "loss": 3.725, "step": 9000},
    {"epoch": 0.07, "learning_rate": 4.881221196140065e-05, "loss": 3.6905, "step": 9500},
    {"epoch": 0.08, "learning_rate": 4.874969680147436e-05, "loss": 3.6541, "step": 10000},
    {"epoch": 0.08, "learning_rate": 4.868718164154808e-05, "loss": 3.6135, "step": 10500},
    {"epoch": 0.08, "learning_rate": 4.862466648162179e-05, "loss": 3.5826, "step": 11000},
    {"epoch": 0.09, "learning_rate": 4.856215132169552e-05, "loss": 3.5434, "step": 11500},
    {"epoch": 0.09, "learning_rate": 4.849963616176923e-05, "loss": 3.5116, "step": 12000},
    {"epoch": 0.09, "learning_rate": 4.843712100184295e-05, "loss": 3.4834, "step": 12500},
    {"epoch": 0.1, "learning_rate": 4.837460584191667e-05, "loss": 3.4517, "step": 13000},
    {"epoch": 0.1, "learning_rate": 4.831209068199039e-05, "loss": 3.4314, "step": 13500},
    {"epoch": 0.11, "learning_rate": 4.8249575522064106e-05, "loss": 3.3874, "step": 14000},
    {"epoch": 0.11, "learning_rate": 4.818706036213782e-05, "loss": 3.3694, "step": 14500},
    {"epoch": 0.11, "learning_rate": 4.812454520221154e-05, "loss": 3.3399, "step": 15000},
    {"epoch": 0.12, "learning_rate": 4.806203004228526e-05, "loss": 3.3237, "step": 15500},
    {"epoch": 0.12, "learning_rate": 4.7999514882358976e-05, "loss": 3.2979, "step": 16000},
    {"epoch": 0.12, "learning_rate": 4.793699972243269e-05, "loss": 3.2771, "step": 16500},
    {"epoch": 0.13, "learning_rate": 4.787448456250641e-05, "loss": 3.2564, "step": 17000},
    {"epoch": 0.13, "learning_rate": 4.781196940258013e-05, "loss": 3.2413, "step": 17500},
    {"epoch": 0.14, "learning_rate": 4.7749454242653847e-05, "loss": 3.2146, "step": 18000},
    {"epoch": 0.14, "learning_rate": 4.7686939082727566e-05, "loss": 3.2017, "step": 18500},
    {"epoch": 0.14, "learning_rate": 4.762442392280128e-05, "loss": 3.1786, "step": 19000},
    {"epoch": 0.15, "learning_rate": 4.7561908762875004e-05, "loss": 3.1716, "step": 19500},
    {"epoch": 0.15, "learning_rate": 4.749939360294872e-05, "loss": 3.15, "step": 20000},
    {"epoch": 0.15, "learning_rate": 4.7436878443022436e-05, "loss": 3.1288, "step": 20500},
    {"epoch": 0.16, "learning_rate": 4.737436328309615e-05, "loss": 3.1121, "step": 21000},
    {"epoch": 0.16, "learning_rate": 4.7311848123169874e-05, "loss": 3.0973, "step": 21500},
    {"epoch": 0.17, "learning_rate": 4.724933296324359e-05, "loss": 3.0873, "step": 22000},
    {"epoch": 0.17, "learning_rate": 4.7186817803317306e-05, "loss": 3.0668, "step": 22500},
    {"epoch": 0.17, "learning_rate": 4.7124302643391025e-05, "loss": 3.0575, "step": 23000},
    {"epoch": 0.18, "learning_rate": 4.7061787483464744e-05, "loss": 3.0506, "step": 23500},
    {"epoch": 0.18, "learning_rate": 4.6999272323538464e-05, "loss": 3.0371, "step": 24000},
    {"epoch": 0.18, "learning_rate": 4.6936757163612176e-05, "loss": 3.0221, "step": 24500},
    {"epoch": 0.19, "learning_rate": 4.6874242003685895e-05, "loss": 3.0119, "step": 25000},
    {"epoch": 0.19, "learning_rate": 4.6811726843759614e-05, "loss": 2.9966, "step": 25500},
    {"epoch": 0.2, "learning_rate": 4.6749211683833334e-05, "loss": 2.9862, "step": 26000},
    {"epoch": 0.2, "learning_rate": 4.6686696523907046e-05, "loss": 2.9784, "step": 26500},
    {"epoch": 0.2, "learning_rate": 4.6624181363980765e-05, "loss": 2.9718, "step": 27000},
    {"epoch": 0.21, "learning_rate": 4.656166620405449e-05, "loss": 2.955, "step": 27500},
    {"epoch": 0.21, "learning_rate": 4.6499151044128204e-05, "loss": 2.9422, "step": 28000},
    {"epoch": 0.21, "learning_rate": 4.643663588420192e-05, "loss": 2.9391, "step": 28500},
    {"epoch": 0.22, "learning_rate": 4.6374120724275635e-05, "loss": 2.9305, "step": 29000},
    {"epoch": 0.22, "learning_rate": 4.631160556434936e-05, "loss": 2.9197, "step": 29500},
    {"epoch": 0.23, "learning_rate": 4.6249090404423074e-05, "loss": 2.9093, "step": 30000},
    {"epoch": 0.23, "learning_rate": 4.618657524449679e-05, "loss": 2.899, "step": 30500},
    {"epoch": 0.23, "learning_rate": 4.6124060084570505e-05, "loss": 2.8983, "step": 31000},
    {"epoch": 0.24, "learning_rate": 4.606154492464423e-05, "loss": 2.8855, "step": 31500},
    {"epoch": 0.24, "learning_rate": 4.5999029764717944e-05, "loss": 2.8779, "step": 32000},
    {"epoch": 0.24, "learning_rate": 4.593651460479166e-05, "loss": 2.8749, "step": 32500},
    {"epoch": 0.25, "learning_rate": 4.587399944486538e-05, "loss": 2.8549, "step": 33000},
    {"epoch": 0.25, "learning_rate": 4.58114842849391e-05, "loss": 2.8547, "step": 33500},
    {"epoch": 0.26, "learning_rate": 4.574896912501282e-05, "loss": 2.8554, "step": 34000},
    {"epoch": 0.26, "learning_rate": 4.568645396508653e-05, "loss": 2.8457, "step": 34500},
    {"epoch": 0.26, "learning_rate": 4.562393880516025e-05, "loss": 2.8307, "step": 35000},
    {"epoch": 0.27, "learning_rate": 4.556142364523397e-05, "loss": 2.8229, "step": 35500},
    {"epoch": 0.27, "learning_rate": 4.549890848530769e-05, "loss": 2.814, "step": 36000},
    {"epoch": 0.27, "learning_rate": 4.54363933253814e-05, "loss": 2.8058, "step": 36500},
    {"epoch": 0.28, "learning_rate": 4.537387816545512e-05, "loss": 2.8132, "step": 37000},
    {"epoch": 0.28, "learning_rate": 4.531136300552885e-05, "loss": 2.7947, "step": 37500},
    {"epoch": 0.29, "learning_rate": 4.524884784560256e-05, "loss": 2.8021, "step": 38000},
    {"epoch": 0.29, "learning_rate": 4.518633268567628e-05, "loss": 2.7851, "step": 38500},
    {"epoch": 0.29, "learning_rate": 4.512381752574999e-05, "loss": 2.7857, "step": 39000},
    {"epoch": 0.3, "learning_rate": 4.506130236582372e-05, "loss": 2.774, "step": 39500},
    {"epoch": 0.3, "learning_rate": 4.499878720589743e-05, "loss": 2.7792, "step": 40000},
    {"epoch": 0.3, "learning_rate": 4.493627204597115e-05, "loss": 2.7619, "step": 40500},
    {"epoch": 0.31, "learning_rate": 4.487375688604486e-05, "loss": 2.7572, "step": 41000},
    {"epoch": 0.31, "learning_rate": 4.481124172611859e-05, "loss": 2.7569, "step": 41500},
    {"epoch": 0.32, "learning_rate": 4.474872656619231e-05, "loss": 2.7509, "step": 42000},
    {"epoch": 0.32, "learning_rate": 4.468621140626602e-05, "loss": 2.7437, "step": 42500},
    {"epoch": 0.32, "learning_rate": 4.462369624633974e-05, "loss": 2.7296, "step": 43000},
    {"epoch": 0.33, "learning_rate": 4.456118108641346e-05, "loss": 2.735, "step": 43500},
    {"epoch": 0.33, "learning_rate": 4.449866592648718e-05, "loss": 2.7228, "step": 44000},
    {"epoch": 0.33, "learning_rate": 4.443615076656089e-05, "loss": 2.7251, "step": 44500},
    {"epoch": 0.34, "learning_rate": 4.437363560663461e-05, "loss": 2.7254, "step": 45000},
    {"epoch": 0.34, "learning_rate": 4.431112044670833e-05, "loss": 2.72, "step": 45500},
    {"epoch": 0.35, "learning_rate": 4.424860528678205e-05, "loss": 2.7179, "step": 46000},
    {"epoch": 0.35, "learning_rate": 4.418609012685577e-05, "loss": 2.7066, "step": 46500},
    {"epoch": 0.35, "learning_rate": 4.412357496692948e-05, "loss": 2.7011, "step": 47000},
    {"epoch": 0.36, "learning_rate": 4.4061059807003205e-05, "loss": 2.7048, "step": 47500},
    {"epoch": 0.36, "learning_rate": 4.399854464707692e-05, "loss": 2.7001, "step": 48000},
    {"epoch": 0.36, "learning_rate": 4.393602948715064e-05, "loss": 2.6908, "step": 48500},
    {"epoch": 0.37, "learning_rate": 4.387351432722435e-05, "loss": 2.6901, "step": 49000},
    {"epoch": 0.37, "learning_rate": 4.3810999167298076e-05, "loss": 2.6847, "step": 49500},
    {"epoch": 0.38, "learning_rate": 4.374848400737179e-05, "loss": 2.6824, "step": 50000},
    {"epoch": 0.38, "learning_rate": 4.368596884744551e-05, "loss": 2.6795, "step": 50500},
    {"epoch": 0.38, "learning_rate": 4.3623453687519226e-05, "loss": 2.6737, "step": 51000},
    {"epoch": 0.39, "learning_rate": 4.3560938527592946e-05, "loss": 2.67, "step": 51500},
    {"epoch": 0.39, "learning_rate": 4.3498423367666665e-05, "loss": 2.6648, "step": 52000},
    {"epoch": 0.39, "learning_rate": 4.343590820774038e-05, "loss": 2.655, "step": 52500},
    {"epoch": 0.4, "learning_rate": 4.3373393047814096e-05, "loss": 2.6564, "step": 53000},
    {"epoch": 0.4, "learning_rate": 4.3310877887887816e-05, "loss": 2.6491, "step": 53500},
    {"epoch": 0.41, "learning_rate": 4.3248362727961535e-05, "loss": 2.6509, "step": 54000},
    {"epoch": 0.41, "learning_rate": 4.318584756803525e-05, "loss": 2.6552, "step": 54500},
    {"epoch": 0.41, "learning_rate": 4.3123332408108966e-05, "loss": 2.6434, "step": 55000},
    {"epoch": 0.42, "learning_rate": 4.3060817248182686e-05, "loss": 2.6403, "step": 55500},
    {"epoch": 0.42, "learning_rate": 4.2998302088256405e-05, "loss": 2.6295, "step": 56000},
    {"epoch": 0.42, "learning_rate": 4.2935786928330124e-05, "loss": 2.6336, "step": 56500},
    {"epoch": 0.43, "learning_rate": 4.2873271768403837e-05, "loss": 2.624, "step": 57000},
    {"epoch": 0.43, "learning_rate": 4.2810756608477556e-05, "loss": 2.6265, "step": 57500},
    {"epoch": 0.44, "learning_rate": 4.2748241448551275e-05, "loss": 2.6266, "step": 58000},
    {"epoch": 0.44, "learning_rate": 4.2685726288624994e-05, "loss": 2.616, "step": 58500},
    {"epoch": 0.44, "learning_rate": 4.262321112869871e-05, "loss": 2.6115, "step": 59000},
    {"epoch": 0.45, "learning_rate": 4.256069596877243e-05, "loss": 2.6181, "step": 59500},
    {"epoch": 0.45, "learning_rate": 4.249818080884615e-05, "loss": 2.6196, "step": 60000},
    {"epoch": 0.45, "learning_rate": 4.2435665648919864e-05, "loss": 2.6134, "step": 60500},
    {"epoch": 0.46, "learning_rate": 4.2373150488993583e-05, "loss": 2.6023, "step": 61000},
    {"epoch": 0.46, "learning_rate": 4.23106353290673e-05, "loss": 2.6016, "step": 61500},
    {"epoch": 0.47, "learning_rate": 4.224812016914102e-05, "loss": 2.6041, "step": 62000},
    {"epoch": 0.47, "learning_rate": 4.2185605009214734e-05, "loss": 2.5993, "step": 62500},
    {"epoch": 0.47, "learning_rate": 4.2123089849288454e-05, "loss": 2.6022, "step": 63000},
    {"epoch": 0.48, "learning_rate": 4.206057468936217e-05, "loss": 2.5916, "step": 63500},
    {"epoch": 0.48, "learning_rate": 4.199805952943589e-05, "loss": 2.5951, "step": 64000},
    {"epoch": 0.48, "learning_rate": 4.193554436950961e-05, "loss": 2.5835, "step": 64500},
    {"epoch": 0.49, "learning_rate": 4.1873029209583324e-05, "loss": 2.586, "step": 65000},
    {"epoch": 0.49, "learning_rate": 4.181051404965704e-05, "loss": 2.5828, "step": 65500},
    {"epoch": 0.5, "learning_rate": 4.174799888973076e-05, "loss": 2.5811, "step": 66000},
    {"epoch": 0.5, "learning_rate": 4.168548372980448e-05, "loss": 2.5787, "step": 66500},
    {"epoch": 0.5, "learning_rate": 4.1622968569878194e-05, "loss": 2.5767, "step": 67000},
    {"epoch": 0.51, "learning_rate": 4.156045340995191e-05, "loss": 2.5727, "step": 67500},
    {"epoch": 0.51, "learning_rate": 4.149793825002563e-05, "loss": 2.5684, "step": 68000},
    {"epoch": 0.51, "learning_rate": 4.143542309009935e-05, "loss": 2.5731, "step": 68500},
    {"epoch": 0.52, "learning_rate": 4.137290793017307e-05, "loss": 2.5698, "step": 69000},
    {"epoch": 0.52, "learning_rate": 4.131039277024679e-05, "loss": 2.5716, "step": 69500},
    {"epoch": 0.53, "learning_rate": 4.124787761032051e-05, "loss": 2.5637, "step": 70000},
    {"epoch": 0.53, "learning_rate": 4.118536245039422e-05, "loss": 2.5655, "step": 70500},
    {"epoch": 0.53, "learning_rate": 4.112284729046794e-05, "loss": 2.5591, "step": 71000},
    {"epoch": 0.54, "learning_rate": 4.106033213054166e-05, "loss": 2.5528, "step": 71500},
    {"epoch": 0.54, "learning_rate": 4.099781697061538e-05, "loss": 2.5521, "step": 72000},
    {"epoch": 0.54, "learning_rate": 4.093530181068909e-05, "loss": 2.5438, "step": 72500},
    {"epoch": 0.55, "learning_rate": 4.087278665076281e-05, "loss": 2.542, "step": 73000},
    {"epoch": 0.55, "learning_rate": 4.081027149083653e-05, "loss": 2.5414, "step": 73500},
    {"epoch": 0.56, "learning_rate": 4.074775633091025e-05, "loss": 2.5427, "step": 74000},
    {"epoch": 0.56, "learning_rate": 4.068524117098397e-05, "loss": 2.5382, "step": 74500},
    {"epoch": 0.56, "learning_rate": 4.062272601105768e-05, "loss": 2.5351, "step": 75000},
    {"epoch": 0.57, "learning_rate": 4.05602108511314e-05, "loss": 2.5392, "step": 75500},
    {"epoch": 0.57, "learning_rate": 4.049769569120512e-05, "loss": 2.5352, "step": 76000},
    {"epoch": 0.57, "learning_rate": 4.043518053127884e-05, "loss": 2.5311, "step": 76500},
    {"epoch": 0.58, "learning_rate": 4.037266537135255e-05, "loss": 2.5285, "step": 77000},
    {"epoch": 0.58, "learning_rate": 4.031015021142627e-05, "loss": 2.5275, "step": 77500},
    {"epoch": 0.59, "learning_rate": 4.0247635051499996e-05, "loss": 2.5365, "step": 78000},
    {"epoch": 0.59, "learning_rate": 4.018511989157371e-05, "loss": 2.519, "step": 78500},
    {"epoch": 0.59, "learning_rate": 4.012260473164743e-05, "loss": 2.5217, "step": 79000},
    {"epoch": 0.6, "learning_rate": 4.006008957172114e-05, "loss": 2.5172, "step": 79500},
    {"epoch": 0.6, "learning_rate": 3.9997574411794866e-05, "loss": 2.5193, "step": 80000},
    {"epoch": 0.6, "learning_rate": 3.993505925186858e-05, "loss": 2.5114, "step": 80500},
    {"epoch": 0.61, "learning_rate": 3.98725440919423e-05, "loss": 2.5118, "step": 81000},
    {"epoch": 0.61, "learning_rate": 3.981002893201602e-05, "loss": 2.5191, "step": 81500},
    {"epoch": 0.62, "learning_rate": 3.9747513772089736e-05, "loss": 2.515, "step": 82000},
    {"epoch": 0.62, "learning_rate": 3.9684998612163455e-05, "loss": 2.5156, "step": 82500},
    {"epoch": 0.62, "learning_rate": 3.962248345223717e-05, "loss": 2.5051, "step": 83000},
    {"epoch": 0.63, "learning_rate": 3.955996829231089e-05, "loss": 2.5134, "step": 83500},
    {"epoch": 0.63, "learning_rate": 3.9497453132384606e-05, "loss": 2.4985, "step": 84000},
    {"epoch": 0.63, "learning_rate": 3.9434937972458325e-05, "loss": 2.508, "step": 84500},
    {"epoch": 0.64, "learning_rate": 3.937242281253204e-05, "loss": 2.4966, "step": 85000},
    {"epoch": 0.64, "learning_rate": 3.930990765260576e-05, "loss": 2.4985, "step": 85500},
    {"epoch": 0.65, "learning_rate": 3.9247392492679476e-05, "loss": 2.5049, "step": 86000},
    {"epoch": 0.65, "learning_rate": 3.9184877332753195e-05, "loss": 2.5042, "step": 86500},
    {"epoch": 0.65, "learning_rate": 3.912236217282691e-05, "loss": 2.5034, "step": 87000},
    {"epoch": 0.66, "learning_rate": 3.905984701290063e-05, "loss": 2.4971, "step": 87500},
    {"epoch": 0.66, "learning_rate": 3.899733185297435e-05, "loss": 2.4913, "step": 88000},
    {"epoch": 0.66, "learning_rate": 3.8934816693048066e-05, "loss": 2.5028, "step": 88500},
    {"epoch": 0.67, "learning_rate": 3.8872301533121785e-05, "loss": 2.4927, "step": 89000},
    {"epoch": 0.67, "learning_rate": 3.88097863731955e-05, "loss": 2.4971, "step": 89500},
    {"epoch": 0.68, "learning_rate": 3.874727121326922e-05, "loss": 2.4789, "step": 90000},
    {"epoch": 0.68, "learning_rate": 3.8684756053342936e-05, "loss": 2.4866, "step": 90500},
    {"epoch": 0.68, "learning_rate": 3.8622240893416655e-05, "loss": 2.4762, "step": 91000},
    {"epoch": 0.69, "learning_rate": 3.855972573349037e-05, "loss": 2.4892, "step": 91500},
    {"epoch": 0.69, "learning_rate": 3.849721057356409e-05, "loss": 2.4877, "step": 92000},
    {"epoch": 0.69, "learning_rate": 3.843469541363781e-05, "loss": 2.4791, "step": 92500},
    {"epoch": 0.7, "learning_rate": 3.8372180253711525e-05, "loss": 2.4774, "step": 93000},
    {"epoch": 0.7, "learning_rate": 3.8309665093785244e-05, "loss": 2.4762, "step": 93500},
    {"epoch": 0.71, "learning_rate": 3.824714993385896e-05, "loss": 2.4774, "step": 94000},
    {"epoch": 0.71, "learning_rate": 3.818463477393268e-05, "loss": 2.4804, "step": 94500},
    {"epoch": 0.71, "learning_rate": 3.8122119614006395e-05, "loss": 2.4724, "step": 95000},
    {"epoch": 0.72, "learning_rate": 3.8059604454080114e-05, "loss": 2.4706, "step": 95500},
    {"epoch": 0.72, "learning_rate": 3.799708929415383e-05, "loss": 2.4755, "step": 96000},
    {"epoch": 0.72, "learning_rate": 3.793457413422755e-05, "loss": 2.4587, "step": 96500},
    {"epoch": 0.73, "learning_rate": 3.787205897430127e-05, "loss": 2.4769, "step": 97000},
    {"epoch": 0.73, "learning_rate": 3.7809543814374984e-05, "loss": 2.4604, "step": 97500},
    {"epoch": 0.74, "learning_rate": 3.774702865444871e-05, "loss": 2.4714, "step": 98000},
    {"epoch": 0.74, "learning_rate": 3.768451349452242e-05, "loss": 2.4557, "step": 98500},
    {"epoch": 0.74, "learning_rate": 3.762199833459614e-05, "loss": 2.4592, "step": 99000},
    {"epoch": 0.75, "learning_rate": 3.7559483174669854e-05, "loss": 2.4558, "step": 99500},
    {"epoch": 0.75, "learning_rate": 3.749696801474358e-05, "loss": 2.4611, "step": 100000},
    {"epoch": 0.75, "learning_rate": 3.743445285481729e-05, "loss": 2.4627, "step": 100500},
    {"epoch": 0.76, "learning_rate": 3.737193769489101e-05, "loss": 2.4596, "step": 101000},
    {"epoch": 0.76, "learning_rate": 3.730942253496473e-05, "loss": 2.4496, "step": 101500},
    {"epoch": 0.77, "learning_rate": 3.724690737503845e-05, "loss": 2.4549, "step": 102000},
    {"epoch": 0.77, "learning_rate": 3.718439221511217e-05, "loss": 2.4586, "step": 102500},
    {"epoch": 0.77, "learning_rate": 3.712187705518588e-05, "loss": 2.4525, "step": 103000},
    {"epoch": 0.78, "learning_rate": 3.70593618952596e-05, "loss": 2.4557, "step": 103500},
    {"epoch": 0.78, "learning_rate": 3.699684673533332e-05, "loss": 2.4561, "step": 104000},
    {"epoch": 0.78, "learning_rate": 3.693433157540704e-05, "loss": 2.4505, "step": 104500},
    {"epoch": 0.79, "learning_rate": 3.687181641548075e-05, "loss": 2.4491, "step": 105000},
    {"epoch": 0.79, "learning_rate": 3.680930125555447e-05, "loss": 2.4437, "step": 105500},
    {"epoch": 0.8, "learning_rate": 3.67467860956282e-05, "loss": 2.4532, "step": 106000},
    {"epoch": 0.8, "learning_rate": 3.668427093570191e-05, "loss": 2.4464, "step": 106500},
    {"epoch": 0.8, "learning_rate": 3.662175577577563e-05, "loss": 2.4438, "step": 107000},
    {"epoch": 0.81, "learning_rate": 3.655924061584934e-05, "loss": 2.4483, "step": 107500},
    {"epoch": 0.81, "learning_rate": 3.649672545592307e-05, "loss": 2.4378, "step": 108000},
    {"epoch": 0.81, "learning_rate": 3.643421029599678e-05, "loss": 2.4446, "step": 108500},
    {"epoch": 0.82, "learning_rate": 3.63716951360705e-05, "loss": 2.4276, "step": 109000},
    {"epoch": 0.82, "learning_rate": 3.630917997614421e-05, "loss": 2.4392, "step": 109500},
    {"epoch": 0.83, "learning_rate": 3.624666481621794e-05, "loss": 2.4416, "step": 110000},
    {"epoch": 0.83, "learning_rate": 3.6184149656291657e-05, "loss": 2.4375, "step": 110500},
    {"epoch": 0.83, "learning_rate": 3.612163449636537e-05, "loss": 2.4378, "step": 111000},
    {"epoch": 0.84, "learning_rate": 3.605911933643909e-05, "loss": 2.4356, "step": 111500},
    {"epoch": 0.84, "learning_rate": 3.599660417651281e-05, "loss": 2.4383, "step": 112000},
    {"epoch": 0.84, "learning_rate": 3.593408901658653e-05, "loss": 2.4336, "step": 112500},
    {"epoch": 0.85, "learning_rate": 3.587157385666024e-05, "loss": 2.4337, "step": 113000},
    {"epoch": 0.85, "learning_rate": 3.580905869673396e-05, "loss": 2.4318, "step": 113500},
    {"epoch": 0.86, "learning_rate": 3.574654353680768e-05, "loss": 2.4311, "step": 114000},
    {"epoch": 0.86, "learning_rate": 3.56840283768814e-05, "loss": 2.4269, "step": 114500},
    {"epoch": 0.86, "learning_rate": 3.5621513216955116e-05, "loss": 2.4232, "step": 115000},
    {"epoch": 0.87, "learning_rate": 3.555899805702883e-05, "loss": 2.4268, "step": 115500},
    {"epoch": 0.87, "learning_rate": 3.549648289710255e-05, "loss": 2.4252, "step": 116000},
    {"epoch": 0.87, "learning_rate": 3.543396773717627e-05, "loss": 2.4296, "step": 116500},
    {"epoch": 0.88, "learning_rate": 3.5371452577249986e-05, "loss": 2.4283, "step": 117000},
    {"epoch": 0.88, "learning_rate": 3.53089374173237e-05, "loss": 2.4186, "step": 117500},
    {"epoch": 0.89, "learning_rate": 3.5246422257397424e-05, "loss": 2.4283, "step": 118000},
    {"epoch": 0.89, "learning_rate": 3.518390709747114e-05, "loss": 2.4221, "step": 118500},
    {"epoch": 0.89, "learning_rate": 3.5121391937544856e-05, "loss": 2.4166, "step": 119000},
    {"epoch": 0.9, "learning_rate": 3.5058876777618575e-05, "loss": 2.4135, "step": 119500},
    {"epoch": 0.9, "learning_rate": 3.4996361617692294e-05, "loss": 2.4181, "step": 120000},
    {"epoch": 0.9, "learning_rate": 3.4933846457766014e-05, "loss": 2.4257, "step": 120500},
    {"epoch": 0.91, "learning_rate": 3.4871331297839726e-05, "loss": 2.414, "step": 121000},
    {"epoch": 0.91, "learning_rate": 3.4808816137913445e-05, "loss": 2.4145, "step": 121500},
    {"epoch": 0.92, "learning_rate": 3.4746300977987165e-05, "loss": 2.416, "step": 122000},
    {"epoch": 0.92, "learning_rate": 3.4683785818060884e-05, "loss": 2.4119, "step": 122500},
    {"epoch": 0.92, "learning_rate": 3.4621270658134596e-05, "loss": 2.4104, "step": 123000},
    {"epoch": 0.93, "learning_rate": 3.4558755498208315e-05, "loss": 2.4121, "step": 123500},
    {"epoch": 0.93, "learning_rate": 3.4496240338282035e-05, "loss": 2.4112, "step": 124000},
    {"epoch": 0.93, "learning_rate": 3.4433725178355754e-05, "loss": 2.4159, "step": 124500},
    {"epoch": 0.94, "learning_rate": 3.437121001842947e-05, "loss": 2.4074, "step": 125000},
    {"epoch": 0.94, "learning_rate": 3.4308694858503185e-05, "loss": 2.4106, "step": 125500},
    {"epoch": 0.95, "learning_rate": 3.4246179698576905e-05, "loss": 2.4101, "step": 126000},
    {"epoch": 0.95, "learning_rate": 3.4183664538650624e-05, "loss": 2.408, "step": 126500},
    {"epoch": 0.95, "learning_rate": 3.412114937872434e-05, "loss": 2.4105, "step": 127000},
    {"epoch": 0.96, "learning_rate": 3.4058634218798056e-05, "loss": 2.4047, "step": 127500},
    {"epoch": 0.96, "learning_rate": 3.399611905887178e-05, "loss": 2.4107, "step": 128000},
    {"epoch": 0.96, "learning_rate": 3.39336038989455e-05, "loss": 2.4087, "step": 128500},
    {"epoch": 0.97, "learning_rate": 3.387108873901921e-05, "loss": 2.4039, "step": 129000},
    {"epoch": 0.97, "learning_rate": 3.380857357909293e-05, "loss": 2.4097, "step": 129500},
    {"epoch": 0.98, "learning_rate": 3.374605841916665e-05, "loss": 2.3998, "step": 130000},
    {"epoch": 0.98, "learning_rate": 3.368354325924037e-05, "loss": 2.4002, "step": 130500},
    {"epoch": 0.98, "learning_rate": 3.362102809931408e-05, "loss": 2.3973, "step": 131000},
    {"epoch": 0.99, "learning_rate": 3.35585129393878e-05, "loss": 2.3961, "step": 131500},
    {"epoch": 0.99, "learning_rate": 3.349599777946152e-05, "loss": 2.4067, "step": 132000},
    {"epoch": 0.99, "learning_rate": 3.343348261953524e-05, "loss": 2.3986, "step": 132500},
    {"epoch": 1.0, "learning_rate": 3.337096745960896e-05, "loss": 2.3942, "step": 133000},
    {"epoch": 1.0, "learning_rate": 3.330845229968267e-05, "loss": 2.3936, "step": 133500},
    {"epoch": 1.01, "learning_rate": 3.324593713975639e-05, "loss": 2.3976, "step": 134000},
    {"epoch": 1.01, "learning_rate": 3.318342197983011e-05, "loss": 2.3938, "step": 134500},
    {"epoch": 1.01, "learning_rate": 3.312090681990383e-05, "loss": 2.3913, "step": 135000},
    {"epoch": 1.02, "learning_rate": 3.305839165997754e-05, "loss": 2.3942, "step": 135500},
    {"epoch": 1.02, "learning_rate": 3.299587650005126e-05, "loss": 2.3925, "step": 136000},
    {"epoch": 1.02, "learning_rate": 3.293336134012498e-05, "loss": 2.403, "step": 136500},
    {"epoch": 1.03, "learning_rate": 3.28708461801987e-05, "loss": 2.3855, "step": 137000},
    {"epoch": 1.03, "learning_rate": 3.280833102027242e-05, "loss": 2.3908, "step": 137500},
    {"epoch": 1.04, "learning_rate": 3.274581586034613e-05, "loss": 2.3894, "step": 138000},
    {"epoch": 1.04, "learning_rate": 3.268330070041986e-05, "loss": 2.3935, "step": 138500},
    {"epoch": 1.04, "learning_rate": 3.262078554049357e-05, "loss": 2.3841, "step": 139000},
    {"epoch": 1.05, "learning_rate": 3.255827038056729e-05, "loss": 2.3876, "step": 139500},
    {"epoch": 1.05, "learning_rate": 3.249575522064101e-05, "loss": 2.386, "step": 140000},
    {"epoch": 1.05, "learning_rate": 3.243324006071473e-05, "loss": 2.3867, "step": 140500},
    {"epoch": 1.06, "learning_rate": 3.237072490078844e-05, "loss": 2.3857, "step": 141000},
    {"epoch": 1.06, "learning_rate": 3.230820974086216e-05, "loss": 2.3823, "step": 141500},
    {"epoch": 1.07, "learning_rate": 3.224569458093588e-05, "loss": 2.3806, "step": 142000},
    {"epoch": 1.07, "learning_rate": 3.21831794210096e-05, "loss": 2.3883, "step": 142500},
    {"epoch": 1.07, "learning_rate": 3.212066426108332e-05, "loss": 2.3837, "step": 143000},
    {"epoch": 1.08, "learning_rate": 3.205814910115703e-05, "loss": 2.3828, "step": 143500},
    {"epoch": 1.08, "learning_rate": 3.199563394123075e-05, "loss": 2.378, "step": 144000},
    {"epoch": 1.08, "learning_rate": 3.193311878130447e-05, "loss": 2.3774, "step": 144500},
    {"epoch": 1.09, "learning_rate": 3.187060362137819e-05, "loss": 2.3799, "step": 145000},
    {"epoch": 1.09, "learning_rate": 3.18080884614519e-05, "loss": 2.3786, "step": 145500},
    {"epoch": 1.1, "learning_rate": 3.174557330152562e-05, "loss": 2.3754, "step": 146000},
    {"epoch": 1.1, "learning_rate": 3.168305814159934e-05, "loss": 2.3811, "step": 146500},
    {"epoch": 1.1, "learning_rate": 3.162054298167306e-05, "loss": 2.3787, "step": 147000},
    {"epoch": 1.11, "learning_rate": 3.1558027821746777e-05, "loss": 2.3794, "step": 147500},
    {"epoch": 1.11, "learning_rate": 3.149551266182049e-05, "loss": 2.3709, "step": 148000},
    {"epoch": 1.11, "learning_rate": 3.1432997501894215e-05, "loss": 2.3754, "step": 148500},
    {"epoch": 1.12, "learning_rate": 3.137048234196793e-05, "loss": 2.3705, "step": 149000},
    {"epoch": 1.12, "learning_rate": 3.1307967182041647e-05, "loss": 2.3767, "step": 149500},
    {"epoch": 1.13, "learning_rate": 3.124545202211536e-05, "loss": 2.3805, "step": 150000},
    {"epoch": 1.13, "learning_rate": 3.1182936862189085e-05, "loss": 2.376, "step": 150500},
    {"epoch": 1.13, "learning_rate": 3.11204217022628e-05, "loss": 2.3733, "step": 151000},
    {"epoch": 1.14, "learning_rate": 3.105790654233652e-05, "loss": 2.3775, "step": 151500},
    {"epoch": 1.14, "learning_rate": 3.0995391382410236e-05, "loss": 2.3673, "step": 152000},
    {"epoch": 1.14, "learning_rate": 3.0932876222483955e-05, "loss": 2.3744, "step": 152500},
    {"epoch": 1.15, "learning_rate": 3.0870361062557674e-05, "loss": 2.3717, "step": 153000},
    {"epoch": 1.15, "learning_rate": 3.080784590263139e-05, "loss": 2.3682, "step": 153500},
    {"epoch": 1.16, "learning_rate": 3.0745330742705106e-05, "loss": 2.3694, "step": 154000},
    {"epoch": 1.16, "learning_rate": 3.0682815582778825e-05, "loss": 2.3698, "step": 154500},
    {"epoch": 1.16, "learning_rate": 3.0620300422852544e-05, "loss": 2.3699, "step": 155000},
    {"epoch": 1.17, "learning_rate": 3.055778526292626e-05, "loss": 2.3661, "step": 155500},
    {"epoch": 1.17, "learning_rate": 3.049527010299998e-05, "loss": 2.3603, "step": 156000},
    {"epoch": 1.17, "learning_rate": 3.04327549430737e-05, "loss": 2.3703, "step": 156500},
    {"epoch": 1.18, "learning_rate": 3.0370239783147414e-05, "loss": 2.3669, "step": 157000},
    {"epoch": 1.18, "learning_rate": 3.0307724623221134e-05, "loss": 2.3651, "step": 157500},
    {"epoch": 1.19, "learning_rate": 3.024520946329485e-05, "loss": 2.3657, "step": 158000},
    {"epoch": 1.19, "learning_rate": 3.018269430336857e-05, "loss": 2.3674, "step": 158500},
    {"epoch": 1.19, "learning_rate": 3.0120179143442284e-05, "loss": 2.3676, "step": 159000},
    {"epoch": 1.2, "learning_rate": 3.0057663983516004e-05, "loss": 2.371, "step": 159500},
    {"epoch": 1.2, "learning_rate": 2.999514882358972e-05, "loss": 2.3632, "step": 160000},
    {"epoch": 1.2, "learning_rate": 2.993263366366344e-05, "loss": 2.3642, "step": 160500},
    {"epoch": 1.21, "learning_rate": 2.987011850373716e-05, "loss": 2.3638, "step": 161000},
    {"epoch": 1.21, "learning_rate": 2.9807603343810874e-05, "loss": 2.3578, "step": 161500},
    {"epoch": 1.22, "learning_rate": 2.9745088183884596e-05, "loss": 2.3597, "step": 162000},
    {"epoch": 1.22, "learning_rate": 2.968257302395831e-05, "loss": 2.3582, "step": 162500},
    {"epoch": 1.22, "learning_rate": 2.962005786403203e-05, "loss": 2.3654, "step": 163000},
    {"epoch": 1.23, "learning_rate": 2.9557542704105744e-05, "loss": 2.3613, "step": 163500},
    {"epoch": 1.23, "learning_rate": 2.9495027544179466e-05, "loss": 2.3612, "step": 164000},
    {"epoch": 1.23, "learning_rate": 2.943251238425318e-05, "loss": 2.361, "step": 164500},
    {"epoch": 1.24, "learning_rate": 2.93699972243269e-05, "loss": 2.353, "step": 165000},
    {"epoch": 1.24, "learning_rate": 2.930748206440062e-05, "loss": 2.3695, "step": 165500},
    {"epoch": 1.25, "learning_rate": 2.9244966904474336e-05, "loss": 2.3581, "step": 166000},
    {"epoch": 1.25, "learning_rate": 2.9182451744548056e-05, "loss": 2.3633, "step": 166500},
    {"epoch": 1.25, "learning_rate": 2.911993658462177e-05, "loss": 2.3532, "step": 167000},
    {"epoch": 1.26, "learning_rate": 2.905742142469549e-05, "loss": 2.3555, "step": 167500},
    {"epoch": 1.26, "learning_rate": 2.8994906264769207e-05, "loss": 2.3536, "step": 168000},
    {"epoch": 1.26, "learning_rate": 2.8932391104842926e-05, "loss": 2.3532, "step": 168500},
    {"epoch": 1.27, "learning_rate": 2.886987594491664e-05, "loss": 2.347, "step": 169000},
    {"epoch": 1.27, "learning_rate": 2.880736078499036e-05, "loss": 2.3535, "step": 169500},
    {"epoch": 1.28, "learning_rate": 2.8744845625064083e-05, "loss": 2.3504, "step": 170000},
    {"epoch": 1.28, "learning_rate": 2.8682330465137796e-05, "loss": 2.3641, "step": 170500},
    {"epoch": 1.28, "learning_rate": 2.861981530521152e-05, "loss": 2.3437, "step": 171000},
    {"epoch": 1.29, "learning_rate": 2.855730014528523e-05, "loss": 2.3487, "step": 171500},
    {"epoch": 1.29, "learning_rate": 2.8494784985358953e-05, "loss": 2.3537, "step": 172000},
    {"epoch": 1.29, "learning_rate": 2.8432269825432666e-05, "loss": 2.3525, "step": 172500},
    {"epoch": 1.3, "learning_rate": 2.836975466550639e-05, "loss": 2.347, "step": 173000},
    {"epoch": 1.3, "learning_rate": 2.83072395055801e-05, "loss": 2.3475, "step": 173500},
    {"epoch": 1.31, "learning_rate": 2.8244724345653824e-05, "loss": 2.3453, "step": 174000},
    {"epoch": 1.31, "learning_rate": 2.8182209185727543e-05, "loss": 2.3503, "step": 174500},
    {"epoch": 1.31, "learning_rate": 2.811969402580126e-05, "loss": 2.3482, "step": 175000},
    {"epoch": 1.32, "learning_rate": 2.8057178865874978e-05, "loss": 2.3496, "step": 175500},
    {"epoch": 1.32, "learning_rate": 2.7994663705948694e-05, "loss": 2.3524, "step": 176000},
    {"epoch": 1.32, "learning_rate": 2.7932148546022413e-05, "loss": 2.348, "step": 176500},
    {"epoch": 1.33, "learning_rate": 2.786963338609613e-05, "loss": 2.3438, "step": 177000},
    {"epoch": 1.33, "learning_rate": 2.7807118226169848e-05, "loss": 2.3522, "step": 177500},
    {"epoch": 1.34, "learning_rate": 2.7744603066243564e-05, "loss": 2.3487, "step": 178000},
    {"epoch": 1.34, "learning_rate": 2.7682087906317283e-05, "loss": 2.3489, "step": 178500},
    {"epoch": 1.34, "learning_rate": 2.7619572746391002e-05, "loss": 2.3494, "step": 179000},
    {"epoch": 1.35, "learning_rate": 2.7557057586464718e-05, "loss": 2.3476, "step": 179500},
    {"epoch": 1.35, "learning_rate": 2.7494542426538437e-05, "loss": 2.347, "step": 180000},
    {"epoch": 1.35, "learning_rate": 2.7432027266612153e-05, "loss": 2.341, "step": 180500},
    {"epoch": 1.36, "learning_rate": 2.7369512106685876e-05, "loss": 2.3487, "step": 181000},
    {"epoch": 1.36, "learning_rate": 2.7306996946759588e-05, "loss": 2.3402, "step": 181500},
    {"epoch": 1.37, "learning_rate": 2.724448178683331e-05, "loss": 2.3473, "step": 182000},
    {"epoch": 1.37, "learning_rate": 2.7181966626907023e-05, "loss": 2.3436, "step": 182500},
    {"epoch": 1.37, "learning_rate": 2.7119451466980746e-05, "loss": 2.3413, "step": 183000},
    {"epoch": 1.38, "learning_rate": 2.7056936307054465e-05, "loss": 2.3508, "step": 183500},
    {"epoch": 1.38, "learning_rate": 2.699442114712818e-05, "loss": 2.3464, "step": 184000},
    {"epoch": 1.38, "learning_rate": 2.69319059872019e-05, "loss": 2.3388, "step": 184500},
    {"epoch": 1.39, "learning_rate": 2.6869390827275616e-05, "loss": 2.3488, "step": 185000},
    {"epoch": 1.39, "learning_rate": 2.6806875667349335e-05, "loss": 2.3343, "step": 185500},
    {"epoch": 1.4, "learning_rate": 2.674436050742305e-05, "loss": 2.3404, "step": 186000},
    {"epoch": 1.4, "learning_rate": 2.668184534749677e-05, "loss": 2.3406, "step": 186500},
    {"epoch": 1.4, "learning_rate": 2.6619330187570486e-05, "loss": 2.3465, "step": 187000},
    {"epoch": 1.41, "learning_rate": 2.6556815027644205e-05, "loss": 2.3428, "step": 187500},
    {"epoch": 1.41, "learning_rate": 2.6494299867717924e-05, "loss": 2.3445, "step": 188000},
    {"epoch": 1.41, "learning_rate": 2.643178470779164e-05, "loss": 2.3428, "step": 188500},
    {"epoch": 1.42, "learning_rate": 2.636926954786536e-05, "loss": 2.3458, "step": 189000},
    {"epoch": 1.42, "learning_rate": 2.6306754387939075e-05, "loss": 2.3373, "step": 189500},
    {"epoch": 1.43, "learning_rate": 2.6244239228012794e-05, "loss": 2.3341, "step": 190000},
    {"epoch": 1.43, "learning_rate": 2.618172406808651e-05, "loss": 2.3325, "step": 190500},
    {"epoch": 1.43, "learning_rate": 2.611920890816023e-05, "loss": 2.3323, "step": 191000},
    {"epoch": 1.44, "learning_rate": 2.6056693748233945e-05, "loss": 2.3403, "step": 191500},
    {"epoch": 1.44, "learning_rate": 2.5994178588307668e-05, "loss": 2.3342, "step": 192000},
    {"epoch": 1.44, "learning_rate": 2.5931663428381387e-05, "loss": 2.3295, "step": 192500},
    {"epoch": 1.45, "learning_rate": 2.5869148268455103e-05, "loss": 2.3453, "step": 193000},
    {"epoch": 1.45, "learning_rate": 2.5806633108528822e-05, "loss": 2.3327, "step": 193500},
    {"epoch": 1.46, "learning_rate": 2.5744117948602538e-05, "loss": 2.3324, "step": 194000},
    {"epoch": 1.46, "learning_rate": 2.5681602788676257e-05, "loss": 2.3341, "step": 194500},
    {"epoch": 1.46, "learning_rate": 2.5619087628749973e-05, "loss": 2.3356, "step": 195000},
    {"epoch": 1.47, "learning_rate": 2.5556572468823692e-05, "loss": 2.3319, "step": 195500},
    {"epoch": 1.47, "learning_rate": 2.5494057308897408e-05, "loss": 2.3383, "step": 196000},
    {"epoch": 1.47, "learning_rate": 2.5431542148971127e-05, "loss": 2.3365, "step": 196500},
    {"epoch": 1.48, "learning_rate": 2.5369026989044846e-05, "loss": 2.3362, "step": 197000},
    {"epoch": 1.48, "learning_rate": 2.5306511829118562e-05, "loss": 2.3269, "step": 197500},
    {"epoch": 1.49, "learning_rate": 2.524399666919228e-05, "loss": 2.3298, "step": 198000},
    {"epoch": 1.49, "learning_rate": 2.5181481509265997e-05, "loss": 2.3368, "step": 198500},
    {"epoch": 1.49, "learning_rate": 2.5118966349339716e-05, "loss": 2.3355, "step": 199000},
    {"epoch": 1.5, "learning_rate": 2.5056451189413432e-05, "loss": 2.3224, "step": 199500},
    {"epoch": 1.5, "learning_rate": 2.499393602948715e-05, "loss": 2.3319, "step": 200000},
    {"epoch": 1.5, "learning_rate": 2.493142086956087e-05, "loss": 2.3264, "step": 200500},
    {"epoch": 1.51, "learning_rate": 2.4868905709634586e-05, "loss": 2.3281, "step": 201000},
    {"epoch": 1.51, "learning_rate": 2.4806390549708306e-05, "loss": 2.3341, "step": 201500},
    {"epoch": 1.52, "learning_rate": 2.474387538978202e-05, "loss": 2.3218, "step": 202000},
    {"epoch": 1.52, "learning_rate": 2.468136022985574e-05, "loss": 2.3295, "step": 202500},
    {"epoch": 1.52, "learning_rate": 2.461884506992946e-05, "loss": 2.3328, "step": 203000},
    {"epoch": 1.53, "learning_rate": 2.4556329910003176e-05, "loss": 2.321, "step": 203500},
    {"epoch": 1.53, "learning_rate": 2.4493814750076895e-05, "loss": 2.3264, "step": 204000},
    {"epoch": 1.53, "learning_rate": 2.4431299590150614e-05, "loss": 2.3341, "step": 204500},
    {"epoch": 1.54, "learning_rate": 2.436878443022433e-05, "loss": 2.329, "step": 205000},
    {"epoch": 1.54, "learning_rate": 2.430626927029805e-05, "loss": 2.3269, "step": 205500},
    {"epoch": 1.55, "learning_rate": 2.4243754110371765e-05, "loss": 2.3336, "step": 206000},
    {"epoch": 1.55, "learning_rate": 2.4181238950445484e-05, "loss": 2.328, "step": 206500},
    {"epoch": 1.55, "learning_rate": 2.41187237905192e-05, "loss": 2.3275, "step": 207000},
    {"epoch": 1.56, "learning_rate": 2.405620863059292e-05, "loss": 2.3214, "step": 207500},
    {"epoch": 1.56, "learning_rate": 2.3993693470666635e-05, "loss": 2.327, "step": 208000},
    {"epoch": 1.56, "learning_rate": 2.3931178310740358e-05, "loss": 2.326, "step": 208500},
    {"epoch": 1.57, "learning_rate": 2.3868663150814073e-05, "loss": 2.3222, "step": 209000},
    {"epoch": 1.57, "learning_rate": 2.3806147990887793e-05, "loss": 2.3163, "step": 209500},
    {"epoch": 1.58, "learning_rate": 2.374363283096151e-05, "loss": 2.3281, "step": 210000},
    {"epoch": 1.58, "learning_rate": 2.3681117671035228e-05, "loss": 2.3254, "step": 210500},
    {"epoch": 1.58, "learning_rate": 2.3618602511108943e-05, "loss": 2.3225, "step": 211000},
    {"epoch": 1.59, "learning_rate": 2.3556087351182663e-05, "loss": 2.324, "step": 211500},
    {"epoch": 1.59, "learning_rate": 2.349357219125638e-05, "loss": 2.3289, "step": 212000},
    {"epoch": 1.59, "learning_rate": 2.3431057031330098e-05, "loss": 2.3195, "step": 212500},
    {"epoch": 1.6, "learning_rate": 2.3368541871403817e-05, "loss": 2.3264, "step": 213000},
    {"epoch": 1.6, "learning_rate": 2.3306026711477536e-05, "loss": 2.3236, "step": 213500},
    {"epoch": 1.61, "learning_rate": 2.3243511551551252e-05, "loss": 2.323, "step": 214000},
    {"epoch": 1.61, "learning_rate": 2.318099639162497e-05, "loss": 2.3205, "step": 214500},
    {"epoch": 1.61, "learning_rate": 2.3118481231698687e-05, "loss": 2.3178, "step": 215000},
    {"epoch": 1.62, "learning_rate": 2.3055966071772406e-05, "loss": 2.323, "step": 215500},
    {"epoch": 1.62, "learning_rate": 2.2993450911846122e-05, "loss": 2.3263, "step": 216000},
    {"epoch": 1.62, "learning_rate": 2.293093575191984e-05, "loss": 2.3157, "step": 216500},
    {"epoch": 1.63, "learning_rate": 2.2868420591993557e-05, "loss": 2.315, "step": 217000},
    {"epoch": 1.63, "learning_rate": 2.2805905432067276e-05, "loss": 2.3227, "step": 217500},
    {"epoch": 1.64, "learning_rate": 2.2743390272140995e-05, "loss": 2.3159, "step": 218000},
    {"epoch": 1.64, "learning_rate": 2.2680875112214715e-05, "loss": 2.3293, "step": 218500},
    {"epoch": 1.64, "learning_rate": 2.261835995228843e-05, "loss": 2.3159, "step": 219000},
    {"epoch": 1.65, "learning_rate": 2.255584479236215e-05, "loss": 2.3207, "step": 219500},
    {"epoch": 1.65, "learning_rate": 2.2493329632435866e-05, "loss": 2.324, "step": 220000},
    {"epoch": 1.65, "learning_rate": 2.2430814472509585e-05, "loss": 2.3225, "step": 220500},
    {"epoch": 1.66, "learning_rate": 2.23682993125833e-05, "loss": 2.3157, "step": 221000},
    {"epoch": 1.66, "learning_rate": 2.230578415265702e-05, "loss": 2.322, "step": 221500},
    {"epoch": 1.67, "learning_rate": 2.2243268992730736e-05, "loss": 2.3247, "step": 222000},
    {"epoch": 1.67, "learning_rate": 2.2180753832804458e-05, "loss": 2.3174, "step": 222500},
    {"epoch": 1.67, "learning_rate": 2.2118238672878174e-05, "loss": 2.3216, "step": 223000},
    {"epoch": 1.68, "learning_rate": 2.2055723512951893e-05, "loss": 2.3159, "step": 223500},
    {"epoch": 1.68, "learning_rate": 2.199320835302561e-05, "loss": 2.3102, "step": 224000},
    {"epoch": 1.68, "learning_rate": 2.1930693193099328e-05, "loss": 2.3158, "step": 224500},
    {"epoch": 1.69, "learning_rate": 2.1868178033173044e-05, "loss": 2.3171, "step": 225000},
    {"epoch": 1.69, "learning_rate": 2.1805662873246763e-05, "loss": 2.3256, "step": 225500},
    {"epoch": 1.7, "learning_rate": 2.174314771332048e-05, "loss": 2.3192, "step": 226000},
    {"epoch": 1.7, "learning_rate": 2.16806325533942e-05, "loss": 2.3072, "step": 226500},
    {"epoch": 1.7, "learning_rate": 2.1618117393467918e-05, "loss": 2.3089, "step": 227000},
    {"epoch": 1.71, "learning_rate": 2.1555602233541637e-05, "loss": 2.3192, "step": 227500},
    {"epoch": 1.71, "learning_rate": 2.1493087073615353e-05, "loss": 2.3121, "step": 228000},
    {"epoch": 1.71, "learning_rate": 2.1430571913689072e-05, "loss": 2.3161, "step": 228500},
    {"epoch": 1.72, "learning_rate": 2.1368056753762788e-05, "loss": 2.3234, "step": 229000},
    {"epoch": 1.72, "learning_rate": 2.1305541593836507e-05, "loss": 2.3138, "step": 229500},
    {"epoch": 1.73, "learning_rate": 2.1243026433910223e-05, "loss": 2.3196, "step": 230000},
    {"epoch": 1.73, "learning_rate": 2.1180511273983942e-05, "loss": 2.3168, "step": 230500},
    {"epoch": 1.73, "learning_rate": 2.1117996114057658e-05, "loss": 2.3173, "step": 231000},
    {"epoch": 1.74, "learning_rate": 2.105548095413138e-05, "loss": 2.3104, "step": 231500},
    {"epoch": 1.74, "learning_rate": 2.0992965794205096e-05, "loss": 2.3183, "step": 232000},
    {"epoch": 1.74, "learning_rate": 2.0930450634278815e-05, "loss": 2.3159, "step": 232500},
    {"epoch": 1.75, "learning_rate": 2.086793547435253e-05, "loss": 2.3198, "step": 233000},
    {"epoch": 1.75, "learning_rate": 2.080542031442625e-05, "loss": 2.3061, "step": 233500},
    {"epoch": 1.76, "learning_rate": 2.0742905154499966e-05, "loss": 2.3149, "step": 234000},
    {"epoch": 1.76, "learning_rate": 2.0680389994573685e-05, "loss": 2.3041, "step": 234500},
    {"epoch": 1.76, "learning_rate": 2.06178748346474e-05, "loss": 2.315, "step": 235000},
    {"epoch": 1.77, "learning_rate": 2.055535967472112e-05, "loss": 2.3093, "step": 235500},
    {"epoch": 1.77, "learning_rate": 2.049284451479484e-05, "loss": 2.3165, "step": 236000},
    {"epoch": 1.77, "learning_rate": 2.043032935486856e-05, "loss": 2.3114, "step": 236500},
    {"epoch": 1.78, "learning_rate": 2.0367814194942275e-05, "loss": 2.3116, "step": 237000},
    {"epoch": 1.78, "learning_rate": 2.0305299035015994e-05, "loss": 2.3115, "step": 237500},
    {"epoch": 1.79, "learning_rate": 2.024278387508971e-05, "loss": 2.3022, "step": 238000},
    {"epoch": 1.79, "learning_rate": 2.018026871516343e-05, "loss": 2.31, "step": 238500},
    {"epoch": 1.79, "learning_rate": 2.0117753555237145e-05, "loss": 2.315, "step": 239000},
    {"epoch": 1.8, "learning_rate": 2.0055238395310864e-05, "loss": 2.3043, "step": 239500},
    {"epoch": 1.8, "learning_rate": 1.999272323538458e-05, "loss": 2.3068, "step": 240000},
    {"epoch": 1.8, "learning_rate": 1.9930208075458302e-05, "loss": 2.3088, "step": 240500},
    {"epoch": 1.81, "learning_rate": 1.9867692915532018e-05, "loss": 2.3038, "step": 241000},
    {"epoch": 1.81, "learning_rate": 1.9805177755605737e-05, "loss": 2.3168, "step": 241500},
    {"epoch": 1.82, "learning_rate": 1.9742662595679453e-05, "loss": 2.3095, "step": 242000},
    {"epoch": 1.82, "learning_rate": 1.9680147435753172e-05, "loss": 2.3058, "step": 242500},
    {"epoch": 1.82, "learning_rate": 1.9617632275826888e-05, "loss": 2.3037, "step": 243000},
    {"epoch": 1.83, "learning_rate": 1.9555117115900607e-05, "loss": 2.3066, "step": 243500},
    {"epoch": 1.83, "learning_rate": 1.9492601955974323e-05, "loss": 2.3139, "step": 244000},
    {"epoch": 1.83, "learning_rate": 1.9430086796048043e-05, "loss": 2.3105, "step": 244500},
    {"epoch": 1.84, "learning_rate": 1.9367571636121762e-05, "loss": 2.3067, "step": 245000},
    {"epoch": 1.84, "learning_rate": 1.930505647619548e-05, "loss": 2.3068, "step": 245500},
    {"epoch": 1.85, "learning_rate": 1.9242541316269197e-05, "loss": 2.3057, "step": 246000},
    {"epoch": 1.85, "learning_rate": 1.9180026156342916e-05, "loss": 2.299, "step": 246500},
    {"epoch": 1.85, "learning_rate": 1.9117510996416632e-05, "loss": 2.3132, "step": 247000},
    {"epoch": 1.86, "learning_rate": 1.905499583649035e-05, "loss": 2.3124, "step": 247500},
    {"epoch": 1.86, "learning_rate": 1.8992480676564067e-05, "loss": 2.3119, "step": 248000},
    {"epoch": 1.86, "learning_rate": 1.8929965516637786e-05, "loss": 2.3021, "step": 248500},
    {"epoch": 1.87, "learning_rate": 1.8867450356711502e-05, "loss": 2.3104, "step": 249000},
    {"epoch": 1.87, "learning_rate": 1.880493519678522e-05, "loss": 2.3064, "step": 249500},
    {"epoch": 1.88, "learning_rate": 1.874242003685894e-05, "loss": 2.3066, "step": 250000},
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.867990487693266e-05, |
|
"loss": 2.3061, |
|
"step": 250500 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.8617389717006375e-05, |
|
"loss": 2.3072, |
|
"step": 251000 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.8554874557080095e-05, |
|
"loss": 2.3017, |
|
"step": 251500 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.849235939715381e-05, |
|
"loss": 2.3019, |
|
"step": 252000 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.842984423722753e-05, |
|
"loss": 2.3053, |
|
"step": 252500 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.8367329077301245e-05, |
|
"loss": 2.3062, |
|
"step": 253000 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.8304813917374965e-05, |
|
"loss": 2.3047, |
|
"step": 253500 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.824229875744868e-05, |
|
"loss": 2.3039, |
|
"step": 254000 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.81797835975224e-05, |
|
"loss": 2.2999, |
|
"step": 254500 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.811726843759612e-05, |
|
"loss": 2.3035, |
|
"step": 255000 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.8054753277669838e-05, |
|
"loss": 2.2916, |
|
"step": 255500 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.7992238117743554e-05, |
|
"loss": 2.3126, |
|
"step": 256000 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.7929722957817273e-05, |
|
"loss": 2.3046, |
|
"step": 256500 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.786720779789099e-05, |
|
"loss": 2.3017, |
|
"step": 257000 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.7804692637964708e-05, |
|
"loss": 2.301, |
|
"step": 257500 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.7742177478038424e-05, |
|
"loss": 2.3001, |
|
"step": 258000 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.7679662318112143e-05, |
|
"loss": 2.3012, |
|
"step": 258500 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.7617147158185862e-05, |
|
"loss": 2.3032, |
|
"step": 259000 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.7554631998259578e-05, |
|
"loss": 2.3016, |
|
"step": 259500 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.7492116838333297e-05, |
|
"loss": 2.2979, |
|
"step": 260000 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.7429601678407013e-05, |
|
"loss": 2.2989, |
|
"step": 260500 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.7367086518480732e-05, |
|
"loss": 2.3077, |
|
"step": 261000 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.730457135855445e-05, |
|
"loss": 2.2988, |
|
"step": 261500 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.7242056198628167e-05, |
|
"loss": 2.2929, |
|
"step": 262000 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.7179541038701887e-05, |
|
"loss": 2.2994, |
|
"step": 262500 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.7117025878775602e-05, |
|
"loss": 2.3036, |
|
"step": 263000 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.7054510718849322e-05, |
|
"loss": 2.304, |
|
"step": 263500 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.699199555892304e-05, |
|
"loss": 2.2941, |
|
"step": 264000 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.6929480398996757e-05, |
|
"loss": 2.3031, |
|
"step": 264500 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.6866965239070476e-05, |
|
"loss": 2.2983, |
|
"step": 265000 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.6804450079144192e-05, |
|
"loss": 2.2997, |
|
"step": 265500 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.674193491921791e-05, |
|
"loss": 2.2997, |
|
"step": 266000 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.6679419759291627e-05, |
|
"loss": 2.2973, |
|
"step": 266500 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.6616904599365346e-05, |
|
"loss": 2.3048, |
|
"step": 267000 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.6554389439439065e-05, |
|
"loss": 2.3008, |
|
"step": 267500 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.6491874279512784e-05, |
|
"loss": 2.2967, |
|
"step": 268000 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.64293591195865e-05, |
|
"loss": 2.2966, |
|
"step": 268500 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.636684395966022e-05, |
|
"loss": 2.2961, |
|
"step": 269000 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.6304328799733935e-05, |
|
"loss": 2.3014, |
|
"step": 269500 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.6241813639807654e-05, |
|
"loss": 2.301, |
|
"step": 270000 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.617929847988137e-05, |
|
"loss": 2.2893, |
|
"step": 270500 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.611678331995509e-05, |
|
"loss": 2.3008, |
|
"step": 271000 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.6054268160028805e-05, |
|
"loss": 2.2981, |
|
"step": 271500 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.5991753000102525e-05, |
|
"loss": 2.2929, |
|
"step": 272000 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.5929237840176244e-05, |
|
"loss": 2.2976, |
|
"step": 272500 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.5866722680249963e-05, |
|
"loss": 2.3032, |
|
"step": 273000 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.580420752032368e-05, |
|
"loss": 2.2971, |
|
"step": 273500 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.5741692360397398e-05, |
|
"loss": 2.2909, |
|
"step": 274000 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.5679177200471114e-05, |
|
"loss": 2.2959, |
|
"step": 274500 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.5616662040544833e-05, |
|
"loss": 2.3004, |
|
"step": 275000 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.555414688061855e-05, |
|
"loss": 2.2957, |
|
"step": 275500 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.5491631720692268e-05, |
|
"loss": 2.2994, |
|
"step": 276000 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.5429116560765984e-05, |
|
"loss": 2.3015, |
|
"step": 276500 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.5366601400839703e-05, |
|
"loss": 2.2936, |
|
"step": 277000 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.5304086240913422e-05, |
|
"loss": 2.2938, |
|
"step": 277500 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.5241571080987142e-05, |
|
"loss": 2.29, |
|
"step": 278000 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.5179055921060859e-05, |
|
"loss": 2.294, |
|
"step": 278500 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.5116540761134577e-05, |
|
"loss": 2.2925, |
|
"step": 279000 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.5054025601208294e-05, |
|
"loss": 2.3008, |
|
"step": 279500 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.4991510441282012e-05, |
|
"loss": 2.2904, |
|
"step": 280000 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.4928995281355729e-05, |
|
"loss": 2.2996, |
|
"step": 280500 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.4866480121429447e-05, |
|
"loss": 2.2959, |
|
"step": 281000 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.4803964961503164e-05, |
|
"loss": 2.2992, |
|
"step": 281500 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.4741449801576885e-05, |
|
"loss": 2.2892, |
|
"step": 282000 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.4678934641650603e-05, |
|
"loss": 2.2916, |
|
"step": 282500 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.461641948172432e-05, |
|
"loss": 2.2867, |
|
"step": 283000 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.4553904321798038e-05, |
|
"loss": 2.2962, |
|
"step": 283500 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.4491389161871755e-05, |
|
"loss": 2.2864, |
|
"step": 284000 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.4428874001945473e-05, |
|
"loss": 2.2887, |
|
"step": 284500 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.436635884201919e-05, |
|
"loss": 2.2956, |
|
"step": 285000 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.4303843682092908e-05, |
|
"loss": 2.2884, |
|
"step": 285500 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.4241328522166625e-05, |
|
"loss": 2.2907, |
|
"step": 286000 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.4178813362240344e-05, |
|
"loss": 2.2839, |
|
"step": 286500 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.4116298202314062e-05, |
|
"loss": 2.294, |
|
"step": 287000 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.4053783042387781e-05, |
|
"loss": 2.3017, |
|
"step": 287500 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.3991267882461499e-05, |
|
"loss": 2.2878, |
|
"step": 288000 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.3928752722535216e-05, |
|
"loss": 2.2891, |
|
"step": 288500 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.3866237562608934e-05, |
|
"loss": 2.2978, |
|
"step": 289000 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.3803722402682651e-05, |
|
"loss": 2.2937, |
|
"step": 289500 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.3741207242756369e-05, |
|
"loss": 2.2977, |
|
"step": 290000 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.3678692082830086e-05, |
|
"loss": 2.2862, |
|
"step": 290500 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.3616176922903805e-05, |
|
"loss": 2.2867, |
|
"step": 291000 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.3553661762977523e-05, |
|
"loss": 2.2924, |
|
"step": 291500 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.349114660305124e-05, |
|
"loss": 2.2885, |
|
"step": 292000 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.3428631443124958e-05, |
|
"loss": 2.2885, |
|
"step": 292500 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.3366116283198677e-05, |
|
"loss": 2.2838, |
|
"step": 293000 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.3303601123272395e-05, |
|
"loss": 2.2861, |
|
"step": 293500 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.3241085963346112e-05, |
|
"loss": 2.2968, |
|
"step": 294000 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.317857080341983e-05, |
|
"loss": 2.2935, |
|
"step": 294500 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.3116055643493547e-05, |
|
"loss": 2.284, |
|
"step": 295000 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.3053540483567266e-05, |
|
"loss": 2.2897, |
|
"step": 295500 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.2991025323640984e-05, |
|
"loss": 2.2949, |
|
"step": 296000 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.2928510163714702e-05, |
|
"loss": 2.2906, |
|
"step": 296500 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.2865995003788419e-05, |
|
"loss": 2.286, |
|
"step": 297000 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.2803479843862137e-05, |
|
"loss": 2.2901, |
|
"step": 297500 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.2740964683935854e-05, |
|
"loss": 2.2937, |
|
"step": 298000 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.2678449524009572e-05, |
|
"loss": 2.2933, |
|
"step": 298500 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.261593436408329e-05, |
|
"loss": 2.2915, |
|
"step": 299000 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.2553419204157008e-05, |
|
"loss": 2.2878, |
|
"step": 299500 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.2490904044230726e-05, |
|
"loss": 2.2847, |
|
"step": 300000 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.2428388884304443e-05, |
|
"loss": 2.2849, |
|
"step": 300500 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.2365873724378161e-05, |
|
"loss": 2.2817, |
|
"step": 301000 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.230335856445188e-05, |
|
"loss": 2.2855, |
|
"step": 301500 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.2240843404525598e-05, |
|
"loss": 2.2838, |
|
"step": 302000 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.2178328244599315e-05, |
|
"loss": 2.2938, |
|
"step": 302500 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.2115813084673033e-05, |
|
"loss": 2.2844, |
|
"step": 303000 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.2053297924746752e-05, |
|
"loss": 2.2904, |
|
"step": 303500 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.199078276482047e-05, |
|
"loss": 2.29, |
|
"step": 304000 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.1928267604894187e-05, |
|
"loss": 2.2808, |
|
"step": 304500 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.1865752444967904e-05, |
|
"loss": 2.2973, |
|
"step": 305000 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.1803237285041622e-05, |
|
"loss": 2.2854, |
|
"step": 305500 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.1740722125115341e-05, |
|
"loss": 2.2813, |
|
"step": 306000 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.1678206965189059e-05, |
|
"loss": 2.2843, |
|
"step": 306500 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.1615691805262776e-05, |
|
"loss": 2.29, |
|
"step": 307000 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.1553176645336494e-05, |
|
"loss": 2.2893, |
|
"step": 307500 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.1490661485410213e-05, |
|
"loss": 2.287, |
|
"step": 308000 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.142814632548393e-05, |
|
"loss": 2.2851, |
|
"step": 308500 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.1365631165557648e-05, |
|
"loss": 2.2882, |
|
"step": 309000 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.1303116005631365e-05, |
|
"loss": 2.2832, |
|
"step": 309500 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.1240600845705083e-05, |
|
"loss": 2.2819, |
|
"step": 310000 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.1178085685778802e-05, |
|
"loss": 2.2798, |
|
"step": 310500 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.111557052585252e-05, |
|
"loss": 2.2916, |
|
"step": 311000 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.1053055365926237e-05, |
|
"loss": 2.2811, |
|
"step": 311500 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.0990540205999955e-05, |
|
"loss": 2.2874, |
|
"step": 312000 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.0928025046073672e-05, |
|
"loss": 2.2855, |
|
"step": 312500 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.0865509886147391e-05, |
|
"loss": 2.2811, |
|
"step": 313000 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.0802994726221109e-05, |
|
"loss": 2.2877, |
|
"step": 313500 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.0740479566294826e-05, |
|
"loss": 2.2836, |
|
"step": 314000 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.0677964406368544e-05, |
|
"loss": 2.2814, |
|
"step": 314500 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.0615449246442263e-05, |
|
"loss": 2.2758, |
|
"step": 315000 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.055293408651598e-05, |
|
"loss": 2.2894, |
|
"step": 315500 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.0490418926589698e-05, |
|
"loss": 2.287, |
|
"step": 316000 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.0427903766663416e-05, |
|
"loss": 2.285, |
|
"step": 316500 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.0365388606737133e-05, |
|
"loss": 2.2871, |
|
"step": 317000 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.0302873446810852e-05, |
|
"loss": 2.2889, |
|
"step": 317500 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.024035828688457e-05, |
|
"loss": 2.285, |
|
"step": 318000 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.0177843126958287e-05, |
|
"loss": 2.2846, |
|
"step": 318500 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.0115327967032005e-05, |
|
"loss": 2.289, |
|
"step": 319000 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.0052812807105724e-05, |
|
"loss": 2.2798, |
|
"step": 319500 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 9.990297647179442e-06, |
|
"loss": 2.2855, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 9.92778248725316e-06, |
|
"loss": 2.2812, |
|
"step": 320500 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 9.865267327326877e-06, |
|
"loss": 2.2754, |
|
"step": 321000 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 9.802752167400594e-06, |
|
"loss": 2.2859, |
|
"step": 321500 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 9.740237007474313e-06, |
|
"loss": 2.2808, |
|
"step": 322000 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 9.677721847548031e-06, |
|
"loss": 2.2876, |
|
"step": 322500 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 9.615206687621749e-06, |
|
"loss": 2.2844, |
|
"step": 323000 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 9.552691527695466e-06, |
|
"loss": 2.281, |
|
"step": 323500 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 9.490176367769185e-06, |
|
"loss": 2.2884, |
|
"step": 324000 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 9.427661207842903e-06, |
|
"loss": 2.2924, |
|
"step": 324500 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 9.36514604791662e-06, |
|
"loss": 2.2894, |
|
"step": 325000 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 9.302630887990338e-06, |
|
"loss": 2.276, |
|
"step": 325500 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 9.240115728064055e-06, |
|
"loss": 2.2845, |
|
"step": 326000 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 9.177600568137775e-06, |
|
"loss": 2.2831, |
|
"step": 326500 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 9.115085408211492e-06, |
|
"loss": 2.2823, |
|
"step": 327000 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 9.05257024828521e-06, |
|
"loss": 2.2836, |
|
"step": 327500 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 8.990055088358927e-06, |
|
"loss": 2.285, |
|
"step": 328000 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 8.927539928432645e-06, |
|
"loss": 2.2762, |
|
"step": 328500 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 8.865024768506364e-06, |
|
"loss": 2.2816, |
|
"step": 329000 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 8.802509608580081e-06, |
|
"loss": 2.2831, |
|
"step": 329500 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 8.739994448653799e-06, |
|
"loss": 2.2868, |
|
"step": 330000 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 8.677479288727516e-06, |
|
"loss": 2.2786, |
|
"step": 330500 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 8.614964128801236e-06, |
|
"loss": 2.2704, |
|
"step": 331000 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 8.552448968874953e-06, |
|
"loss": 2.2809, |
|
"step": 331500 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 8.48993380894867e-06, |
|
"loss": 2.2797, |
|
"step": 332000 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 8.427418649022388e-06, |
|
"loss": 2.2781, |
|
"step": 332500 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 8.364903489096106e-06, |
|
"loss": 2.286, |
|
"step": 333000 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 8.302388329169825e-06, |
|
"loss": 2.2828, |
|
"step": 333500 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 8.239873169243542e-06, |
|
"loss": 2.277, |
|
"step": 334000 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 8.17735800931726e-06, |
|
"loss": 2.279, |
|
"step": 334500 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 8.114842849390977e-06, |
|
"loss": 2.2799, |
|
"step": 335000 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 8.052327689464697e-06, |
|
"loss": 2.2779, |
|
"step": 335500 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 7.989812529538414e-06, |
|
"loss": 2.282, |
|
"step": 336000 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 7.927297369612132e-06, |
|
"loss": 2.2873, |
|
"step": 336500 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 7.86478220968585e-06, |
|
"loss": 2.2757, |
|
"step": 337000 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 7.802267049759567e-06, |
|
"loss": 2.277, |
|
"step": 337500 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 7.739751889833286e-06, |
|
"loss": 2.2801, |
|
"step": 338000 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 7.677236729907003e-06, |
|
"loss": 2.2759, |
|
"step": 338500 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 7.614721569980721e-06, |
|
"loss": 2.2865, |
|
"step": 339000 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.5522064100544384e-06, |
|
"loss": 2.2843, |
|
"step": 339500 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.489691250128156e-06, |
|
"loss": 2.2792, |
|
"step": 340000 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.427176090201874e-06, |
|
"loss": 2.2769, |
|
"step": 340500 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 7.364660930275593e-06, |
|
"loss": 2.2785, |
|
"step": 341000 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 7.30214577034931e-06, |
|
"loss": 2.2858, |
|
"step": 341500 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 7.239630610423028e-06, |
|
"loss": 2.2793, |
|
"step": 342000 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 7.177115450496746e-06, |
|
"loss": 2.2856, |
|
"step": 342500 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 7.114600290570464e-06, |
|
"loss": 2.2804, |
|
"step": 343000 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 7.052085130644181e-06, |
|
"loss": 2.283, |
|
"step": 343500 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 6.9895699707178995e-06, |
|
"loss": 2.276, |
|
"step": 344000 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 6.927054810791617e-06, |
|
"loss": 2.2794, |
|
"step": 344500 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 6.864539650865335e-06, |
|
"loss": 2.2722, |
|
"step": 345000 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 6.802024490939053e-06, |
|
"loss": 2.2767, |
|
"step": 345500 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 6.73950933101277e-06, |
|
"loss": 2.2817, |
|
"step": 346000 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 6.676994171086489e-06, |
|
"loss": 2.2796, |
|
"step": 346500 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 6.614479011160207e-06, |
|
"loss": 2.2786, |
|
"step": 347000 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 6.551963851233925e-06, |
|
"loss": 2.2754, |
|
"step": 347500 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 6.489448691307642e-06, |
|
"loss": 2.2805, |
|
"step": 348000 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 6.42693353138136e-06, |
|
"loss": 2.2731, |
|
"step": 348500 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 6.364418371455077e-06, |
|
"loss": 2.279, |
|
"step": 349000 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 6.301903211528796e-06, |
|
"loss": 2.2816, |
|
"step": 349500 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 6.239388051602514e-06, |
|
"loss": 2.2716, |
|
"step": 350000 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 6.176872891676231e-06, |
|
"loss": 2.2827, |
|
"step": 350500 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 6.11435773174995e-06, |
|
"loss": 2.2727, |
|
"step": 351000 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 6.051842571823667e-06, |
|
"loss": 2.2754, |
|
"step": 351500 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 5.989327411897385e-06, |
|
"loss": 2.2817, |
|
"step": 352000 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 5.926812251971103e-06, |
|
"loss": 2.2804, |
|
"step": 352500 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.864297092044821e-06, |
|
"loss": 2.2824, |
|
"step": 353000 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.801781932118539e-06, |
|
"loss": 2.2786, |
|
"step": 353500 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 5.739266772192257e-06, |
|
"loss": 2.276, |
|
"step": 354000 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 5.676751612265975e-06, |
|
"loss": 2.2795, |
|
"step": 354500 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 5.6142364523396925e-06, |
|
"loss": 2.2825, |
|
"step": 355000 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.551721292413411e-06, |
|
"loss": 2.2811, |
|
"step": 355500 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.489206132487128e-06, |
|
"loss": 2.2778, |
|
"step": 356000 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.426690972560846e-06, |
|
"loss": 2.2799, |
|
"step": 356500 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.364175812634564e-06, |
|
"loss": 2.2752, |
|
"step": 357000 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.301660652708282e-06, |
|
"loss": 2.2759, |
|
"step": 357500 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.239145492782e-06, |
|
"loss": 2.2756, |
|
"step": 358000 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.176630332855718e-06, |
|
"loss": 2.2811, |
|
"step": 358500 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.114115172929436e-06, |
|
"loss": 2.2845, |
|
"step": 359000 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.0516000130031535e-06, |
|
"loss": 2.2721, |
|
"step": 359500 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.989084853076871e-06, |
|
"loss": 2.2685, |
|
"step": 360000 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.926569693150589e-06, |
|
"loss": 2.2756, |
|
"step": 360500 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.864054533224307e-06, |
|
"loss": 2.2815, |
|
"step": 361000 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.801539373298025e-06, |
|
"loss": 2.2715, |
|
"step": 361500 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.739024213371743e-06, |
|
"loss": 2.2858, |
|
"step": 362000 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.676509053445461e-06, |
|
"loss": 2.2809, |
|
"step": 362500 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.613993893519179e-06, |
|
"loss": 2.2768, |
|
"step": 363000 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.551478733592897e-06, |
|
"loss": 2.278, |
|
"step": 363500 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.4889635736666145e-06, |
|
"loss": 2.2864, |
|
"step": 364000 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.426448413740332e-06, |
|
"loss": 2.2738, |
|
"step": 364500 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.36393325381405e-06, |
|
"loss": 2.2772, |
|
"step": 365000 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.301418093887768e-06, |
|
"loss": 2.2811, |
|
"step": 365500 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.238902933961486e-06, |
|
"loss": 2.2811, |
|
"step": 366000 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.176387774035204e-06, |
|
"loss": 2.2818, |
|
"step": 366500 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.113872614108922e-06, |
|
"loss": 2.2637, |
|
"step": 367000 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.05135745418264e-06, |
|
"loss": 2.2835, |
|
"step": 367500 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.988842294256357e-06, |
|
"loss": 2.2688, |
|
"step": 368000 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.9263271343300756e-06, |
|
"loss": 2.28, |
|
"step": 368500 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.863811974403793e-06, |
|
"loss": 2.2771, |
|
"step": 369000 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.801296814477511e-06, |
|
"loss": 2.2742, |
|
"step": 369500 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.7387816545512285e-06, |
|
"loss": 2.2822, |
|
"step": 370000 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.676266494624947e-06, |
|
"loss": 2.2734, |
|
"step": 370500 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.6137513346986644e-06, |
|
"loss": 2.2708, |
|
"step": 371000 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.551236174772382e-06, |
|
"loss": 2.2749, |
|
"step": 371500 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.4887210148461003e-06, |
|
"loss": 2.2761, |
|
"step": 372000 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.426205854919818e-06, |
|
"loss": 2.2793, |
|
"step": 372500 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.363690694993536e-06, |
|
"loss": 2.2698, |
|
"step": 373000 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.3011755350672537e-06, |
|
"loss": 2.2728, |
|
"step": 373500 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.238660375140972e-06, |
|
"loss": 2.2754, |
|
"step": 374000 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.1761452152146896e-06, |
|
"loss": 2.2701, |
|
"step": 374500 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.1136300552884075e-06, |
|
"loss": 2.2873, |
|
"step": 375000 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 3.0511148953621255e-06, |
|
"loss": 2.2705, |
|
"step": 375500 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.9885997354358434e-06, |
|
"loss": 2.2702, |
|
"step": 376000 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.9260845755095613e-06, |
|
"loss": 2.2764, |
|
"step": 376500 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.8635694155832793e-06, |
|
"loss": 2.2756, |
|
"step": 377000 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.801054255656997e-06, |
|
"loss": 2.2815, |
|
"step": 377500 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.7385390957307147e-06, |
|
"loss": 2.2751, |
|
"step": 378000 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.6760239358044327e-06, |
|
"loss": 2.2744, |
|
"step": 378500 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.6135087758781506e-06, |
|
"loss": 2.2773, |
|
"step": 379000 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.5509936159518686e-06, |
|
"loss": 2.2723, |
|
"step": 379500 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.4884784560255865e-06, |
|
"loss": 2.2701, |
|
"step": 380000 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.4259632960993044e-06, |
|
"loss": 2.2856, |
|
"step": 380500 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.3634481361730224e-06, |
|
"loss": 2.2775, |
|
"step": 381000 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.30093297624674e-06, |
|
"loss": 2.2772, |
|
"step": 381500 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.238417816320458e-06, |
|
"loss": 2.2791, |
|
"step": 382000 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.1759026563941758e-06, |
|
"loss": 2.2739, |
|
"step": 382500 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.1133874964678937e-06, |
|
"loss": 2.276, |
|
"step": 383000 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 2.0508723365416117e-06, |
|
"loss": 2.2718, |
|
"step": 383500 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.988357176615329e-06, |
|
"loss": 2.281, |
|
"step": 384000 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.925842016689047e-06, |
|
"loss": 2.2739, |
|
"step": 384500 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.8633268567627653e-06, |
|
"loss": 2.2727, |
|
"step": 385000 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.8008116968364828e-06, |
|
"loss": 2.2735, |
|
"step": 385500 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.7382965369102007e-06, |
|
"loss": 2.2743, |
|
"step": 386000 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.6757813769839187e-06, |
|
"loss": 2.2765, |
|
"step": 386500 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.6132662170576366e-06, |
|
"loss": 2.2762, |
|
"step": 387000 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.5507510571313545e-06, |
|
"loss": 2.2724, |
|
"step": 387500 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.4882358972050723e-06, |
|
"loss": 2.2745, |
|
"step": 388000 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.4257207372787902e-06, |
|
"loss": 2.2701, |
|
"step": 388500 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.363205577352508e-06, |
|
"loss": 2.2706, |
|
"step": 389000 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.3006904174262259e-06, |
|
"loss": 2.2807, |
|
"step": 389500 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.2381752574999438e-06, |
|
"loss": 2.2741, |
|
"step": 390000 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.1756600975736615e-06, |
|
"loss": 2.2755, |
|
"step": 390500 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.1131449376473795e-06, |
|
"loss": 2.2738, |
|
"step": 391000 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.0506297777210974e-06, |
|
"loss": 2.2704, |
|
"step": 391500 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 9.881146177948154e-07, |
|
"loss": 2.2723, |
|
"step": 392000 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 9.255994578685331e-07, |
|
"loss": 2.2792, |
|
"step": 392500 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 8.63084297942251e-07, |
|
"loss": 2.2723, |
|
"step": 393000 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 8.00569138015969e-07, |
|
"loss": 2.269, |
|
"step": 393500 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 7.380539780896868e-07, |
|
"loss": 2.2743, |
|
"step": 394000 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 6.755388181634046e-07, |
|
"loss": 2.278, |
|
"step": 394500 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 6.130236582371225e-07, |
|
"loss": 2.2656, |
|
"step": 395000 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.505084983108404e-07, |
|
"loss": 2.272, |
|
"step": 395500 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.879933383845583e-07, |
|
"loss": 2.2746, |
|
"step": 396000 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.254781784582762e-07, |
|
"loss": 2.2748, |
|
"step": 396500 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.6296301853199403e-07, |
|
"loss": 2.2775, |
|
"step": 397000 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.0044785860571186e-07, |
|
"loss": 2.2755, |
|
"step": 397500 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 2.3793269867942975e-07, |
|
"loss": 2.2716, |
|
"step": 398000 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.7541753875314766e-07, |
|
"loss": 2.2723, |
|
"step": 398500 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.1290237882686551e-07, |
|
"loss": 2.2753, |
|
"step": 399000 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 5.038721890058339e-08, |
|
"loss": 2.274, |
|
"step": 399500 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 399903, |
|
"total_flos": 3931191705600000.0, |
|
"train_loss": 0.30394269704031157, |
|
"train_runtime": 3702.8677, |
|
"train_samples_per_second": 1727.965, |
|
"train_steps_per_second": 107.998 |
|
} |
|
], |
|
"logging_steps": 500, |
|
"max_steps": 399903, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"total_flos": 3931191705600000.0, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|