|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "global_step": 145677,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 4.982838745992847e-05, "loss": 3.5304, "step": 500 },
    { "epoch": 0.02, "learning_rate": 4.965677491985695e-05, "loss": 2.5735, "step": 1000 },
    { "epoch": 0.03, "learning_rate": 4.948516237978542e-05, "loss": 2.2667, "step": 1500 },
    { "epoch": 0.04, "learning_rate": 4.9313549839713894e-05, "loss": 2.0703, "step": 2000 },
    { "epoch": 0.05, "learning_rate": 4.914193729964236e-05, "loss": 1.9631, "step": 2500 },
    { "epoch": 0.06, "learning_rate": 4.897032475957083e-05, "loss": 1.8471, "step": 3000 },
    { "epoch": 0.07, "learning_rate": 4.87987122194993e-05, "loss": 1.7836, "step": 3500 },
    { "epoch": 0.08, "learning_rate": 4.862709967942778e-05, "loss": 1.7197, "step": 4000 },
    { "epoch": 0.09, "learning_rate": 4.845548713935625e-05, "loss": 1.6647, "step": 4500 },
    { "epoch": 0.1, "learning_rate": 4.8283874599284724e-05, "loss": 1.6346, "step": 5000 },
    { "epoch": 0.11, "learning_rate": 4.811226205921319e-05, "loss": 1.581, "step": 5500 },
    { "epoch": 0.12, "learning_rate": 4.794064951914167e-05, "loss": 1.5703, "step": 6000 },
    { "epoch": 0.13, "learning_rate": 4.776903697907014e-05, "loss": 1.5301, "step": 6500 },
    { "epoch": 0.14, "learning_rate": 4.759742443899861e-05, "loss": 1.4876, "step": 7000 },
    { "epoch": 0.15, "learning_rate": 4.742581189892708e-05, "loss": 1.497, "step": 7500 },
    { "epoch": 0.16, "learning_rate": 4.725419935885555e-05, "loss": 1.4551, "step": 8000 },
    { "epoch": 0.18, "learning_rate": 4.708258681878402e-05, "loss": 1.4338, "step": 8500 },
    { "epoch": 0.19, "learning_rate": 4.691097427871249e-05, "loss": 1.429, "step": 9000 },
    { "epoch": 0.2, "learning_rate": 4.673936173864097e-05, "loss": 1.4032, "step": 9500 },
    { "epoch": 0.21, "learning_rate": 4.656774919856944e-05, "loss": 1.4083, "step": 10000 },
    { "epoch": 0.22, "learning_rate": 4.6396136658497914e-05, "loss": 1.3653, "step": 10500 },
    { "epoch": 0.23, "learning_rate": 4.6224524118426383e-05, "loss": 1.35, "step": 11000 },
    { "epoch": 0.24, "learning_rate": 4.605291157835486e-05, "loss": 1.3492, "step": 11500 },
    { "epoch": 0.25, "learning_rate": 4.588129903828333e-05, "loss": 1.3277, "step": 12000 },
    { "epoch": 0.26, "learning_rate": 4.57096864982118e-05, "loss": 1.3122, "step": 12500 },
    { "epoch": 0.27, "learning_rate": 4.553807395814027e-05, "loss": 1.3207, "step": 13000 },
    { "epoch": 0.28, "learning_rate": 4.5366461418068744e-05, "loss": 1.3067, "step": 13500 },
    { "epoch": 0.29, "learning_rate": 4.5194848877997213e-05, "loss": 1.2932, "step": 14000 },
    { "epoch": 0.3, "learning_rate": 4.502323633792569e-05, "loss": 1.2696, "step": 14500 },
    { "epoch": 0.31, "learning_rate": 4.485162379785416e-05, "loss": 1.268, "step": 15000 },
    { "epoch": 0.32, "learning_rate": 4.4680011257782635e-05, "loss": 1.2721, "step": 15500 },
    { "epoch": 0.33, "learning_rate": 4.4508398717711105e-05, "loss": 1.2564, "step": 16000 },
    { "epoch": 0.34, "learning_rate": 4.4336786177639574e-05, "loss": 1.2382, "step": 16500 },
    { "epoch": 0.35, "learning_rate": 4.416517363756805e-05, "loss": 1.234, "step": 17000 },
    { "epoch": 0.36, "learning_rate": 4.399356109749652e-05, "loss": 1.2221, "step": 17500 },
    { "epoch": 0.37, "learning_rate": 4.382194855742499e-05, "loss": 1.2148, "step": 18000 },
    { "epoch": 0.38, "learning_rate": 4.365033601735346e-05, "loss": 1.1949, "step": 18500 },
    { "epoch": 0.39, "learning_rate": 4.3478723477281935e-05, "loss": 1.2081, "step": 19000 },
    { "epoch": 0.4, "learning_rate": 4.3307110937210404e-05, "loss": 1.2145, "step": 19500 },
    { "epoch": 0.41, "learning_rate": 4.313549839713888e-05, "loss": 1.198, "step": 20000 },
    { "epoch": 0.42, "learning_rate": 4.296388585706735e-05, "loss": 1.1809, "step": 20500 },
    { "epoch": 0.43, "learning_rate": 4.2792273316995826e-05, "loss": 1.1791, "step": 21000 },
    { "epoch": 0.44, "learning_rate": 4.2620660776924295e-05, "loss": 1.1718, "step": 21500 },
    { "epoch": 0.45, "learning_rate": 4.2449048236852765e-05, "loss": 1.1647, "step": 22000 },
    { "epoch": 0.46, "learning_rate": 4.2277435696781234e-05, "loss": 1.1682, "step": 22500 },
    { "epoch": 0.47, "learning_rate": 4.210582315670971e-05, "loss": 1.1564, "step": 23000 },
    { "epoch": 0.48, "learning_rate": 4.193421061663818e-05, "loss": 1.1496, "step": 23500 },
    { "epoch": 0.49, "learning_rate": 4.176259807656665e-05, "loss": 1.1494, "step": 24000 },
    { "epoch": 0.5, "learning_rate": 4.1590985536495125e-05, "loss": 1.1439, "step": 24500 },
    { "epoch": 0.51, "learning_rate": 4.1419372996423594e-05, "loss": 1.1381, "step": 25000 },
    { "epoch": 0.53, "learning_rate": 4.124776045635207e-05, "loss": 1.138, "step": 25500 },
    { "epoch": 0.54, "learning_rate": 4.107614791628054e-05, "loss": 1.1259, "step": 26000 },
    { "epoch": 0.55, "learning_rate": 4.0904535376209016e-05, "loss": 1.1294, "step": 26500 },
    { "epoch": 0.56, "learning_rate": 4.0732922836137486e-05, "loss": 1.1063, "step": 27000 },
    { "epoch": 0.57, "learning_rate": 4.0561310296065955e-05, "loss": 1.112, "step": 27500 },
    { "epoch": 0.58, "learning_rate": 4.0389697755994424e-05, "loss": 1.1154, "step": 28000 },
    { "epoch": 0.59, "learning_rate": 4.02180852159229e-05, "loss": 1.1084, "step": 28500 },
    { "epoch": 0.6, "learning_rate": 4.004647267585137e-05, "loss": 1.1031, "step": 29000 },
    { "epoch": 0.61, "learning_rate": 3.9874860135779846e-05, "loss": 1.0912, "step": 29500 },
    { "epoch": 0.62, "learning_rate": 3.9703247595708316e-05, "loss": 1.0828, "step": 30000 },
    { "epoch": 0.63, "learning_rate": 3.953163505563679e-05, "loss": 1.0678, "step": 30500 },
    { "epoch": 0.64, "learning_rate": 3.936002251556526e-05, "loss": 1.0891, "step": 31000 },
    { "epoch": 0.65, "learning_rate": 3.918840997549373e-05, "loss": 1.078, "step": 31500 },
    { "epoch": 0.66, "learning_rate": 3.90167974354222e-05, "loss": 1.0897, "step": 32000 },
    { "epoch": 0.67, "learning_rate": 3.884518489535067e-05, "loss": 1.0785, "step": 32500 },
    { "epoch": 0.68, "learning_rate": 3.8673572355279146e-05, "loss": 1.0629, "step": 33000 },
    { "epoch": 0.69, "learning_rate": 3.8501959815207615e-05, "loss": 1.0703, "step": 33500 },
    { "epoch": 0.7, "learning_rate": 3.833034727513609e-05, "loss": 1.076, "step": 34000 },
    { "epoch": 0.71, "learning_rate": 3.815873473506456e-05, "loss": 1.0835, "step": 34500 },
    { "epoch": 0.72, "learning_rate": 3.798712219499304e-05, "loss": 1.083, "step": 35000 },
    { "epoch": 0.73, "learning_rate": 3.7815509654921506e-05, "loss": 1.0627, "step": 35500 },
    { "epoch": 0.74, "learning_rate": 3.764389711484998e-05, "loss": 1.0594, "step": 36000 },
    { "epoch": 0.75, "learning_rate": 3.747228457477845e-05, "loss": 1.0574, "step": 36500 },
    { "epoch": 0.76, "learning_rate": 3.730067203470692e-05, "loss": 1.0481, "step": 37000 },
    { "epoch": 0.77, "learning_rate": 3.712905949463539e-05, "loss": 1.0365, "step": 37500 },
    { "epoch": 0.78, "learning_rate": 3.695744695456387e-05, "loss": 1.0382, "step": 38000 },
    { "epoch": 0.79, "learning_rate": 3.6785834414492336e-05, "loss": 1.0507, "step": 38500 },
    { "epoch": 0.8, "learning_rate": 3.661422187442081e-05, "loss": 1.0393, "step": 39000 },
    { "epoch": 0.81, "learning_rate": 3.644260933434928e-05, "loss": 1.0363, "step": 39500 },
    { "epoch": 0.82, "learning_rate": 3.627099679427775e-05, "loss": 1.0129, "step": 40000 },
    { "epoch": 0.83, "learning_rate": 3.609938425420623e-05, "loss": 1.0076, "step": 40500 },
    { "epoch": 0.84, "learning_rate": 3.59277717141347e-05, "loss": 1.0229, "step": 41000 },
    { "epoch": 0.85, "learning_rate": 3.575615917406317e-05, "loss": 1.0083, "step": 41500 },
    { "epoch": 0.86, "learning_rate": 3.558454663399164e-05, "loss": 1.0246, "step": 42000 },
    { "epoch": 0.88, "learning_rate": 3.541293409392011e-05, "loss": 1.0259, "step": 42500 },
    { "epoch": 0.89, "learning_rate": 3.524132155384858e-05, "loss": 1.0002, "step": 43000 },
    { "epoch": 0.9, "learning_rate": 3.506970901377706e-05, "loss": 0.9945, "step": 43500 },
    { "epoch": 0.91, "learning_rate": 3.489809647370553e-05, "loss": 1.008, "step": 44000 },
    { "epoch": 0.92, "learning_rate": 3.4726483933634e-05, "loss": 0.9999, "step": 44500 },
    { "epoch": 0.93, "learning_rate": 3.455487139356247e-05, "loss": 1.0142, "step": 45000 },
    { "epoch": 0.94, "learning_rate": 3.438325885349095e-05, "loss": 1.007, "step": 45500 },
    { "epoch": 0.95, "learning_rate": 3.421164631341942e-05, "loss": 1.0208, "step": 46000 },
    { "epoch": 0.96, "learning_rate": 3.404003377334789e-05, "loss": 0.9994, "step": 46500 },
    { "epoch": 0.97, "learning_rate": 3.386842123327636e-05, "loss": 0.9949, "step": 47000 },
    { "epoch": 0.98, "learning_rate": 3.3696808693204826e-05, "loss": 0.9969, "step": 47500 },
    { "epoch": 0.99, "learning_rate": 3.35251961531333e-05, "loss": 0.9769, "step": 48000 },
    { "epoch": 1.0, "learning_rate": 3.335358361306177e-05, "loss": 1.0008, "step": 48500 },
    { "epoch": 1.01, "learning_rate": 3.318197107299025e-05, "loss": 0.8865, "step": 49000 },
    { "epoch": 1.02, "learning_rate": 3.301035853291872e-05, "loss": 0.9015, "step": 49500 },
    { "epoch": 1.03, "learning_rate": 3.283874599284719e-05, "loss": 0.8867, "step": 50000 },
    { "epoch": 1.04, "learning_rate": 3.266713345277566e-05, "loss": 0.8908, "step": 50500 },
    { "epoch": 1.05, "learning_rate": 3.249552091270414e-05, "loss": 0.8774, "step": 51000 },
    { "epoch": 1.06, "learning_rate": 3.232390837263261e-05, "loss": 0.9031, "step": 51500 },
    { "epoch": 1.07, "learning_rate": 3.215229583256108e-05, "loss": 0.8783, "step": 52000 },
    { "epoch": 1.08, "learning_rate": 3.198068329248955e-05, "loss": 0.8944, "step": 52500 },
    { "epoch": 1.09, "learning_rate": 3.180907075241802e-05, "loss": 0.8765, "step": 53000 },
    { "epoch": 1.1, "learning_rate": 3.163745821234649e-05, "loss": 0.8839, "step": 53500 },
    { "epoch": 1.11, "learning_rate": 3.146584567227497e-05, "loss": 0.8951, "step": 54000 },
    { "epoch": 1.12, "learning_rate": 3.129423313220344e-05, "loss": 0.8842, "step": 54500 },
    { "epoch": 1.13, "learning_rate": 3.1122620592131915e-05, "loss": 0.8974, "step": 55000 },
    { "epoch": 1.14, "learning_rate": 3.0951008052060384e-05, "loss": 0.8904, "step": 55500 },
    { "epoch": 1.15, "learning_rate": 3.077939551198885e-05, "loss": 0.8863, "step": 56000 },
    { "epoch": 1.16, "learning_rate": 3.060778297191732e-05, "loss": 0.8853, "step": 56500 },
    { "epoch": 1.17, "learning_rate": 3.0436170431845795e-05, "loss": 0.8671, "step": 57000 },
    { "epoch": 1.18, "learning_rate": 3.026455789177427e-05, "loss": 0.8747, "step": 57500 },
    { "epoch": 1.19, "learning_rate": 3.0092945351702738e-05, "loss": 0.8638, "step": 58000 },
    { "epoch": 1.2, "learning_rate": 2.9921332811631214e-05, "loss": 0.8847, "step": 58500 },
    { "epoch": 1.22, "learning_rate": 2.9749720271559683e-05, "loss": 0.8689, "step": 59000 },
    { "epoch": 1.23, "learning_rate": 2.957810773148816e-05, "loss": 0.8938, "step": 59500 },
    { "epoch": 1.24, "learning_rate": 2.940649519141663e-05, "loss": 0.881, "step": 60000 },
    { "epoch": 1.25, "learning_rate": 2.92348826513451e-05, "loss": 0.8656, "step": 60500 },
    { "epoch": 1.26, "learning_rate": 2.906327011127357e-05, "loss": 0.8716, "step": 61000 },
    { "epoch": 1.27, "learning_rate": 2.8891657571202047e-05, "loss": 0.8758, "step": 61500 },
    { "epoch": 1.28, "learning_rate": 2.8720045031130517e-05, "loss": 0.8799, "step": 62000 },
    { "epoch": 1.29, "learning_rate": 2.854843249105899e-05, "loss": 0.8764, "step": 62500 },
    { "epoch": 1.3, "learning_rate": 2.837681995098746e-05, "loss": 0.8676, "step": 63000 },
    { "epoch": 1.31, "learning_rate": 2.8205207410915928e-05, "loss": 0.863, "step": 63500 },
    { "epoch": 1.32, "learning_rate": 2.8033594870844404e-05, "loss": 0.8655, "step": 64000 },
    { "epoch": 1.33, "learning_rate": 2.7861982330772874e-05, "loss": 0.8746, "step": 64500 },
    { "epoch": 1.34, "learning_rate": 2.7690369790701347e-05, "loss": 0.8717, "step": 65000 },
    { "epoch": 1.35, "learning_rate": 2.7518757250629816e-05, "loss": 0.8584, "step": 65500 },
    { "epoch": 1.36, "learning_rate": 2.7347144710558292e-05, "loss": 0.855, "step": 66000 },
    { "epoch": 1.37, "learning_rate": 2.717553217048676e-05, "loss": 0.8457, "step": 66500 },
    { "epoch": 1.38, "learning_rate": 2.7003919630415238e-05, "loss": 0.8595, "step": 67000 },
    { "epoch": 1.39, "learning_rate": 2.6832307090343707e-05, "loss": 0.8627, "step": 67500 },
    { "epoch": 1.4, "learning_rate": 2.666069455027218e-05, "loss": 0.8383, "step": 68000 },
    { "epoch": 1.41, "learning_rate": 2.648908201020065e-05, "loss": 0.8548, "step": 68500 },
    { "epoch": 1.42, "learning_rate": 2.6317469470129126e-05, "loss": 0.8652, "step": 69000 },
    { "epoch": 1.43, "learning_rate": 2.6145856930057595e-05, "loss": 0.8492, "step": 69500 },
    { "epoch": 1.44, "learning_rate": 2.5974244389986068e-05, "loss": 0.8574, "step": 70000 },
    { "epoch": 1.45, "learning_rate": 2.5802631849914537e-05, "loss": 0.8501, "step": 70500 },
    { "epoch": 1.46, "learning_rate": 2.5631019309843007e-05, "loss": 0.8352, "step": 71000 },
    { "epoch": 1.47, "learning_rate": 2.5459406769771483e-05, "loss": 0.8501, "step": 71500 },
    { "epoch": 1.48, "learning_rate": 2.5287794229699952e-05, "loss": 0.8536, "step": 72000 },
    { "epoch": 1.49, "learning_rate": 2.5116181689628425e-05, "loss": 0.8447, "step": 72500 },
    { "epoch": 1.5, "learning_rate": 2.4944569149556898e-05, "loss": 0.843, "step": 73000 },
    { "epoch": 1.51, "learning_rate": 2.477295660948537e-05, "loss": 0.8336, "step": 73500 },
    { "epoch": 1.52, "learning_rate": 2.4601344069413843e-05, "loss": 0.8264, "step": 74000 },
    { "epoch": 1.53, "learning_rate": 2.4429731529342316e-05, "loss": 0.8614, "step": 74500 },
    { "epoch": 1.54, "learning_rate": 2.4258118989270785e-05, "loss": 0.8553, "step": 75000 },
    { "epoch": 1.55, "learning_rate": 2.4086506449199255e-05, "loss": 0.8377, "step": 75500 },
    { "epoch": 1.57, "learning_rate": 2.3914893909127728e-05, "loss": 0.8406, "step": 76000 },
    { "epoch": 1.58, "learning_rate": 2.37432813690562e-05, "loss": 0.8421, "step": 76500 },
    { "epoch": 1.59, "learning_rate": 2.3571668828984673e-05, "loss": 0.8544, "step": 77000 },
    { "epoch": 1.6, "learning_rate": 2.3400056288913143e-05, "loss": 0.8355, "step": 77500 },
    { "epoch": 1.61, "learning_rate": 2.3228443748841615e-05, "loss": 0.8437, "step": 78000 },
    { "epoch": 1.62, "learning_rate": 2.3056831208770088e-05, "loss": 0.838, "step": 78500 },
    { "epoch": 1.63, "learning_rate": 2.288521866869856e-05, "loss": 0.8282, "step": 79000 },
    { "epoch": 1.64, "learning_rate": 2.2713606128627034e-05, "loss": 0.8325, "step": 79500 },
    { "epoch": 1.65, "learning_rate": 2.2541993588555503e-05, "loss": 0.8288, "step": 80000 },
    { "epoch": 1.66, "learning_rate": 2.2370381048483976e-05, "loss": 0.8347, "step": 80500 },
    { "epoch": 1.67, "learning_rate": 2.219876850841245e-05, "loss": 0.8241, "step": 81000 },
    { "epoch": 1.68, "learning_rate": 2.202715596834092e-05, "loss": 0.82, "step": 81500 },
    { "epoch": 1.69, "learning_rate": 2.1855543428269394e-05, "loss": 0.8377, "step": 82000 },
    { "epoch": 1.7, "learning_rate": 2.1683930888197864e-05, "loss": 0.8244, "step": 82500 },
    { "epoch": 1.71, "learning_rate": 2.1512318348126333e-05, "loss": 0.8263, "step": 83000 },
    { "epoch": 1.72, "learning_rate": 2.1340705808054806e-05, "loss": 0.8045, "step": 83500 },
    { "epoch": 1.73, "learning_rate": 2.116909326798328e-05, "loss": 0.8322, "step": 84000 },
    { "epoch": 1.74, "learning_rate": 2.099748072791175e-05, "loss": 0.8114, "step": 84500 },
    { "epoch": 1.75, "learning_rate": 2.082586818784022e-05, "loss": 0.8194, "step": 85000 },
    { "epoch": 1.76, "learning_rate": 2.0654255647768694e-05, "loss": 0.8434, "step": 85500 },
    { "epoch": 1.77, "learning_rate": 2.0482643107697167e-05, "loss": 0.8188, "step": 86000 },
    { "epoch": 1.78, "learning_rate": 2.031103056762564e-05, "loss": 0.8381, "step": 86500 },
    { "epoch": 1.79, "learning_rate": 2.0139418027554112e-05, "loss": 0.8249, "step": 87000 },
    { "epoch": 1.8, "learning_rate": 1.996780548748258e-05, "loss": 0.8063, "step": 87500 },
    { "epoch": 1.81, "learning_rate": 1.9796192947411054e-05, "loss": 0.7925, "step": 88000 },
    { "epoch": 1.82, "learning_rate": 1.9624580407339527e-05, "loss": 0.8193, "step": 88500 },
    { "epoch": 1.83, "learning_rate": 1.9452967867268e-05, "loss": 0.8094, "step": 89000 },
    { "epoch": 1.84, "learning_rate": 1.928135532719647e-05, "loss": 0.8145, "step": 89500 },
    { "epoch": 1.85, "learning_rate": 1.9109742787124942e-05, "loss": 0.8147, "step": 90000 },
    { "epoch": 1.86, "learning_rate": 1.893813024705341e-05, "loss": 0.8137, "step": 90500 },
    { "epoch": 1.87, "learning_rate": 1.8766517706981884e-05, "loss": 0.8083, "step": 91000 },
    { "epoch": 1.88, "learning_rate": 1.8594905166910357e-05, "loss": 0.8064, "step": 91500 },
    { "epoch": 1.89, "learning_rate": 1.842329262683883e-05, "loss": 0.8005, "step": 92000 },
    { "epoch": 1.9, "learning_rate": 1.82516800867673e-05, "loss": 0.8026, "step": 92500 },
    { "epoch": 1.92, "learning_rate": 1.8080067546695772e-05, "loss": 0.7973, "step": 93000 },
    { "epoch": 1.93, "learning_rate": 1.7908455006624245e-05, "loss": 0.8126, "step": 93500 },
    { "epoch": 1.94, "learning_rate": 1.7736842466552718e-05, "loss": 0.8142, "step": 94000 },
    { "epoch": 1.95, "learning_rate": 1.7565229926481187e-05, "loss": 0.8157, "step": 94500 },
    { "epoch": 1.96, "learning_rate": 1.739361738640966e-05, "loss": 0.8218, "step": 95000 },
    { "epoch": 1.97, "learning_rate": 1.7222004846338133e-05, "loss": 0.8054, "step": 95500 },
    { "epoch": 1.98, "learning_rate": 1.7050392306266605e-05, "loss": 0.8133, "step": 96000 },
    { "epoch": 1.99, "learning_rate": 1.6878779766195078e-05, "loss": 0.7944, "step": 96500 },
    { "epoch": 2.0, "learning_rate": 1.6707167226123548e-05, "loss": 0.8043, "step": 97000 },
    { "epoch": 2.01, "learning_rate": 1.653555468605202e-05, "loss": 0.7356, "step": 97500 },
    { "epoch": 2.02, "learning_rate": 1.6363942145980493e-05, "loss": 0.7092, "step": 98000 },
    { "epoch": 2.03, "learning_rate": 1.6192329605908963e-05, "loss": 0.7041, "step": 98500 },
    { "epoch": 2.04, "learning_rate": 1.6020717065837435e-05, "loss": 0.6975, "step": 99000 },
    { "epoch": 2.05, "learning_rate": 1.5849104525765908e-05, "loss": 0.7074, "step": 99500 },
    { "epoch": 2.06, "learning_rate": 1.5677491985694378e-05, "loss": 0.7055, "step": 100000 },
    { "epoch": 2.07, "learning_rate": 1.550587944562285e-05, "loss": 0.6926, "step": 100500 },
    { "epoch": 2.08, "learning_rate": 1.5334266905551323e-05, "loss": 0.7092, "step": 101000 },
    { "epoch": 2.09, "learning_rate": 1.5162654365479794e-05, "loss": 0.7026, "step": 101500 },
    { "epoch": 2.1, "learning_rate": 1.4991041825408267e-05, "loss": 0.7037, "step": 102000 },
    { "epoch": 2.11, "learning_rate": 1.4819429285336738e-05, "loss": 0.6968, "step": 102500 },
    { "epoch": 2.12, "learning_rate": 1.4647816745265211e-05, "loss": 0.7116, "step": 103000 },
    { "epoch": 2.13, "learning_rate": 1.4476204205193684e-05, "loss": 0.6982, "step": 103500 },
    { "epoch": 2.14, "learning_rate": 1.4304591665122155e-05, "loss": 0.6964, "step": 104000 },
    { "epoch": 2.15, "learning_rate": 1.4132979125050628e-05, "loss": 0.7047, "step": 104500 },
    { "epoch": 2.16, "learning_rate": 1.3961366584979099e-05, "loss": 0.7049, "step": 105000 },
    { "epoch": 2.17, "learning_rate": 1.3789754044907571e-05, "loss": 0.6941, "step": 105500 },
    { "epoch": 2.18, "learning_rate": 1.3618141504836043e-05, "loss": 0.6832, "step": 106000 },
    { "epoch": 2.19, "learning_rate": 1.3446528964764512e-05, "loss": 0.6849, "step": 106500 },
    { "epoch": 2.2, "learning_rate": 1.3274916424692985e-05, "loss": 0.7042, "step": 107000 },
    { "epoch": 2.21, "learning_rate": 1.3103303884621458e-05, "loss": 0.6899, "step": 107500 },
    { "epoch": 2.22, "learning_rate": 1.2931691344549929e-05, "loss": 0.6956, "step": 108000 },
    { "epoch": 2.23, "learning_rate": 1.2760078804478401e-05, "loss": 0.6953, "step": 108500 },
    { "epoch": 2.24, "learning_rate": 1.2588466264406873e-05, "loss": 0.6852, "step": 109000 },
    { "epoch": 2.25, "learning_rate": 1.2416853724335345e-05, "loss": 0.6945, "step": 109500 },
    { "epoch": 2.27, "learning_rate": 1.2245241184263816e-05, "loss": 0.7027, "step": 110000 },
    { "epoch": 2.28, "learning_rate": 1.207362864419229e-05, "loss": 0.6908, "step": 110500 },
    { "epoch": 2.29, "learning_rate": 1.1902016104120762e-05, "loss": 0.6943, "step": 111000 },
    { "epoch": 2.3, "learning_rate": 1.1730403564049233e-05, "loss": 0.6971, "step": 111500 },
    { "epoch": 2.31, "learning_rate": 1.1558791023977706e-05, "loss": 0.6771, "step": 112000 },
    { "epoch": 2.32, "learning_rate": 1.1387178483906175e-05, "loss": 0.7018, "step": 112500 },
    { "epoch": 2.33, "learning_rate": 1.1215565943834648e-05, "loss": 0.6815, "step": 113000 },
    { "epoch": 2.34, "learning_rate": 1.1043953403763121e-05, "loss": 0.7, "step": 113500 },
    { "epoch": 2.35, "learning_rate": 1.0872340863691592e-05, "loss": 0.6839, "step": 114000 },
    { "epoch": 2.36, "learning_rate": 1.0700728323620065e-05, "loss": 0.6852, "step": 114500 },
    { "epoch": 2.37, "learning_rate": 1.0529115783548536e-05, "loss": 0.6891, "step": 115000 },
    { "epoch": 2.38, "learning_rate": 1.0357503243477009e-05, "loss": 0.7009, "step": 115500 },
    { "epoch": 2.39, "learning_rate": 1.018589070340548e-05, "loss": 0.6836, "step": 116000 },
    { "epoch": 2.4, "learning_rate": 1.0014278163333951e-05, "loss": 0.6913, "step": 116500 },
    { "epoch": 2.41, "learning_rate": 9.842665623262424e-06, "loss": 0.6899, "step": 117000 },
    { "epoch": 2.42, "learning_rate": 9.671053083190895e-06, "loss": 0.6758, "step": 117500 },
    { "epoch": 2.43, "learning_rate": 9.499440543119368e-06, "loss": 0.6939, "step": 118000 },
    { "epoch": 2.44, "learning_rate": 9.327828003047839e-06, "loss": 0.6903, "step": 118500 },
    { "epoch": 2.45, "learning_rate": 9.156215462976311e-06, "loss": 0.676, "step": 119000 },
    { "epoch": 2.46, "learning_rate": 8.984602922904784e-06, "loss": 0.6786, "step": 119500 },
    { "epoch": 2.47, "learning_rate": 8.812990382833254e-06, "loss": 0.6834, "step": 120000 },
    { "epoch": 2.48, "learning_rate": 8.641377842761726e-06, "loss": 0.6845, "step": 120500 },
    { "epoch": 2.49, "learning_rate": 8.469765302690198e-06, "loss": 0.6727, "step": 121000 },
    { "epoch": 2.5, "learning_rate": 8.29815276261867e-06, "loss": 0.6865, "step": 121500 },
    { "epoch": 2.51, "learning_rate": 8.126540222547143e-06, "loss": 0.694, "step": 122000 },
    { "epoch": 2.52, "learning_rate": 7.954927682475614e-06, "loss": 0.684, "step": 122500 },
    { "epoch": 2.53, "learning_rate": 7.783315142404087e-06, "loss": 0.6815, "step": 123000 },
    { "epoch": 2.54, "learning_rate": 7.611702602332559e-06, "loss": 0.6764, "step": 123500 },
    { "epoch": 2.55, "learning_rate": 7.440090062261029e-06, "loss": 0.6937, "step": 124000 },
    { "epoch": 2.56, "learning_rate": 7.268477522189501e-06, "loss": 0.6806, "step": 124500 },
    { "epoch": 2.57, "learning_rate": 7.096864982117973e-06, "loss": 0.657, "step": 125000 },
    { "epoch": 2.58, "learning_rate": 6.925252442046445e-06, "loss": 0.6792, "step": 125500 },
    { "epoch": 2.59, "learning_rate": 6.753639901974918e-06, "loss": 0.6812, "step": 126000 },
    { "epoch": 2.61, "learning_rate": 6.58202736190339e-06, "loss": 0.6706, "step": 126500 },
    { "epoch": 2.62, "learning_rate": 6.410414821831862e-06, "loss": 0.6628, "step": 127000 },
    { "epoch": 2.63, "learning_rate": 6.238802281760333e-06, "loss": 0.6707, "step": 127500 },
    { "epoch": 2.64, "learning_rate": 6.067189741688805e-06, "loss": 0.6816, "step": 128000 },
    { "epoch": 2.65, "learning_rate": 5.895577201617277e-06, "loss": 0.6802, "step": 128500 },
    { "epoch": 2.66, "learning_rate": 5.723964661545749e-06, "loss": 0.6687, "step": 129000 },
    { "epoch": 2.67, "learning_rate": 5.5523521214742206e-06, "loss": 0.6747, "step": 129500 },
    { "epoch": 2.68, "learning_rate": 5.3807395814026925e-06, "loss": 0.6651, "step": 130000 },
    { "epoch": 2.69, "learning_rate": 5.2091270413311644e-06, "loss": 0.6869, "step": 130500 },
    { "epoch": 2.7, "learning_rate": 5.037514501259636e-06, "loss": 0.6724, "step": 131000 },
    { "epoch": 2.71, "learning_rate": 4.865901961188108e-06, "loss": 0.6694, "step": 131500 },
    { "epoch": 2.72, "learning_rate": 4.69428942111658e-06, "loss": 0.6801, "step": 132000 },
    { "epoch": 2.73, "learning_rate": 4.522676881045052e-06, "loss": 0.6707, "step": 132500 },
    { "epoch": 2.74, "learning_rate": 4.351064340973523e-06, "loss": 0.6892, "step": 133000 },
    { "epoch": 2.75, "learning_rate": 4.179451800901995e-06, "loss": 0.6518, "step": 133500 },
    { "epoch": 2.76, "learning_rate": 4.007839260830468e-06, "loss": 0.6689, "step": 134000 },
    { "epoch": 2.77, "learning_rate": 3.83622672075894e-06, "loss": 0.6786, "step": 134500 },
    { "epoch": 2.78, "learning_rate": 3.664614180687411e-06, "loss": 0.6705, "step": 135000 },
    { "epoch": 2.79, "learning_rate": 3.493001640615883e-06, "loss": 0.6758, "step": 135500 },
    { "epoch": 2.8, "learning_rate": 3.3213891005443554e-06, "loss": 0.6741, "step": 136000 },
    { "epoch": 2.81, "learning_rate": 3.1497765604728273e-06, "loss": 0.6552, "step": 136500 },
    { "epoch": 2.82, "learning_rate": 2.978164020401299e-06, "loss": 0.6708, "step": 137000 },
    { "epoch": 2.83, "learning_rate": 2.806551480329771e-06, "loss": 0.6608, "step": 137500 },
    { "epoch": 2.84, "learning_rate": 2.6349389402582427e-06, "loss": 0.6594, "step": 138000 },
    { "epoch": 2.85, "learning_rate": 2.4633264001867147e-06, "loss": 0.6703, "step": 138500 },
    { "epoch": 2.86, "learning_rate": 2.2917138601151866e-06, "loss": 0.6693, "step": 139000 },
    { "epoch": 2.87, "learning_rate": 2.120101320043658e-06, "loss": 0.6804, "step": 139500 },
    { "epoch": 2.88, "learning_rate": 1.9484887799721305e-06, "loss": 0.6724, "step": 140000 },
    { "epoch": 2.89, "learning_rate": 1.776876239900602e-06, "loss": 0.6681, "step": 140500 },
    { "epoch": 2.9, "learning_rate": 1.6052636998290742e-06, "loss": 0.6633, "step": 141000 },
    { "epoch": 2.91, "learning_rate": 1.433651159757546e-06, "loss": 0.6685, "step": 141500 },
    { "epoch": 2.92, "learning_rate": 1.2620386196860177e-06, "loss": 0.671, "step": 142000 },
    { "epoch": 2.93, "learning_rate": 1.0904260796144896e-06, "loss": 0.6744, "step": 142500 },
    { "epoch": 2.94, "learning_rate": 9.188135395429614e-07, "loss": 0.6599, "step": 143000 },
    { "epoch": 2.96, "learning_rate": 7.472009994714334e-07, "loss": 0.657, "step": 143500 },
    { "epoch": 2.97, "learning_rate": 5.755884593999052e-07, "loss": 0.6707, "step": 144000 },
    { "epoch": 2.98, "learning_rate": 4.0397591932837716e-07, "loss": 0.6655, "step": 144500 },
    { "epoch": 2.99, "learning_rate": 2.3236337925684908e-07, "loss": 0.6699, "step": 145000 },
    { "epoch": 3.0, "learning_rate": 6.075083918532095e-08, "loss": 0.6605, "step": 145500 },
    {
      "epoch": 3.0,
      "step": 145677,
      "total_flos": 1.6338088009187328e+17,
      "train_loss": 0.9301021921609061,
      "train_runtime": 42107.9428,
      "train_samples_per_second": 34.596,
      "train_steps_per_second": 3.46
    }
  ],
  "max_steps": 145677,
  "num_train_epochs": 3,
  "total_flos": 1.6338088009187328e+17,
  "trial_name": null,
  "trial_params": null
}