{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "global_step": 478150,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9947715152148914e-05, |
|
"loss": 1.9286, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.989543030429782e-05, |
|
"loss": 1.4178, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.984314545644672e-05, |
|
"loss": 1.3079, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.979086060859563e-05, |
|
"loss": 1.22, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.973857576074454e-05, |
|
"loss": 1.2108, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.9686290912893443e-05, |
|
"loss": 1.1095, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.9634006065042355e-05, |
|
"loss": 1.1086, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.958172121719126e-05, |
|
"loss": 1.0555, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.9529436369340164e-05, |
|
"loss": 1.0431, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.9477151521489075e-05, |
|
"loss": 1.0312, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.942486667363798e-05, |
|
"loss": 1.038, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.937258182578689e-05, |
|
"loss": 0.9589, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.9320296977935796e-05, |
|
"loss": 0.9573, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.926801213008471e-05, |
|
"loss": 0.9399, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.921572728223361e-05, |
|
"loss": 0.9621, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.9163442434382516e-05, |
|
"loss": 0.9075, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.911115758653143e-05, |
|
"loss": 0.9014, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.905887273868033e-05, |
|
"loss": 0.9125, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.9006587890829244e-05, |
|
"loss": 0.8829, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.895430304297814e-05, |
|
"loss": 0.9324, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.890201819512705e-05, |
|
"loss": 0.8304, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.8849733347275964e-05, |
|
"loss": 0.8362, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.879744849942487e-05, |
|
"loss": 0.8292, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.874516365157378e-05, |
|
"loss": 0.846, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.8692878803722685e-05, |
|
"loss": 0.8455, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.864059395587159e-05, |
|
"loss": 0.844, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.8588309108020494e-05, |
|
"loss": 0.8488, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.8536024260169405e-05, |
|
"loss": 0.8259, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.848373941231831e-05, |
|
"loss": 0.8309, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.843145456446722e-05, |
|
"loss": 0.8033, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.837916971661613e-05, |
|
"loss": 0.8117, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.832688486876503e-05, |
|
"loss": 0.8268, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.827460002091394e-05, |
|
"loss": 0.7868, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.8222315173062846e-05, |
|
"loss": 0.8446, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.817003032521176e-05, |
|
"loss": 0.7891, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.811774547736066e-05, |
|
"loss": 0.7854, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.806546062950957e-05, |
|
"loss": 0.8087, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.801317578165848e-05, |
|
"loss": 0.7793, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.796089093380738e-05, |
|
"loss": 0.7704, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.7908606085956294e-05, |
|
"loss": 0.7703, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.78563212381052e-05, |
|
"loss": 0.7493, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.780403639025411e-05, |
|
"loss": 0.7818, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.7751751542403015e-05, |
|
"loss": 0.7732, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.769946669455192e-05, |
|
"loss": 0.7606, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.764718184670083e-05, |
|
"loss": 0.7694, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.7594896998849735e-05, |
|
"loss": 0.7524, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.7542612150998646e-05, |
|
"loss": 0.7691, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.749032730314755e-05, |
|
"loss": 0.7804, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.7438042455296456e-05, |
|
"loss": 0.8009, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.738575760744536e-05, |
|
"loss": 0.7812, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.733347275959427e-05, |
|
"loss": 0.7623, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.728118791174318e-05, |
|
"loss": 0.7702, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.722890306389209e-05, |
|
"loss": 0.7669, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.717661821604099e-05, |
|
"loss": 0.7348, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.71243333681899e-05, |
|
"loss": 0.7423, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.707204852033881e-05, |
|
"loss": 0.7587, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.701976367248771e-05, |
|
"loss": 0.736, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.6967478824636624e-05, |
|
"loss": 0.742, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.6915193976785535e-05, |
|
"loss": 0.7497, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.686290912893443e-05, |
|
"loss": 0.7355, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.6810624281083344e-05, |
|
"loss": 0.7097, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.675833943323225e-05, |
|
"loss": 0.7811, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.670605458538116e-05, |
|
"loss": 0.736, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.6653769737530065e-05, |
|
"loss": 0.7308, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.6601484889678976e-05, |
|
"loss": 0.7214, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.654920004182788e-05, |
|
"loss": 0.7604, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.6496915193976786e-05, |
|
"loss": 0.7473, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.64446303461257e-05, |
|
"loss": 0.7504, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.63923454982746e-05, |
|
"loss": 0.7095, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.634006065042351e-05, |
|
"loss": 0.7317, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 4.628777580257241e-05, |
|
"loss": 0.7478, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.623549095472132e-05, |
|
"loss": 0.7212, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.618320610687023e-05, |
|
"loss": 0.7275, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.613092125901914e-05, |
|
"loss": 0.7644, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.607863641116805e-05, |
|
"loss": 0.7487, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.6026351563316954e-05, |
|
"loss": 0.7298, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.597406671546586e-05, |
|
"loss": 0.7049, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.592178186761476e-05, |
|
"loss": 0.7178, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4.5869497019763674e-05, |
|
"loss": 0.7099, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 4.5817212171912586e-05, |
|
"loss": 0.712, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 4.576492732406149e-05, |
|
"loss": 0.7371, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 4.57126424762104e-05, |
|
"loss": 0.7406, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 4.56603576283593e-05, |
|
"loss": 0.725, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.560807278050821e-05, |
|
"loss": 0.715, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.5555787932657115e-05, |
|
"loss": 0.7177, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.550350308480603e-05, |
|
"loss": 0.7345, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.545121823695493e-05, |
|
"loss": 0.6942, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.539893338910384e-05, |
|
"loss": 0.7055, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.534664854125275e-05, |
|
"loss": 0.7284, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.529436369340165e-05, |
|
"loss": 0.7422, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.524207884555056e-05, |
|
"loss": 0.7039, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.518979399769947e-05, |
|
"loss": 0.7464, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.513750914984838e-05, |
|
"loss": 0.7237, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.5085224301997284e-05, |
|
"loss": 0.7161, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.503293945414619e-05, |
|
"loss": 0.7366, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.49806546062951e-05, |
|
"loss": 0.7339, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.4928369758444004e-05, |
|
"loss": 0.6388, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.4876084910592916e-05, |
|
"loss": 0.6579, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 4.482380006274182e-05, |
|
"loss": 0.6537, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 4.4771515214890725e-05, |
|
"loss": 0.6422, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 4.4719230367039636e-05, |
|
"loss": 0.6894, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 4.466694551918854e-05, |
|
"loss": 0.6774, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 4.461466067133745e-05, |
|
"loss": 0.6605, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 4.456237582348636e-05, |
|
"loss": 0.6687, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 4.451009097563527e-05, |
|
"loss": 0.662, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 4.4457806127784166e-05, |
|
"loss": 0.648, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 4.440552127993308e-05, |
|
"loss": 0.6638, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 4.435323643208198e-05, |
|
"loss": 0.661, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 4.430095158423089e-05, |
|
"loss": 0.6733, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 4.4248666736379804e-05, |
|
"loss": 0.6828, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 4.41963818885287e-05, |
|
"loss": 0.6613, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 4.4144097040677614e-05, |
|
"loss": 0.708, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 4.409181219282652e-05, |
|
"loss": 0.6683, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 4.403952734497543e-05, |
|
"loss": 0.6493, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 4.3987242497124334e-05, |
|
"loss": 0.6648, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 4.3934957649273246e-05, |
|
"loss": 0.7077, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 4.388267280142215e-05, |
|
"loss": 0.6687, |
|
"step": 58500 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 4.3830387953571055e-05, |
|
"loss": 0.6465, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 4.3778103105719966e-05, |
|
"loss": 0.6375, |
|
"step": 59500 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 4.372581825786887e-05, |
|
"loss": 0.677, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 4.367353341001778e-05, |
|
"loss": 0.6624, |
|
"step": 60500 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 4.3621248562166687e-05, |
|
"loss": 0.6619, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 4.356896371431559e-05, |
|
"loss": 0.6867, |
|
"step": 61500 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 4.35166788664645e-05, |
|
"loss": 0.6723, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 4.346439401861341e-05, |
|
"loss": 0.6585, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 4.341210917076232e-05, |
|
"loss": 0.6564, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 4.335982432291122e-05, |
|
"loss": 0.6608, |
|
"step": 63500 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 4.330753947506013e-05, |
|
"loss": 0.657, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 4.325525462720903e-05, |
|
"loss": 0.6957, |
|
"step": 64500 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.3202969779357944e-05, |
|
"loss": 0.6625, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.3150684931506855e-05, |
|
"loss": 0.6876, |
|
"step": 65500 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.309840008365576e-05, |
|
"loss": 0.6622, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.304611523580467e-05, |
|
"loss": 0.6452, |
|
"step": 66500 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.299383038795357e-05, |
|
"loss": 0.6647, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 4.294154554010248e-05, |
|
"loss": 0.6562, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 4.2889260692251385e-05, |
|
"loss": 0.6274, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 4.2836975844400296e-05, |
|
"loss": 0.6503, |
|
"step": 68500 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 4.27846909965492e-05, |
|
"loss": 0.6427, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 4.273240614869811e-05, |
|
"loss": 0.6766, |
|
"step": 69500 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 4.2680121300847016e-05, |
|
"loss": 0.6578, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 4.262783645299592e-05, |
|
"loss": 0.6269, |
|
"step": 70500 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 4.257555160514483e-05, |
|
"loss": 0.6384, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 4.252326675729374e-05, |
|
"loss": 0.6593, |
|
"step": 71500 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 4.247098190944265e-05, |
|
"loss": 0.658, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.241869706159155e-05, |
|
"loss": 0.6884, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.236641221374046e-05, |
|
"loss": 0.6342, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.231412736588937e-05, |
|
"loss": 0.6604, |
|
"step": 73500 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.2261842518038273e-05, |
|
"loss": 0.6693, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.2209557670187185e-05, |
|
"loss": 0.6543, |
|
"step": 74500 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.215727282233609e-05, |
|
"loss": 0.6449, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.2104987974484994e-05, |
|
"loss": 0.6421, |
|
"step": 75500 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.2052703126633905e-05, |
|
"loss": 0.617, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 4.200041827878281e-05, |
|
"loss": 0.6678, |
|
"step": 76500 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.194813343093172e-05, |
|
"loss": 0.6912, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.1895848583080626e-05, |
|
"loss": 0.648, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 4.184356373522954e-05, |
|
"loss": 0.6743, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 4.1791278887378435e-05, |
|
"loss": 0.6393, |
|
"step": 78500 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 4.1738994039527346e-05, |
|
"loss": 0.6387, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 4.168670919167625e-05, |
|
"loss": 0.6549, |
|
"step": 79500 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 4.163442434382516e-05, |
|
"loss": 0.6439, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 4.1582139495974074e-05, |
|
"loss": 0.6458, |
|
"step": 80500 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 4.152985464812297e-05, |
|
"loss": 0.663, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 4.147756980027188e-05, |
|
"loss": 0.6304, |
|
"step": 81500 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 4.142528495242079e-05, |
|
"loss": 0.6471, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 4.13730001045697e-05, |
|
"loss": 0.652, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 4.13207152567186e-05, |
|
"loss": 0.645, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 4.1268430408867515e-05, |
|
"loss": 0.6546, |
|
"step": 83500 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 4.121614556101642e-05, |
|
"loss": 0.6011, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 4.1163860713165324e-05, |
|
"loss": 0.6465, |
|
"step": 84500 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 4.1111575865314235e-05, |
|
"loss": 0.6639, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 4.105929101746314e-05, |
|
"loss": 0.6391, |
|
"step": 85500 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 4.100700616961205e-05, |
|
"loss": 0.6391, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 4.0954721321760956e-05, |
|
"loss": 0.6355, |
|
"step": 86500 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 4.090243647390986e-05, |
|
"loss": 0.6607, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 4.085015162605877e-05, |
|
"loss": 0.6498, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 4.0797866778207676e-05, |
|
"loss": 0.6463, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 4.074558193035659e-05, |
|
"loss": 0.6566, |
|
"step": 88500 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 4.069329708250549e-05, |
|
"loss": 0.6586, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 4.06410122346544e-05, |
|
"loss": 0.6191, |
|
"step": 89500 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 4.05887273868033e-05, |
|
"loss": 0.6107, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 4.053644253895221e-05, |
|
"loss": 0.6505, |
|
"step": 90500 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 4.0484157691101124e-05, |
|
"loss": 0.6473, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 4.043187284325003e-05, |
|
"loss": 0.6264, |
|
"step": 91500 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 4.037958799539894e-05, |
|
"loss": 0.6572, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 4.032730314754784e-05, |
|
"loss": 0.6464, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 4.027501829969675e-05, |
|
"loss": 0.6204, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 4.0222733451845654e-05, |
|
"loss": 0.6363, |
|
"step": 93500 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 4.0170448603994565e-05, |
|
"loss": 0.6514, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 4.0118163756143476e-05, |
|
"loss": 0.6464, |
|
"step": 94500 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 4.006587890829238e-05, |
|
"loss": 0.6582, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.0013594060441286e-05, |
|
"loss": 0.6507, |
|
"step": 95500 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 3.996130921259019e-05, |
|
"loss": 0.5918, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 3.99090243647391e-05, |
|
"loss": 0.5721, |
|
"step": 96500 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 3.9856739516888006e-05, |
|
"loss": 0.5882, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 3.980445466903692e-05, |
|
"loss": 0.5759, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 3.975216982118582e-05, |
|
"loss": 0.5864, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 3.969988497333473e-05, |
|
"loss": 0.5741, |
|
"step": 98500 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 3.964760012548364e-05, |
|
"loss": 0.5952, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 3.959531527763254e-05, |
|
"loss": 0.5823, |
|
"step": 99500 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 3.9543030429781454e-05, |
|
"loss": 0.5871, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 3.949074558193036e-05, |
|
"loss": 0.5717, |
|
"step": 100500 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 3.943846073407926e-05, |
|
"loss": 0.6012, |
|
"step": 101000 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 3.9386175886228174e-05, |
|
"loss": 0.5999, |
|
"step": 101500 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 3.933389103837708e-05, |
|
"loss": 0.5749, |
|
"step": 102000 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 3.928160619052599e-05, |
|
"loss": 0.6024, |
|
"step": 102500 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.9229321342674895e-05, |
|
"loss": 0.5815, |
|
"step": 103000 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.9177036494823806e-05, |
|
"loss": 0.6153, |
|
"step": 103500 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.9124751646972704e-05, |
|
"loss": 0.5985, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.9072466799121616e-05, |
|
"loss": 0.5789, |
|
"step": 104500 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.902018195127053e-05, |
|
"loss": 0.5979, |
|
"step": 105000 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.896789710341943e-05, |
|
"loss": 0.6111, |
|
"step": 105500 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.891561225556834e-05, |
|
"loss": 0.5985, |
|
"step": 106000 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.886332740771724e-05, |
|
"loss": 0.603, |
|
"step": 106500 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.881104255986615e-05, |
|
"loss": 0.6152, |
|
"step": 107000 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.8758757712015057e-05, |
|
"loss": 0.5891, |
|
"step": 107500 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.870647286416397e-05, |
|
"loss": 0.6007, |
|
"step": 108000 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 3.865418801631287e-05, |
|
"loss": 0.5796, |
|
"step": 108500 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 3.8601903168461784e-05, |
|
"loss": 0.6049, |
|
"step": 109000 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 3.854961832061069e-05, |
|
"loss": 0.5707, |
|
"step": 109500 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 3.849733347275959e-05, |
|
"loss": 0.6083, |
|
"step": 110000 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 3.8445048624908504e-05, |
|
"loss": 0.5984, |
|
"step": 110500 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 3.839276377705741e-05, |
|
"loss": 0.6061, |
|
"step": 111000 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 3.834047892920632e-05, |
|
"loss": 0.585, |
|
"step": 111500 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 3.8288194081355225e-05, |
|
"loss": 0.5908, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 3.823590923350413e-05, |
|
"loss": 0.5628, |
|
"step": 112500 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 3.818362438565304e-05, |
|
"loss": 0.5913, |
|
"step": 113000 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 3.8131339537801945e-05, |
|
"loss": 0.6095, |
|
"step": 113500 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 3.807905468995086e-05, |
|
"loss": 0.5652, |
|
"step": 114000 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 3.802676984209976e-05, |
|
"loss": 0.5884, |
|
"step": 114500 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 3.7974484994248666e-05, |
|
"loss": 0.5881, |
|
"step": 115000 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 3.792220014639758e-05, |
|
"loss": 0.572, |
|
"step": 115500 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 3.786991529854648e-05, |
|
"loss": 0.613, |
|
"step": 116000 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 3.781763045069539e-05, |
|
"loss": 0.5828, |
|
"step": 116500 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 3.77653456028443e-05, |
|
"loss": 0.593, |
|
"step": 117000 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 3.771306075499321e-05, |
|
"loss": 0.5822, |
|
"step": 117500 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 3.766077590714211e-05, |
|
"loss": 0.584, |
|
"step": 118000 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 3.760849105929102e-05, |
|
"loss": 0.6153, |
|
"step": 118500 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 3.755620621143992e-05, |
|
"loss": 0.576, |
|
"step": 119000 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 3.7503921363588834e-05, |
|
"loss": 0.6093, |
|
"step": 119500 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 3.7451636515737746e-05, |
|
"loss": 0.5884, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 3.739935166788665e-05, |
|
"loss": 0.6014, |
|
"step": 120500 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 3.7347066820035555e-05, |
|
"loss": 0.6056, |
|
"step": 121000 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 3.729478197218446e-05, |
|
"loss": 0.6095, |
|
"step": 121500 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 3.724249712433337e-05, |
|
"loss": 0.5864, |
|
"step": 122000 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 3.7190212276482275e-05, |
|
"loss": 0.5991, |
|
"step": 122500 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 3.713792742863119e-05, |
|
"loss": 0.6125, |
|
"step": 123000 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 3.708564258078009e-05, |
|
"loss": 0.5813, |
|
"step": 123500 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 3.7033357732928996e-05, |
|
"loss": 0.5866, |
|
"step": 124000 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 3.698107288507791e-05, |
|
"loss": 0.5924, |
|
"step": 124500 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 3.692878803722681e-05, |
|
"loss": 0.5912, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 3.687650318937572e-05, |
|
"loss": 0.5514, |
|
"step": 125500 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 3.682421834152463e-05, |
|
"loss": 0.5862, |
|
"step": 126000 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 3.677193349367353e-05, |
|
"loss": 0.5608, |
|
"step": 126500 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 3.6719648645822444e-05, |
|
"loss": 0.5791, |
|
"step": 127000 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 3.666736379797135e-05, |
|
"loss": 0.6113, |
|
"step": 127500 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 3.661507895012026e-05, |
|
"loss": 0.5795, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 3.6562794102269164e-05, |
|
"loss": 0.6109, |
|
"step": 128500 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 3.6510509254418076e-05, |
|
"loss": 0.6091, |
|
"step": 129000 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 3.645822440656697e-05, |
|
"loss": 0.5855, |
|
"step": 129500 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 3.6405939558715885e-05, |
|
"loss": 0.5788, |
|
"step": 130000 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 3.6353654710864796e-05, |
|
"loss": 0.6089, |
|
"step": 130500 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 3.63013698630137e-05, |
|
"loss": 0.6061, |
|
"step": 131000 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 3.624908501516261e-05, |
|
"loss": 0.6076, |
|
"step": 131500 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.6196800167311517e-05, |
|
"loss": 0.592, |
|
"step": 132000 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.614451531946042e-05, |
|
"loss": 0.5624, |
|
"step": 132500 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.6092230471609326e-05, |
|
"loss": 0.592, |
|
"step": 133000 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.603994562375824e-05, |
|
"loss": 0.6006, |
|
"step": 133500 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.598766077590714e-05, |
|
"loss": 0.5696, |
|
"step": 134000 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.593537592805605e-05, |
|
"loss": 0.5638, |
|
"step": 134500 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 3.588309108020496e-05, |
|
"loss": 0.5895, |
|
"step": 135000 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 3.583080623235386e-05, |
|
"loss": 0.5862, |
|
"step": 135500 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 3.5778521384502774e-05, |
|
"loss": 0.5675, |
|
"step": 136000 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 3.572623653665168e-05, |
|
"loss": 0.5469, |
|
"step": 136500 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 3.567395168880059e-05, |
|
"loss": 0.5871, |
|
"step": 137000 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 3.5621666840949494e-05, |
|
"loss": 0.5943, |
|
"step": 137500 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 3.55693819930984e-05, |
|
"loss": 0.5586, |
|
"step": 138000 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 3.551709714524731e-05, |
|
"loss": 0.5978, |
|
"step": 138500 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 3.5464812297396215e-05, |
|
"loss": 0.579, |
|
"step": 139000 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.5412527449545126e-05, |
|
"loss": 0.6138, |
|
"step": 139500 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 3.536024260169403e-05, |
|
"loss": 0.5756, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 3.530795775384294e-05, |
|
"loss": 0.5899, |
|
"step": 140500 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 3.5255672905991846e-05, |
|
"loss": 0.5765, |
|
"step": 141000 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 3.520338805814075e-05, |
|
"loss": 0.5767, |
|
"step": 141500 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 3.515110321028966e-05, |
|
"loss": 0.5916, |
|
"step": 142000 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.509881836243857e-05, |
|
"loss": 0.6079, |
|
"step": 142500 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 3.504653351458748e-05, |
|
"loss": 0.608, |
|
"step": 143000 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 3.4994248666736376e-05, |
|
"loss": 0.5729, |
|
"step": 143500 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 3.494196381888529e-05, |
|
"loss": 0.5501, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 3.488967897103419e-05, |
|
"loss": 0.5655, |
|
"step": 144500 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 3.4837394123183103e-05, |
|
"loss": 0.5459, |
|
"step": 145000 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 3.4785109275332015e-05, |
|
"loss": 0.5261, |
|
"step": 145500 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 3.473282442748092e-05, |
|
"loss": 0.5256, |
|
"step": 146000 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 3.4680539579629824e-05, |
|
"loss": 0.553, |
|
"step": 146500 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 3.462825473177873e-05, |
|
"loss": 0.5718, |
|
"step": 147000 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 3.457596988392764e-05, |
|
"loss": 0.5577, |
|
"step": 147500 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 3.4523685036076545e-05, |
|
"loss": 0.5396, |
|
"step": 148000 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 3.4471400188225456e-05, |
|
"loss": 0.5378, |
|
"step": 148500 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 3.441911534037436e-05, |
|
"loss": 0.5366, |
|
"step": 149000 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 3.4366830492523265e-05, |
|
"loss": 0.5619, |
|
"step": 149500 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 3.4314545644672176e-05, |
|
"loss": 0.5326, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 3.426226079682108e-05, |
|
"loss": 0.5735, |
|
"step": 150500 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 3.420997594896999e-05, |
|
"loss": 0.5345, |
|
"step": 151000 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 3.41576911011189e-05, |
|
"loss": 0.5283, |
|
"step": 151500 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 3.41054062532678e-05, |
|
"loss": 0.5424, |
|
"step": 152000 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 3.405312140541671e-05, |
|
"loss": 0.5694, |
|
"step": 152500 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 3.400083655756562e-05, |
|
"loss": 0.5309, |
|
"step": 153000 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 3.394855170971453e-05, |
|
"loss": 0.5426, |
|
"step": 153500 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 3.389626686186343e-05, |
|
"loss": 0.5597, |
|
"step": 154000 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 3.3843982014012345e-05, |
|
"loss": 0.5433, |
|
"step": 154500 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 3.379169716616124e-05, |
|
"loss": 0.542, |
|
"step": 155000 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 3.3739412318310154e-05, |
|
"loss": 0.5371, |
|
"step": 155500 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 3.3687127470459065e-05, |
|
"loss": 0.5698, |
|
"step": 156000 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 3.363484262260797e-05, |
|
"loss": 0.5431, |
|
"step": 156500 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 3.358255777475688e-05, |
|
"loss": 0.5284, |
|
"step": 157000 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 3.3530272926905786e-05, |
|
"loss": 0.536, |
|
"step": 157500 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 3.347798807905469e-05, |
|
"loss": 0.5591, |
|
"step": 158000 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 3.3425703231203595e-05, |
|
"loss": 0.5197, |
|
"step": 158500 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 3.3373418383352506e-05, |
|
"loss": 0.5357, |
|
"step": 159000 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 3.332113353550142e-05, |
|
"loss": 0.5549, |
|
"step": 159500 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 3.326884868765032e-05, |
|
"loss": 0.556, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 3.321656383979923e-05, |
|
"loss": 0.5425, |
|
"step": 160500 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 3.316427899194813e-05, |
|
"loss": 0.5247, |
|
"step": 161000 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 3.311199414409704e-05, |
|
"loss": 0.5339, |
|
"step": 161500 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 3.305970929624595e-05, |
|
"loss": 0.5495, |
|
"step": 162000 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 3.300742444839486e-05, |
|
"loss": 0.5362, |
|
"step": 162500 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 3.295513960054376e-05, |
|
"loss": 0.556, |
|
"step": 163000 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 3.290285475269267e-05, |
|
"loss": 0.5521, |
|
"step": 163500 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 3.285056990484158e-05, |
|
"loss": 0.5553, |
|
"step": 164000 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 3.2798285056990484e-05, |
|
"loss": 0.5434, |
|
"step": 164500 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 3.2746000209139395e-05, |
|
"loss": 0.5498, |
|
"step": 165000 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 3.26937153612883e-05, |
|
"loss": 0.5435, |
|
"step": 165500 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 3.264143051343721e-05, |
|
"loss": 0.5384, |
|
"step": 166000 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 3.2589145665586116e-05, |
|
"loss": 0.5705, |
|
"step": 166500 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 3.253686081773502e-05, |
|
"loss": 0.5452, |
|
"step": 167000 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 3.248457596988393e-05, |
|
"loss": 0.5341, |
|
"step": 167500 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 3.2432291122032836e-05, |
|
"loss": 0.5492, |
|
"step": 168000 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 3.238000627418175e-05, |
|
"loss": 0.5583, |
|
"step": 168500 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 3.2327721426330645e-05, |
|
"loss": 0.5837, |
|
"step": 169000 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 3.227543657847956e-05, |
|
"loss": 0.5318, |
|
"step": 169500 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 3.222315173062847e-05, |
|
"loss": 0.5835, |
|
"step": 170000 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 3.217086688277737e-05, |
|
"loss": 0.5547, |
|
"step": 170500 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 3.2118582034926284e-05, |
|
"loss": 0.5549, |
|
"step": 171000 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 3.206629718707519e-05, |
|
"loss": 0.5426, |
|
"step": 171500 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 3.201401233922409e-05, |
|
"loss": 0.5464, |
|
"step": 172000 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 3.1961727491373e-05, |
|
"loss": 0.5396, |
|
"step": 172500 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 3.190944264352191e-05, |
|
"loss": 0.563, |
|
"step": 173000 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 3.1857157795670814e-05, |
|
"loss": 0.5453, |
|
"step": 173500 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 3.1804872947819725e-05, |
|
"loss": 0.5593, |
|
"step": 174000 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 3.1752588099968636e-05, |
|
"loss": 0.5374, |
|
"step": 174500 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 3.1700303252117534e-05, |
|
"loss": 0.549, |
|
"step": 175000 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 3.1648018404266446e-05, |
|
"loss": 0.5501, |
|
"step": 175500 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 3.159573355641535e-05, |
|
"loss": 0.5716, |
|
"step": 176000 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 3.154344870856426e-05, |
|
"loss": 0.5449, |
|
"step": 176500 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 3.1491163860713166e-05, |
|
"loss": 0.5678, |
|
"step": 177000 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 3.143887901286207e-05, |
|
"loss": 0.5384, |
|
"step": 177500 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 3.138659416501098e-05, |
|
"loss": 0.5327, |
|
"step": 178000 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 3.1334309317159887e-05, |
|
"loss": 0.546, |
|
"step": 178500 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 3.12820244693088e-05, |
|
"loss": 0.5257, |
|
"step": 179000 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 3.12297396214577e-05, |
|
"loss": 0.5232, |
|
"step": 179500 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 3.1177454773606614e-05, |
|
"loss": 0.5351, |
|
"step": 180000 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 3.112516992575552e-05, |
|
"loss": 0.5379, |
|
"step": 180500 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 3.107288507790442e-05, |
|
"loss": 0.5343, |
|
"step": 181000 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 3.1020600230053334e-05, |
|
"loss": 0.5496, |
|
"step": 181500 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 3.096831538220224e-05, |
|
"loss": 0.5387, |
|
"step": 182000 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 3.091603053435115e-05, |
|
"loss": 0.5361, |
|
"step": 182500 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 3.0863745686500055e-05, |
|
"loss": 0.5321, |
|
"step": 183000 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 3.081146083864896e-05, |
|
"loss": 0.5526, |
|
"step": 183500 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 3.0759175990797864e-05, |
|
"loss": 0.5866, |
|
"step": 184000 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 3.0706891142946775e-05, |
|
"loss": 0.5566, |
|
"step": 184500 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 3.065460629509569e-05, |
|
"loss": 0.5216, |
|
"step": 185000 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 3.060232144724459e-05, |
|
"loss": 0.5304, |
|
"step": 185500 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 3.0550036599393496e-05, |
|
"loss": 0.5371, |
|
"step": 186000 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 3.0497751751542404e-05, |
|
"loss": 0.5406, |
|
"step": 186500 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 3.0445466903691312e-05, |
|
"loss": 0.5361, |
|
"step": 187000 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 3.039318205584022e-05, |
|
"loss": 0.5458, |
|
"step": 187500 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 3.0340897207989128e-05, |
|
"loss": 0.5337, |
|
"step": 188000 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 3.0288612360138036e-05, |
|
"loss": 0.544, |
|
"step": 188500 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 3.0236327512286937e-05, |
|
"loss": 0.5252, |
|
"step": 189000 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 3.0184042664435845e-05, |
|
"loss": 0.5183, |
|
"step": 189500 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 3.0131757816584756e-05, |
|
"loss": 0.5715, |
|
"step": 190000 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 3.0079472968733664e-05, |
|
"loss": 0.5432, |
|
"step": 190500 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 3.0027188120882572e-05, |
|
"loss": 0.5498, |
|
"step": 191000 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 2.997490327303148e-05, |
|
"loss": 0.5321, |
|
"step": 191500 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 2.992261842518038e-05, |
|
"loss": 0.5073, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 2.987033357732929e-05, |
|
"loss": 0.5032, |
|
"step": 192500 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 2.9818048729478197e-05, |
|
"loss": 0.4929, |
|
"step": 193000 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 2.9765763881627105e-05, |
|
"loss": 0.5216, |
|
"step": 193500 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 2.9713479033776013e-05, |
|
"loss": 0.496, |
|
"step": 194000 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 2.9661194185924918e-05, |
|
"loss": 0.4708, |
|
"step": 194500 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 2.9608909338073826e-05, |
|
"loss": 0.4902, |
|
"step": 195000 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 2.9556624490222734e-05, |
|
"loss": 0.4774, |
|
"step": 195500 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 2.9504339642371642e-05, |
|
"loss": 0.5148, |
|
"step": 196000 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 2.945205479452055e-05, |
|
"loss": 0.5185, |
|
"step": 196500 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 2.9399769946669458e-05, |
|
"loss": 0.4926, |
|
"step": 197000 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 2.9347485098818362e-05, |
|
"loss": 0.5151, |
|
"step": 197500 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 2.929520025096727e-05, |
|
"loss": 0.5046, |
|
"step": 198000 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 2.9242915403116178e-05, |
|
"loss": 0.514, |
|
"step": 198500 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 2.9190630555265086e-05, |
|
"loss": 0.51, |
|
"step": 199000 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 2.9138345707413994e-05, |
|
"loss": 0.5434, |
|
"step": 199500 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 2.9086060859562902e-05, |
|
"loss": 0.5082, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 2.9033776011711807e-05, |
|
"loss": 0.4826, |
|
"step": 200500 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 2.8981491163860715e-05, |
|
"loss": 0.4887, |
|
"step": 201000 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 2.8929206316009623e-05, |
|
"loss": 0.5012, |
|
"step": 201500 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 2.887692146815853e-05, |
|
"loss": 0.4757, |
|
"step": 202000 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 2.882463662030744e-05, |
|
"loss": 0.5505, |
|
"step": 202500 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 2.8772351772456347e-05, |
|
"loss": 0.5012, |
|
"step": 203000 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 2.8720066924605248e-05, |
|
"loss": 0.4849, |
|
"step": 203500 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 2.8667782076754156e-05, |
|
"loss": 0.4962, |
|
"step": 204000 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 2.8615497228903064e-05, |
|
"loss": 0.5078, |
|
"step": 204500 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 2.8563212381051975e-05, |
|
"loss": 0.5322, |
|
"step": 205000 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 2.8510927533200883e-05, |
|
"loss": 0.5212, |
|
"step": 205500 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 2.8458642685349784e-05, |
|
"loss": 0.5152, |
|
"step": 206000 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 2.8406357837498692e-05, |
|
"loss": 0.5157, |
|
"step": 206500 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 2.83540729896476e-05, |
|
"loss": 0.4864, |
|
"step": 207000 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 2.8301788141796508e-05, |
|
"loss": 0.4851, |
|
"step": 207500 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 2.8249503293945416e-05, |
|
"loss": 0.5083, |
|
"step": 208000 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 2.8197218446094324e-05, |
|
"loss": 0.5059, |
|
"step": 208500 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 2.814493359824323e-05, |
|
"loss": 0.5203, |
|
"step": 209000 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 2.8092648750392137e-05, |
|
"loss": 0.5268, |
|
"step": 209500 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 2.8040363902541045e-05, |
|
"loss": 0.5143, |
|
"step": 210000 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 2.7988079054689953e-05, |
|
"loss": 0.5081, |
|
"step": 210500 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 2.793579420683886e-05, |
|
"loss": 0.4966, |
|
"step": 211000 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 2.788350935898777e-05, |
|
"loss": 0.4854, |
|
"step": 211500 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 2.7831224511136673e-05, |
|
"loss": 0.5144, |
|
"step": 212000 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 2.777893966328558e-05, |
|
"loss": 0.4946, |
|
"step": 212500 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 2.772665481543449e-05, |
|
"loss": 0.4746, |
|
"step": 213000 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 2.7674369967583397e-05, |
|
"loss": 0.5224, |
|
"step": 213500 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 2.7622085119732305e-05, |
|
"loss": 0.5116, |
|
"step": 214000 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 2.7569800271881206e-05, |
|
"loss": 0.476, |
|
"step": 214500 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 2.7517515424030114e-05, |
|
"loss": 0.5014, |
|
"step": 215000 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 2.7465230576179026e-05, |
|
"loss": 0.5107, |
|
"step": 215500 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 2.7412945728327933e-05, |
|
"loss": 0.5157, |
|
"step": 216000 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 2.736066088047684e-05, |
|
"loss": 0.5458, |
|
"step": 216500 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 2.730837603262575e-05, |
|
"loss": 0.5031, |
|
"step": 217000 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 2.725609118477465e-05, |
|
"loss": 0.5098, |
|
"step": 217500 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 2.720380633692356e-05, |
|
"loss": 0.4895, |
|
"step": 218000 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 2.7151521489072467e-05, |
|
"loss": 0.5169, |
|
"step": 218500 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 2.7099236641221375e-05, |
|
"loss": 0.5024, |
|
"step": 219000 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 2.7046951793370286e-05, |
|
"loss": 0.4962, |
|
"step": 219500 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 2.6994666945519194e-05, |
|
"loss": 0.5244, |
|
"step": 220000 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 2.6942382097668095e-05, |
|
"loss": 0.5145, |
|
"step": 220500 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 2.6890097249817003e-05, |
|
"loss": 0.4924, |
|
"step": 221000 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 2.683781240196591e-05, |
|
"loss": 0.5127, |
|
"step": 221500 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 2.678552755411482e-05, |
|
"loss": 0.5052, |
|
"step": 222000 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 2.6733242706263727e-05, |
|
"loss": 0.4872, |
|
"step": 222500 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 2.668095785841263e-05, |
|
"loss": 0.5164, |
|
"step": 223000 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 2.662867301056154e-05, |
|
"loss": 0.5303, |
|
"step": 223500 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 2.6576388162710447e-05, |
|
"loss": 0.5112, |
|
"step": 224000 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 2.6524103314859355e-05, |
|
"loss": 0.5119, |
|
"step": 224500 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 2.6471818467008263e-05, |
|
"loss": 0.5028, |
|
"step": 225000 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 2.641953361915717e-05, |
|
"loss": 0.5166, |
|
"step": 225500 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 2.6367248771306076e-05, |
|
"loss": 0.5139, |
|
"step": 226000 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 2.6314963923454984e-05, |
|
"loss": 0.5063, |
|
"step": 226500 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 2.6262679075603892e-05, |
|
"loss": 0.5236, |
|
"step": 227000 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 2.62103942277528e-05, |
|
"loss": 0.5249, |
|
"step": 227500 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 2.6158109379901708e-05, |
|
"loss": 0.524, |
|
"step": 228000 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 2.6105824532050616e-05, |
|
"loss": 0.4759, |
|
"step": 228500 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 2.6053539684199517e-05, |
|
"loss": 0.5111, |
|
"step": 229000 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 2.6001254836348425e-05, |
|
"loss": 0.5016, |
|
"step": 229500 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 2.5948969988497336e-05, |
|
"loss": 0.5448, |
|
"step": 230000 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 2.5896685140646244e-05, |
|
"loss": 0.5204, |
|
"step": 230500 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 2.5844400292795152e-05, |
|
"loss": 0.5324, |
|
"step": 231000 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 2.5792115444944053e-05, |
|
"loss": 0.5069, |
|
"step": 231500 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 2.573983059709296e-05, |
|
"loss": 0.5221, |
|
"step": 232000 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 2.568754574924187e-05, |
|
"loss": 0.506, |
|
"step": 232500 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 2.5635260901390777e-05, |
|
"loss": 0.5418, |
|
"step": 233000 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 2.5582976053539685e-05, |
|
"loss": 0.4877, |
|
"step": 233500 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 2.5530691205688593e-05, |
|
"loss": 0.4889, |
|
"step": 234000 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 2.5478406357837498e-05, |
|
"loss": 0.5019, |
|
"step": 234500 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 2.5426121509986406e-05, |
|
"loss": 0.5124, |
|
"step": 235000 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 2.5373836662135314e-05, |
|
"loss": 0.5066, |
|
"step": 235500 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 2.5321551814284222e-05, |
|
"loss": 0.5014, |
|
"step": 236000 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 2.526926696643313e-05, |
|
"loss": 0.5057, |
|
"step": 236500 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 2.5216982118582038e-05, |
|
"loss": 0.5125, |
|
"step": 237000 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 2.5164697270730942e-05, |
|
"loss": 0.5012, |
|
"step": 237500 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 2.511241242287985e-05, |
|
"loss": 0.4988, |
|
"step": 238000 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 2.5060127575028758e-05, |
|
"loss": 0.4761, |
|
"step": 238500 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 2.5007842727177666e-05, |
|
"loss": 0.518, |
|
"step": 239000 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"learning_rate": 2.495555787932657e-05, |
|
"loss": 0.49, |
|
"step": 239500 |
|
}, |
|
{ |
|
"epoch": 5.02, |
|
"learning_rate": 2.490327303147548e-05, |
|
"loss": 0.4793, |
|
"step": 240000 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"learning_rate": 2.4850988183624387e-05, |
|
"loss": 0.4624, |
|
"step": 240500 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"learning_rate": 2.4798703335773295e-05, |
|
"loss": 0.4706, |
|
"step": 241000 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"learning_rate": 2.4746418487922203e-05, |
|
"loss": 0.4716, |
|
"step": 241500 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 2.469413364007111e-05, |
|
"loss": 0.4775, |
|
"step": 242000 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"learning_rate": 2.4641848792220015e-05, |
|
"loss": 0.4865, |
|
"step": 242500 |
|
}, |
|
{ |
|
"epoch": 5.08, |
|
"learning_rate": 2.4589563944368923e-05, |
|
"loss": 0.4824, |
|
"step": 243000 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 2.4537279096517828e-05, |
|
"loss": 0.4781, |
|
"step": 243500 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 2.4484994248666736e-05, |
|
"loss": 0.445, |
|
"step": 244000 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"learning_rate": 2.4432709400815647e-05, |
|
"loss": 0.4782, |
|
"step": 244500 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"learning_rate": 2.438042455296455e-05, |
|
"loss": 0.4768, |
|
"step": 245000 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 2.432813970511346e-05, |
|
"loss": 0.4527, |
|
"step": 245500 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"learning_rate": 2.4275854857262368e-05, |
|
"loss": 0.4633, |
|
"step": 246000 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"learning_rate": 2.4223570009411272e-05, |
|
"loss": 0.4677, |
|
"step": 246500 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"learning_rate": 2.417128516156018e-05, |
|
"loss": 0.4615, |
|
"step": 247000 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 2.4119000313709088e-05, |
|
"loss": 0.4708, |
|
"step": 247500 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 2.4066715465857996e-05, |
|
"loss": 0.4835, |
|
"step": 248000 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"learning_rate": 2.4014430618006904e-05, |
|
"loss": 0.4554, |
|
"step": 248500 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 2.3962145770155812e-05, |
|
"loss": 0.4758, |
|
"step": 249000 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"learning_rate": 2.3909860922304717e-05, |
|
"loss": 0.5039, |
|
"step": 249500 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"learning_rate": 2.3857576074453625e-05, |
|
"loss": 0.4834, |
|
"step": 250000 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"learning_rate": 2.3805291226602533e-05, |
|
"loss": 0.4782, |
|
"step": 250500 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 2.3753006378751437e-05, |
|
"loss": 0.487, |
|
"step": 251000 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"learning_rate": 2.3700721530900345e-05, |
|
"loss": 0.4716, |
|
"step": 251500 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"learning_rate": 2.3648436683049253e-05, |
|
"loss": 0.4902, |
|
"step": 252000 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 2.359615183519816e-05, |
|
"loss": 0.4802, |
|
"step": 252500 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"learning_rate": 2.354386698734707e-05, |
|
"loss": 0.4816, |
|
"step": 253000 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"learning_rate": 2.3491582139495974e-05, |
|
"loss": 0.4725, |
|
"step": 253500 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 2.343929729164488e-05, |
|
"loss": 0.4646, |
|
"step": 254000 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"learning_rate": 2.338701244379379e-05, |
|
"loss": 0.4651, |
|
"step": 254500 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"learning_rate": 2.3334727595942697e-05, |
|
"loss": 0.4932, |
|
"step": 255000 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"learning_rate": 2.3282442748091605e-05, |
|
"loss": 0.466, |
|
"step": 255500 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"learning_rate": 2.3230157900240513e-05, |
|
"loss": 0.4887, |
|
"step": 256000 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 2.3177873052389418e-05, |
|
"loss": 0.4856, |
|
"step": 256500 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"learning_rate": 2.3125588204538326e-05, |
|
"loss": 0.5018, |
|
"step": 257000 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 2.3073303356687234e-05, |
|
"loss": 0.4869, |
|
"step": 257500 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"learning_rate": 2.302101850883614e-05, |
|
"loss": 0.4699, |
|
"step": 258000 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"learning_rate": 2.2968733660985047e-05, |
|
"loss": 0.484, |
|
"step": 258500 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"learning_rate": 2.2916448813133954e-05, |
|
"loss": 0.4564, |
|
"step": 259000 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"learning_rate": 2.2864163965282862e-05, |
|
"loss": 0.5038, |
|
"step": 259500 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"learning_rate": 2.281187911743177e-05, |
|
"loss": 0.4859, |
|
"step": 260000 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 2.275959426958068e-05, |
|
"loss": 0.4783, |
|
"step": 260500 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"learning_rate": 2.2707309421729583e-05, |
|
"loss": 0.4773, |
|
"step": 261000 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"learning_rate": 2.265502457387849e-05, |
|
"loss": 0.4729, |
|
"step": 261500 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"learning_rate": 2.2602739726027396e-05, |
|
"loss": 0.4682, |
|
"step": 262000 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"learning_rate": 2.2550454878176307e-05, |
|
"loss": 0.4682, |
|
"step": 262500 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 2.2498170030325215e-05, |
|
"loss": 0.4819, |
|
"step": 263000 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"learning_rate": 2.244588518247412e-05, |
|
"loss": 0.4896, |
|
"step": 263500 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"learning_rate": 2.2393600334623027e-05, |
|
"loss": 0.4475, |
|
"step": 264000 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 2.2341315486771935e-05, |
|
"loss": 0.464, |
|
"step": 264500 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"learning_rate": 2.228903063892084e-05, |
|
"loss": 0.4981, |
|
"step": 265000 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 2.2236745791069748e-05, |
|
"loss": 0.4741, |
|
"step": 265500 |
|
}, |
|
{ |
|
"epoch": 5.56, |
|
"learning_rate": 2.2184460943218656e-05, |
|
"loss": 0.4592, |
|
"step": 266000 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"learning_rate": 2.2132176095367564e-05, |
|
"loss": 0.4636, |
|
"step": 266500 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 2.2079891247516472e-05, |
|
"loss": 0.4954, |
|
"step": 267000 |
|
}, |
|
{ |
|
"epoch": 5.59, |
|
"learning_rate": 2.202760639966538e-05, |
|
"loss": 0.4981, |
|
"step": 267500 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 2.1975321551814284e-05, |
|
"loss": 0.4602, |
|
"step": 268000 |
|
}, |
|
{ |
|
"epoch": 5.62, |
|
"learning_rate": 2.1923036703963192e-05, |
|
"loss": 0.4694, |
|
"step": 268500 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"learning_rate": 2.18707518561121e-05, |
|
"loss": 0.4881, |
|
"step": 269000 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 2.1818467008261005e-05, |
|
"loss": 0.4973, |
|
"step": 269500 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"learning_rate": 2.1766182160409916e-05, |
|
"loss": 0.4687, |
|
"step": 270000 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 2.171389731255882e-05, |
|
"loss": 0.4862, |
|
"step": 270500 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"learning_rate": 2.166161246470773e-05, |
|
"loss": 0.4734, |
|
"step": 271000 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 2.1609327616856637e-05, |
|
"loss": 0.5074, |
|
"step": 271500 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"learning_rate": 2.155704276900554e-05, |
|
"loss": 0.4922, |
|
"step": 272000 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"learning_rate": 2.150475792115445e-05, |
|
"loss": 0.498, |
|
"step": 272500 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"learning_rate": 2.1452473073303357e-05, |
|
"loss": 0.4725, |
|
"step": 273000 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"learning_rate": 2.1400188225452265e-05, |
|
"loss": 0.429, |
|
"step": 273500 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 2.1347903377601173e-05, |
|
"loss": 0.4755, |
|
"step": 274000 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"learning_rate": 2.129561852975008e-05, |
|
"loss": 0.4757, |
|
"step": 274500 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"learning_rate": 2.1243333681898986e-05, |
|
"loss": 0.4728, |
|
"step": 275000 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"learning_rate": 2.1191048834047894e-05, |
|
"loss": 0.491, |
|
"step": 275500 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"learning_rate": 2.1138763986196802e-05, |
|
"loss": 0.4939, |
|
"step": 276000 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"learning_rate": 2.1086479138345706e-05, |
|
"loss": 0.4773, |
|
"step": 276500 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"learning_rate": 2.1034194290494618e-05, |
|
"loss": 0.4812, |
|
"step": 277000 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"learning_rate": 2.0981909442643526e-05, |
|
"loss": 0.4796, |
|
"step": 277500 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"learning_rate": 2.092962459479243e-05, |
|
"loss": 0.48, |
|
"step": 278000 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 2.0877339746941338e-05, |
|
"loss": 0.4863, |
|
"step": 278500 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 2.0825054899090243e-05, |
|
"loss": 0.4961, |
|
"step": 279000 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 2.077277005123915e-05, |
|
"loss": 0.4645, |
|
"step": 279500 |
|
}, |
|
{ |
|
"epoch": 5.86, |
|
"learning_rate": 2.072048520338806e-05, |
|
"loss": 0.4926, |
|
"step": 280000 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"learning_rate": 2.0668200355536967e-05, |
|
"loss": 0.4935, |
|
"step": 280500 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"learning_rate": 2.0615915507685875e-05, |
|
"loss": 0.5012, |
|
"step": 281000 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 2.0563630659834783e-05, |
|
"loss": 0.4922, |
|
"step": 281500 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"learning_rate": 2.0511345811983687e-05, |
|
"loss": 0.4614, |
|
"step": 282000 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"learning_rate": 2.0459060964132595e-05, |
|
"loss": 0.4737, |
|
"step": 282500 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"learning_rate": 2.0406776116281503e-05, |
|
"loss": 0.482, |
|
"step": 283000 |
|
}, |
|
{ |
|
"epoch": 5.93, |
|
"learning_rate": 2.0354491268430408e-05, |
|
"loss": 0.4773, |
|
"step": 283500 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 2.0302206420579316e-05, |
|
"loss": 0.5035, |
|
"step": 284000 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 2.0249921572728227e-05, |
|
"loss": 0.4404, |
|
"step": 284500 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"learning_rate": 2.019763672487713e-05, |
|
"loss": 0.4864, |
|
"step": 285000 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"learning_rate": 2.014535187702604e-05, |
|
"loss": 0.4959, |
|
"step": 285500 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"learning_rate": 2.0093067029174948e-05, |
|
"loss": 0.4792, |
|
"step": 286000 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"learning_rate": 2.0040782181323852e-05, |
|
"loss": 0.4821, |
|
"step": 286500 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"learning_rate": 1.998849733347276e-05, |
|
"loss": 0.4649, |
|
"step": 287000 |
|
}, |
|
{ |
|
"epoch": 6.01, |
|
"learning_rate": 1.9936212485621668e-05, |
|
"loss": 0.4552, |
|
"step": 287500 |
|
}, |
|
{ |
|
"epoch": 6.02, |
|
"learning_rate": 1.9883927637770576e-05, |
|
"loss": 0.4687, |
|
"step": 288000 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 1.9831642789919484e-05, |
|
"loss": 0.4455, |
|
"step": 288500 |
|
}, |
|
{ |
|
"epoch": 6.04, |
|
"learning_rate": 1.977935794206839e-05, |
|
"loss": 0.4571, |
|
"step": 289000 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"learning_rate": 1.9727073094217297e-05, |
|
"loss": 0.4498, |
|
"step": 289500 |
|
}, |
|
{ |
|
"epoch": 6.07, |
|
"learning_rate": 1.9674788246366205e-05, |
|
"loss": 0.4616, |
|
"step": 290000 |
|
}, |
|
{ |
|
"epoch": 6.08, |
|
"learning_rate": 1.962250339851511e-05, |
|
"loss": 0.4694, |
|
"step": 290500 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"learning_rate": 1.9570218550664017e-05, |
|
"loss": 0.4594, |
|
"step": 291000 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"learning_rate": 1.9517933702812925e-05, |
|
"loss": 0.4337, |
|
"step": 291500 |
|
}, |
|
{ |
|
"epoch": 6.11, |
|
"learning_rate": 1.9465648854961833e-05, |
|
"loss": 0.4548, |
|
"step": 292000 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"learning_rate": 1.941336400711074e-05, |
|
"loss": 0.4498, |
|
"step": 292500 |
|
}, |
|
{ |
|
"epoch": 6.13, |
|
"learning_rate": 1.936107915925965e-05, |
|
"loss": 0.4649, |
|
"step": 293000 |
|
}, |
|
{ |
|
"epoch": 6.14, |
|
"learning_rate": 1.9308794311408554e-05, |
|
"loss": 0.4529, |
|
"step": 293500 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"learning_rate": 1.925650946355746e-05, |
|
"loss": 0.4513, |
|
"step": 294000 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"learning_rate": 1.920422461570637e-05, |
|
"loss": 0.437, |
|
"step": 294500 |
|
}, |
|
{ |
|
"epoch": 6.17, |
|
"learning_rate": 1.9151939767855277e-05, |
|
"loss": 0.4541, |
|
"step": 295000 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"learning_rate": 1.9099654920004185e-05, |
|
"loss": 0.464, |
|
"step": 295500 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"learning_rate": 1.9047370072153093e-05, |
|
"loss": 0.4681, |
|
"step": 296000 |
|
}, |
|
{ |
|
"epoch": 6.2, |
|
"learning_rate": 1.8995085224301998e-05, |
|
"loss": 0.4354, |
|
"step": 296500 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"learning_rate": 1.8942800376450906e-05, |
|
"loss": 0.4457, |
|
"step": 297000 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"learning_rate": 1.889051552859981e-05, |
|
"loss": 0.4584, |
|
"step": 297500 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"learning_rate": 1.883823068074872e-05, |
|
"loss": 0.4815, |
|
"step": 298000 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"learning_rate": 1.8785945832897626e-05, |
|
"loss": 0.4383, |
|
"step": 298500 |
|
}, |
|
{ |
|
"epoch": 6.25, |
|
"learning_rate": 1.8733660985046534e-05, |
|
"loss": 0.446, |
|
"step": 299000 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"learning_rate": 1.8681376137195442e-05, |
|
"loss": 0.4733, |
|
"step": 299500 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"learning_rate": 1.862909128934435e-05, |
|
"loss": 0.449, |
|
"step": 300000 |
|
}, |
|
{ |
|
"epoch": 6.28, |
|
"learning_rate": 1.8576806441493255e-05, |
|
"loss": 0.4419, |
|
"step": 300500 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"learning_rate": 1.8524521593642163e-05, |
|
"loss": 0.4374, |
|
"step": 301000 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"learning_rate": 1.847223674579107e-05, |
|
"loss": 0.4565, |
|
"step": 301500 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"learning_rate": 1.8419951897939975e-05, |
|
"loss": 0.4434, |
|
"step": 302000 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"learning_rate": 1.8367667050088887e-05, |
|
"loss": 0.4394, |
|
"step": 302500 |
|
}, |
|
{ |
|
"epoch": 6.34, |
|
"learning_rate": 1.8315382202237795e-05, |
|
"loss": 0.4649, |
|
"step": 303000 |
|
}, |
|
{ |
|
"epoch": 6.35, |
|
"learning_rate": 1.82630973543867e-05, |
|
"loss": 0.4655, |
|
"step": 303500 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"learning_rate": 1.8210812506535607e-05, |
|
"loss": 0.4712, |
|
"step": 304000 |
|
}, |
|
{ |
|
"epoch": 6.37, |
|
"learning_rate": 1.8158527658684515e-05, |
|
"loss": 0.4672, |
|
"step": 304500 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"learning_rate": 1.810624281083342e-05, |
|
"loss": 0.4626, |
|
"step": 305000 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 1.8053957962982328e-05, |
|
"loss": 0.4649, |
|
"step": 305500 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"learning_rate": 1.8001673115131236e-05, |
|
"loss": 0.4477, |
|
"step": 306000 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"learning_rate": 1.7949388267280144e-05, |
|
"loss": 0.45, |
|
"step": 306500 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"learning_rate": 1.7897103419429052e-05, |
|
"loss": 0.4577, |
|
"step": 307000 |
|
}, |
|
{ |
|
"epoch": 6.43, |
|
"learning_rate": 1.7844818571577956e-05, |
|
"loss": 0.4795, |
|
"step": 307500 |
|
}, |
|
{ |
|
"epoch": 6.44, |
|
"learning_rate": 1.7792533723726864e-05, |
|
"loss": 0.4645, |
|
"step": 308000 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"learning_rate": 1.7740248875875772e-05, |
|
"loss": 0.4423, |
|
"step": 308500 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"learning_rate": 1.7687964028024677e-05, |
|
"loss": 0.4497, |
|
"step": 309000 |
|
}, |
|
{ |
|
"epoch": 6.47, |
|
"learning_rate": 1.7635679180173588e-05, |
|
"loss": 0.4769, |
|
"step": 309500 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"learning_rate": 1.7583394332322496e-05, |
|
"loss": 0.4639, |
|
"step": 310000 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"learning_rate": 1.75311094844714e-05, |
|
"loss": 0.4703, |
|
"step": 310500 |
|
}, |
|
{ |
|
"epoch": 6.5, |
|
"learning_rate": 1.747882463662031e-05, |
|
"loss": 0.4541, |
|
"step": 311000 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"learning_rate": 1.7426539788769217e-05, |
|
"loss": 0.454, |
|
"step": 311500 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"learning_rate": 1.737425494091812e-05, |
|
"loss": 0.4483, |
|
"step": 312000 |
|
}, |
|
{ |
|
"epoch": 6.54, |
|
"learning_rate": 1.732197009306703e-05, |
|
"loss": 0.4234, |
|
"step": 312500 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"learning_rate": 1.7269685245215937e-05, |
|
"loss": 0.4451, |
|
"step": 313000 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"learning_rate": 1.7217400397364845e-05, |
|
"loss": 0.4651, |
|
"step": 313500 |
|
}, |
|
{ |
|
"epoch": 6.57, |
|
"learning_rate": 1.7165115549513753e-05, |
|
"loss": 0.4331, |
|
"step": 314000 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 1.7112830701662658e-05, |
|
"loss": 0.4765, |
|
"step": 314500 |
|
}, |
|
{ |
|
"epoch": 6.59, |
|
"learning_rate": 1.7060545853811566e-05, |
|
"loss": 0.4552, |
|
"step": 315000 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 1.7008261005960474e-05, |
|
"loss": 0.4791, |
|
"step": 315500 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 1.6955976158109378e-05, |
|
"loss": 0.4552, |
|
"step": 316000 |
|
}, |
|
{ |
|
"epoch": 6.62, |
|
"learning_rate": 1.6903691310258286e-05, |
|
"loss": 0.455, |
|
"step": 316500 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"learning_rate": 1.6851406462407198e-05, |
|
"loss": 0.4444, |
|
"step": 317000 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"learning_rate": 1.6799121614556102e-05, |
|
"loss": 0.4469, |
|
"step": 317500 |
|
}, |
|
{ |
|
"epoch": 6.65, |
|
"learning_rate": 1.674683676670501e-05, |
|
"loss": 0.4334, |
|
"step": 318000 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 1.6694551918853918e-05, |
|
"loss": 0.4568, |
|
"step": 318500 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 1.6642267071002823e-05, |
|
"loss": 0.4446, |
|
"step": 319000 |
|
}, |
|
{ |
|
"epoch": 6.68, |
|
"learning_rate": 1.658998222315173e-05, |
|
"loss": 0.4535, |
|
"step": 319500 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"learning_rate": 1.653769737530064e-05, |
|
"loss": 0.4797, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 6.7, |
|
"learning_rate": 1.6485412527449547e-05, |
|
"loss": 0.4491, |
|
"step": 320500 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 1.6433127679598455e-05, |
|
"loss": 0.4702, |
|
"step": 321000 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"learning_rate": 1.6380842831747363e-05, |
|
"loss": 0.4596, |
|
"step": 321500 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"learning_rate": 1.6328557983896267e-05, |
|
"loss": 0.4359, |
|
"step": 322000 |
|
}, |
|
{ |
|
"epoch": 6.74, |
|
"learning_rate": 1.6276273136045175e-05, |
|
"loss": 0.4612, |
|
"step": 322500 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 1.622398828819408e-05, |
|
"loss": 0.4545, |
|
"step": 323000 |
|
}, |
|
{ |
|
"epoch": 6.77, |
|
"learning_rate": 1.6171703440342988e-05, |
|
"loss": 0.4274, |
|
"step": 323500 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"learning_rate": 1.6119418592491896e-05, |
|
"loss": 0.4545, |
|
"step": 324000 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"learning_rate": 1.6067133744640804e-05, |
|
"loss": 0.4704, |
|
"step": 324500 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"learning_rate": 1.601484889678971e-05, |
|
"loss": 0.4611, |
|
"step": 325000 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"learning_rate": 1.596256404893862e-05, |
|
"loss": 0.4661, |
|
"step": 325500 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"learning_rate": 1.5910279201087524e-05, |
|
"loss": 0.4599, |
|
"step": 326000 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"learning_rate": 1.5857994353236432e-05, |
|
"loss": 0.4543, |
|
"step": 326500 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"learning_rate": 1.580570950538534e-05, |
|
"loss": 0.4599, |
|
"step": 327000 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"learning_rate": 1.5753424657534248e-05, |
|
"loss": 0.4739, |
|
"step": 327500 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"learning_rate": 1.5701139809683156e-05, |
|
"loss": 0.4799, |
|
"step": 328000 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"learning_rate": 1.5648854961832064e-05, |
|
"loss": 0.4452, |
|
"step": 328500 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 1.559657011398097e-05, |
|
"loss": 0.4733, |
|
"step": 329000 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"learning_rate": 1.5544285266129877e-05, |
|
"loss": 0.4738, |
|
"step": 329500 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"learning_rate": 1.5492000418278784e-05, |
|
"loss": 0.4626, |
|
"step": 330000 |
|
}, |
|
{ |
|
"epoch": 6.91, |
|
"learning_rate": 1.543971557042769e-05, |
|
"loss": 0.4565, |
|
"step": 330500 |
|
}, |
|
{ |
|
"epoch": 6.92, |
|
"learning_rate": 1.5387430722576597e-05, |
|
"loss": 0.443, |
|
"step": 331000 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 1.5335145874725505e-05, |
|
"loss": 0.4594, |
|
"step": 331500 |
|
}, |
|
{ |
|
"epoch": 6.94, |
|
"learning_rate": 1.5282861026874413e-05, |
|
"loss": 0.4415, |
|
"step": 332000 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"learning_rate": 1.5230576179023321e-05, |
|
"loss": 0.4454, |
|
"step": 332500 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"learning_rate": 1.5178291331172226e-05, |
|
"loss": 0.4572, |
|
"step": 333000 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"learning_rate": 1.5126006483321133e-05, |
|
"loss": 0.4609, |
|
"step": 333500 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"learning_rate": 1.5073721635470041e-05, |
|
"loss": 0.4617, |
|
"step": 334000 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"learning_rate": 1.5021436787618948e-05, |
|
"loss": 0.4703, |
|
"step": 334500 |
|
}, |
|
{ |
|
"epoch": 7.01, |
|
"learning_rate": 1.4969151939767856e-05, |
|
"loss": 0.4387, |
|
"step": 335000 |
|
}, |
|
{ |
|
"epoch": 7.02, |
|
"learning_rate": 1.4916867091916764e-05, |
|
"loss": 0.4427, |
|
"step": 335500 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"learning_rate": 1.486458224406567e-05, |
|
"loss": 0.4316, |
|
"step": 336000 |
|
}, |
|
{ |
|
"epoch": 7.04, |
|
"learning_rate": 1.4812297396214578e-05, |
|
"loss": 0.4496, |
|
"step": 336500 |
|
}, |
|
{ |
|
"epoch": 7.05, |
|
"learning_rate": 1.4760012548363486e-05, |
|
"loss": 0.4646, |
|
"step": 337000 |
|
}, |
|
{ |
|
"epoch": 7.06, |
|
"learning_rate": 1.4707727700512392e-05, |
|
"loss": 0.4382, |
|
"step": 337500 |
|
}, |
|
{ |
|
"epoch": 7.07, |
|
"learning_rate": 1.46554428526613e-05, |
|
"loss": 0.4402, |
|
"step": 338000 |
|
}, |
|
{ |
|
"epoch": 7.08, |
|
"learning_rate": 1.4603158004810208e-05, |
|
"loss": 0.4444, |
|
"step": 338500 |
|
}, |
|
{ |
|
"epoch": 7.09, |
|
"learning_rate": 1.4550873156959113e-05, |
|
"loss": 0.4309, |
|
"step": 339000 |
|
}, |
|
{ |
|
"epoch": 7.1, |
|
"learning_rate": 1.4498588309108022e-05, |
|
"loss": 0.4195, |
|
"step": 339500 |
|
}, |
|
{ |
|
"epoch": 7.11, |
|
"learning_rate": 1.444630346125693e-05, |
|
"loss": 0.4596, |
|
"step": 340000 |
|
}, |
|
{ |
|
"epoch": 7.12, |
|
"learning_rate": 1.4394018613405835e-05, |
|
"loss": 0.4394, |
|
"step": 340500 |
|
}, |
|
{ |
|
"epoch": 7.13, |
|
"learning_rate": 1.4341733765554743e-05, |
|
"loss": 0.4309, |
|
"step": 341000 |
|
}, |
|
{ |
|
"epoch": 7.14, |
|
"learning_rate": 1.4289448917703649e-05, |
|
"loss": 0.4517, |
|
"step": 341500 |
|
}, |
|
{ |
|
"epoch": 7.15, |
|
"learning_rate": 1.4237164069852557e-05, |
|
"loss": 0.4504, |
|
"step": 342000 |
|
}, |
|
{ |
|
"epoch": 7.16, |
|
"learning_rate": 1.4184879222001465e-05, |
|
"loss": 0.4289, |
|
"step": 342500 |
|
}, |
|
{ |
|
"epoch": 7.17, |
|
"learning_rate": 1.4132594374150371e-05, |
|
"loss": 0.4268, |
|
"step": 343000 |
|
}, |
|
{ |
|
"epoch": 7.18, |
|
"learning_rate": 1.408030952629928e-05, |
|
"loss": 0.4317, |
|
"step": 343500 |
|
}, |
|
{ |
|
"epoch": 7.19, |
|
"learning_rate": 1.4028024678448187e-05, |
|
"loss": 0.4535, |
|
"step": 344000 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"learning_rate": 1.3975739830597092e-05, |
|
"loss": 0.4405, |
|
"step": 344500 |
|
}, |
|
{ |
|
"epoch": 7.22, |
|
"learning_rate": 1.3923454982746002e-05, |
|
"loss": 0.4287, |
|
"step": 345000 |
|
}, |
|
{ |
|
"epoch": 7.23, |
|
"learning_rate": 1.387117013489491e-05, |
|
"loss": 0.4418, |
|
"step": 345500 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"learning_rate": 1.3818885287043814e-05, |
|
"loss": 0.4316, |
|
"step": 346000 |
|
}, |
|
{ |
|
"epoch": 7.25, |
|
"learning_rate": 1.3766600439192722e-05, |
|
"loss": 0.4319, |
|
"step": 346500 |
|
}, |
|
{ |
|
"epoch": 7.26, |
|
"learning_rate": 1.3714315591341632e-05, |
|
"loss": 0.4426, |
|
"step": 347000 |
|
}, |
|
{ |
|
"epoch": 7.27, |
|
"learning_rate": 1.3662030743490536e-05, |
|
"loss": 0.4215, |
|
"step": 347500 |
|
}, |
|
{ |
|
"epoch": 7.28, |
|
"learning_rate": 1.3609745895639444e-05, |
|
"loss": 0.4526, |
|
"step": 348000 |
|
}, |
|
{ |
|
"epoch": 7.29, |
|
"learning_rate": 1.3557461047788352e-05, |
|
"loss": 0.4335, |
|
"step": 348500 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"learning_rate": 1.3505176199937259e-05, |
|
"loss": 0.4294, |
|
"step": 349000 |
|
}, |
|
{ |
|
"epoch": 7.31, |
|
"learning_rate": 1.3452891352086166e-05, |
|
"loss": 0.4141, |
|
"step": 349500 |
|
}, |
|
{ |
|
"epoch": 7.32, |
|
"learning_rate": 1.3400606504235073e-05, |
|
"loss": 0.4348, |
|
"step": 350000 |
|
}, |
|
{ |
|
"epoch": 7.33, |
|
"learning_rate": 1.334832165638398e-05, |
|
"loss": 0.4515, |
|
"step": 350500 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"learning_rate": 1.3296036808532889e-05, |
|
"loss": 0.4326, |
|
"step": 351000 |
|
}, |
|
{ |
|
"epoch": 7.35, |
|
"learning_rate": 1.3243751960681793e-05, |
|
"loss": 0.4244, |
|
"step": 351500 |
|
}, |
|
{ |
|
"epoch": 7.36, |
|
"learning_rate": 1.3191467112830703e-05, |
|
"loss": 0.4621, |
|
"step": 352000 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"learning_rate": 1.3139182264979611e-05, |
|
"loss": 0.4211, |
|
"step": 352500 |
|
}, |
|
{ |
|
"epoch": 7.38, |
|
"learning_rate": 1.3086897417128516e-05, |
|
"loss": 0.4295, |
|
"step": 353000 |
|
}, |
|
{ |
|
"epoch": 7.39, |
|
"learning_rate": 1.3034612569277423e-05, |
|
"loss": 0.4276, |
|
"step": 353500 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"learning_rate": 1.2982327721426331e-05, |
|
"loss": 0.4339, |
|
"step": 354000 |
|
}, |
|
{ |
|
"epoch": 7.41, |
|
"learning_rate": 1.2930042873575238e-05, |
|
"loss": 0.4433, |
|
"step": 354500 |
|
}, |
|
{ |
|
"epoch": 7.42, |
|
"learning_rate": 1.2877758025724146e-05, |
|
"loss": 0.4503, |
|
"step": 355000 |
|
}, |
|
{ |
|
"epoch": 7.43, |
|
"learning_rate": 1.2825473177873054e-05, |
|
"loss": 0.4386, |
|
"step": 355500 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"learning_rate": 1.277318833002196e-05, |
|
"loss": 0.4357, |
|
"step": 356000 |
|
}, |
|
{ |
|
"epoch": 7.46, |
|
"learning_rate": 1.2720903482170868e-05, |
|
"loss": 0.4371, |
|
"step": 356500 |
|
}, |
|
{ |
|
"epoch": 7.47, |
|
"learning_rate": 1.2668618634319776e-05, |
|
"loss": 0.4332, |
|
"step": 357000 |
|
}, |
|
{ |
|
"epoch": 7.48, |
|
"learning_rate": 1.2616333786468682e-05, |
|
"loss": 0.425, |
|
"step": 357500 |
|
}, |
|
{ |
|
"epoch": 7.49, |
|
"learning_rate": 1.256404893861759e-05, |
|
"loss": 0.4317, |
|
"step": 358000 |
|
}, |
|
{ |
|
"epoch": 7.5, |
|
"learning_rate": 1.2511764090766495e-05, |
|
"loss": 0.428, |
|
"step": 358500 |
|
}, |
|
{ |
|
"epoch": 7.51, |
|
"learning_rate": 1.2459479242915403e-05, |
|
"loss": 0.4694, |
|
"step": 359000 |
|
}, |
|
{ |
|
"epoch": 7.52, |
|
"learning_rate": 1.2407194395064312e-05, |
|
"loss": 0.412, |
|
"step": 359500 |
|
}, |
|
{ |
|
"epoch": 7.53, |
|
"learning_rate": 1.2354909547213219e-05, |
|
"loss": 0.4387, |
|
"step": 360000 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"learning_rate": 1.2302624699362125e-05, |
|
"loss": 0.4412, |
|
"step": 360500 |
|
}, |
|
{ |
|
"epoch": 7.55, |
|
"learning_rate": 1.2250339851511033e-05, |
|
"loss": 0.4401, |
|
"step": 361000 |
|
}, |
|
{ |
|
"epoch": 7.56, |
|
"learning_rate": 1.219805500365994e-05, |
|
"loss": 0.4325, |
|
"step": 361500 |
|
}, |
|
{ |
|
"epoch": 7.57, |
|
"learning_rate": 1.2145770155808847e-05, |
|
"loss": 0.4294, |
|
"step": 362000 |
|
}, |
|
{ |
|
"epoch": 7.58, |
|
"learning_rate": 1.2093485307957753e-05, |
|
"loss": 0.4468, |
|
"step": 362500 |
|
}, |
|
{ |
|
"epoch": 7.59, |
|
"learning_rate": 1.2041200460106663e-05, |
|
"loss": 0.4422, |
|
"step": 363000 |
|
}, |
|
{ |
|
"epoch": 7.6, |
|
"learning_rate": 1.198891561225557e-05, |
|
"loss": 0.4261, |
|
"step": 363500 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"learning_rate": 1.1936630764404476e-05, |
|
"loss": 0.4385, |
|
"step": 364000 |
|
}, |
|
{ |
|
"epoch": 7.62, |
|
"learning_rate": 1.1884345916553384e-05, |
|
"loss": 0.4453, |
|
"step": 364500 |
|
}, |
|
{ |
|
"epoch": 7.63, |
|
"learning_rate": 1.1832061068702292e-05, |
|
"loss": 0.4743, |
|
"step": 365000 |
|
}, |
|
{ |
|
"epoch": 7.64, |
|
"learning_rate": 1.1779776220851198e-05, |
|
"loss": 0.4223, |
|
"step": 365500 |
|
}, |
|
{ |
|
"epoch": 7.65, |
|
"learning_rate": 1.1727491373000104e-05, |
|
"loss": 0.4491, |
|
"step": 366000 |
|
}, |
|
{ |
|
"epoch": 7.66, |
|
"learning_rate": 1.1675206525149012e-05, |
|
"loss": 0.4185, |
|
"step": 366500 |
|
}, |
|
{ |
|
"epoch": 7.68, |
|
"learning_rate": 1.162292167729792e-05, |
|
"loss": 0.4402, |
|
"step": 367000 |
|
}, |
|
{ |
|
"epoch": 7.69, |
|
"learning_rate": 1.1570636829446826e-05, |
|
"loss": 0.4612, |
|
"step": 367500 |
|
}, |
|
{ |
|
"epoch": 7.7, |
|
"learning_rate": 1.1518351981595734e-05, |
|
"loss": 0.4621, |
|
"step": 368000 |
|
}, |
|
{ |
|
"epoch": 7.71, |
|
"learning_rate": 1.1466067133744642e-05, |
|
"loss": 0.4304, |
|
"step": 368500 |
|
}, |
|
{ |
|
"epoch": 7.72, |
|
"learning_rate": 1.1413782285893549e-05, |
|
"loss": 0.4245, |
|
"step": 369000 |
|
}, |
|
{ |
|
"epoch": 7.73, |
|
"learning_rate": 1.1361497438042455e-05, |
|
"loss": 0.4259, |
|
"step": 369500 |
|
}, |
|
{ |
|
"epoch": 7.74, |
|
"learning_rate": 1.1309212590191363e-05, |
|
"loss": 0.428, |
|
"step": 370000 |
|
}, |
|
{ |
|
"epoch": 7.75, |
|
"learning_rate": 1.125692774234027e-05, |
|
"loss": 0.4337, |
|
"step": 370500 |
|
}, |
|
{ |
|
"epoch": 7.76, |
|
"learning_rate": 1.1204642894489177e-05, |
|
"loss": 0.4452, |
|
"step": 371000 |
|
}, |
|
{ |
|
"epoch": 7.77, |
|
"learning_rate": 1.1152358046638085e-05, |
|
"loss": 0.4485, |
|
"step": 371500 |
|
}, |
|
{ |
|
"epoch": 7.78, |
|
"learning_rate": 1.1100073198786993e-05, |
|
"loss": 0.4324, |
|
"step": 372000 |
|
}, |
|
{ |
|
"epoch": 7.79, |
|
"learning_rate": 1.10477883509359e-05, |
|
"loss": 0.43, |
|
"step": 372500 |
|
}, |
|
{ |
|
"epoch": 7.8, |
|
"learning_rate": 1.0995503503084807e-05, |
|
"loss": 0.4408, |
|
"step": 373000 |
|
}, |
|
{ |
|
"epoch": 7.81, |
|
"learning_rate": 1.0943218655233713e-05, |
|
"loss": 0.4352, |
|
"step": 373500 |
|
}, |
|
{ |
|
"epoch": 7.82, |
|
"learning_rate": 1.0890933807382621e-05, |
|
"loss": 0.4282, |
|
"step": 374000 |
|
}, |
|
{ |
|
"epoch": 7.83, |
|
"learning_rate": 1.0838648959531528e-05, |
|
"loss": 0.4512, |
|
"step": 374500 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"learning_rate": 1.0786364111680436e-05, |
|
"loss": 0.4499, |
|
"step": 375000 |
|
}, |
|
{ |
|
"epoch": 7.85, |
|
"learning_rate": 1.0734079263829342e-05, |
|
"loss": 0.4457, |
|
"step": 375500 |
|
}, |
|
{ |
|
"epoch": 7.86, |
|
"learning_rate": 1.068179441597825e-05, |
|
"loss": 0.4276, |
|
"step": 376000 |
|
}, |
|
{ |
|
"epoch": 7.87, |
|
"learning_rate": 1.0629509568127158e-05, |
|
"loss": 0.4353, |
|
"step": 376500 |
|
}, |
|
{ |
|
"epoch": 7.88, |
|
"learning_rate": 1.0577224720276064e-05, |
|
"loss": 0.4521, |
|
"step": 377000 |
|
}, |
|
{ |
|
"epoch": 7.9, |
|
"learning_rate": 1.0524939872424972e-05, |
|
"loss": 0.4409, |
|
"step": 377500 |
|
}, |
|
{ |
|
"epoch": 7.91, |
|
"learning_rate": 1.0472655024573878e-05, |
|
"loss": 0.4217, |
|
"step": 378000 |
|
}, |
|
{ |
|
"epoch": 7.92, |
|
"learning_rate": 1.0420370176722786e-05, |
|
"loss": 0.453, |
|
"step": 378500 |
|
}, |
|
{ |
|
"epoch": 7.93, |
|
"learning_rate": 1.0368085328871693e-05, |
|
"loss": 0.4401, |
|
"step": 379000 |
|
}, |
|
{ |
|
"epoch": 7.94, |
|
"learning_rate": 1.03158004810206e-05, |
|
"loss": 0.4541, |
|
"step": 379500 |
|
}, |
|
{ |
|
"epoch": 7.95, |
|
"learning_rate": 1.0263515633169509e-05, |
|
"loss": 0.432, |
|
"step": 380000 |
|
}, |
|
{ |
|
"epoch": 7.96, |
|
"learning_rate": 1.0211230785318415e-05, |
|
"loss": 0.4453, |
|
"step": 380500 |
|
}, |
|
{ |
|
"epoch": 7.97, |
|
"learning_rate": 1.0158945937467323e-05, |
|
"loss": 0.4389, |
|
"step": 381000 |
|
}, |
|
{ |
|
"epoch": 7.98, |
|
"learning_rate": 1.010666108961623e-05, |
|
"loss": 0.4453, |
|
"step": 381500 |
|
}, |
|
{ |
|
"epoch": 7.99, |
|
"learning_rate": 1.0054376241765137e-05, |
|
"loss": 0.4338, |
|
"step": 382000 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"learning_rate": 1.0002091393914043e-05, |
|
"loss": 0.4269, |
|
"step": 382500 |
|
}, |
|
{ |
|
"epoch": 8.01, |
|
"learning_rate": 9.949806546062951e-06, |
|
"loss": 0.4261, |
|
"step": 383000 |
|
}, |
|
{ |
|
"epoch": 8.02, |
|
"learning_rate": 9.89752169821186e-06, |
|
"loss": 0.4249, |
|
"step": 383500 |
|
}, |
|
{ |
|
"epoch": 8.03, |
|
"learning_rate": 9.845236850360766e-06, |
|
"loss": 0.4067, |
|
"step": 384000 |
|
}, |
|
{ |
|
"epoch": 8.04, |
|
"learning_rate": 9.792952002509674e-06, |
|
"loss": 0.4065, |
|
"step": 384500 |
|
}, |
|
{ |
|
"epoch": 8.05, |
|
"learning_rate": 9.740667154658582e-06, |
|
"loss": 0.4301, |
|
"step": 385000 |
|
}, |
|
{ |
|
"epoch": 8.06, |
|
"learning_rate": 9.688382306807488e-06, |
|
"loss": 0.4121, |
|
"step": 385500 |
|
}, |
|
{ |
|
"epoch": 8.07, |
|
"learning_rate": 9.636097458956394e-06, |
|
"loss": 0.4413, |
|
"step": 386000 |
|
}, |
|
{ |
|
"epoch": 8.08, |
|
"learning_rate": 9.583812611105302e-06, |
|
"loss": 0.4113, |
|
"step": 386500 |
|
}, |
|
{ |
|
"epoch": 8.09, |
|
"learning_rate": 9.53152776325421e-06, |
|
"loss": 0.4107, |
|
"step": 387000 |
|
}, |
|
{ |
|
"epoch": 8.1, |
|
"learning_rate": 9.479242915403116e-06, |
|
"loss": 0.4356, |
|
"step": 387500 |
|
}, |
|
{ |
|
"epoch": 8.11, |
|
"learning_rate": 9.426958067552023e-06, |
|
"loss": 0.4213, |
|
"step": 388000 |
|
}, |
|
{ |
|
"epoch": 8.13, |
|
"learning_rate": 9.374673219700932e-06, |
|
"loss": 0.4323, |
|
"step": 388500 |
|
}, |
|
{ |
|
"epoch": 8.14, |
|
"learning_rate": 9.322388371849838e-06, |
|
"loss": 0.4215, |
|
"step": 389000 |
|
}, |
|
{ |
|
"epoch": 8.15, |
|
"learning_rate": 9.270103523998745e-06, |
|
"loss": 0.4348, |
|
"step": 389500 |
|
}, |
|
{ |
|
"epoch": 8.16, |
|
"learning_rate": 9.217818676147653e-06, |
|
"loss": 0.4179, |
|
"step": 390000 |
|
}, |
|
{ |
|
"epoch": 8.17, |
|
"learning_rate": 9.16553382829656e-06, |
|
"loss": 0.4253, |
|
"step": 390500 |
|
}, |
|
{ |
|
"epoch": 8.18, |
|
"learning_rate": 9.113248980445467e-06, |
|
"loss": 0.4407, |
|
"step": 391000 |
|
}, |
|
{ |
|
"epoch": 8.19, |
|
"learning_rate": 9.060964132594373e-06, |
|
"loss": 0.4426, |
|
"step": 391500 |
|
}, |
|
{ |
|
"epoch": 8.2, |
|
"learning_rate": 9.008679284743283e-06, |
|
"loss": 0.4223, |
|
"step": 392000 |
|
}, |
|
{ |
|
"epoch": 8.21, |
|
"learning_rate": 8.95639443689219e-06, |
|
"loss": 0.399, |
|
"step": 392500 |
|
}, |
|
{ |
|
"epoch": 8.22, |
|
"learning_rate": 8.904109589041095e-06, |
|
"loss": 0.4347, |
|
"step": 393000 |
|
}, |
|
{ |
|
"epoch": 8.23, |
|
"learning_rate": 8.851824741190003e-06, |
|
"loss": 0.418, |
|
"step": 393500 |
|
}, |
|
{ |
|
"epoch": 8.24, |
|
"learning_rate": 8.799539893338911e-06, |
|
"loss": 0.4381, |
|
"step": 394000 |
|
}, |
|
{ |
|
"epoch": 8.25, |
|
"learning_rate": 8.747255045487818e-06, |
|
"loss": 0.4165, |
|
"step": 394500 |
|
}, |
|
{ |
|
"epoch": 8.26, |
|
"learning_rate": 8.694970197636726e-06, |
|
"loss": 0.4168, |
|
"step": 395000 |
|
}, |
|
{ |
|
"epoch": 8.27, |
|
"learning_rate": 8.642685349785634e-06, |
|
"loss": 0.4169, |
|
"step": 395500 |
|
}, |
|
{ |
|
"epoch": 8.28, |
|
"learning_rate": 8.59040050193454e-06, |
|
"loss": 0.3928, |
|
"step": 396000 |
|
}, |
|
{ |
|
"epoch": 8.29, |
|
"learning_rate": 8.538115654083446e-06, |
|
"loss": 0.4138, |
|
"step": 396500 |
|
}, |
|
{ |
|
"epoch": 8.3, |
|
"learning_rate": 8.485830806232354e-06, |
|
"loss": 0.4091, |
|
"step": 397000 |
|
}, |
|
{ |
|
"epoch": 8.31, |
|
"learning_rate": 8.433545958381262e-06, |
|
"loss": 0.4141, |
|
"step": 397500 |
|
}, |
|
{ |
|
"epoch": 8.32, |
|
"learning_rate": 8.381261110530168e-06, |
|
"loss": 0.4111, |
|
"step": 398000 |
|
}, |
|
{ |
|
"epoch": 8.33, |
|
"learning_rate": 8.328976262679076e-06, |
|
"loss": 0.4271, |
|
"step": 398500 |
|
}, |
|
{ |
|
"epoch": 8.34, |
|
"learning_rate": 8.276691414827983e-06, |
|
"loss": 0.4164, |
|
"step": 399000 |
|
}, |
|
{ |
|
"epoch": 8.36, |
|
"learning_rate": 8.22440656697689e-06, |
|
"loss": 0.4158, |
|
"step": 399500 |
|
}, |
|
{ |
|
"epoch": 8.37, |
|
"learning_rate": 8.172121719125797e-06, |
|
"loss": 0.4256, |
|
"step": 400000 |
|
}, |
|
{ |
|
"epoch": 8.38, |
|
"learning_rate": 8.119836871274705e-06, |
|
"loss": 0.4206, |
|
"step": 400500 |
|
}, |
|
{ |
|
"epoch": 8.39, |
|
"learning_rate": 8.067552023423613e-06, |
|
"loss": 0.4123, |
|
"step": 401000 |
|
}, |
|
{ |
|
"epoch": 8.4, |
|
"learning_rate": 8.015267175572519e-06, |
|
"loss": 0.4244, |
|
"step": 401500 |
|
}, |
|
{ |
|
"epoch": 8.41, |
|
"learning_rate": 7.962982327721427e-06, |
|
"loss": 0.42, |
|
"step": 402000 |
|
}, |
|
{ |
|
"epoch": 8.42, |
|
"learning_rate": 7.910697479870333e-06, |
|
"loss": 0.4067, |
|
"step": 402500 |
|
}, |
|
{ |
|
"epoch": 8.43, |
|
"learning_rate": 7.858412632019241e-06, |
|
"loss": 0.4307, |
|
"step": 403000 |
|
}, |
|
{ |
|
"epoch": 8.44, |
|
"learning_rate": 7.80612778416815e-06, |
|
"loss": 0.4354, |
|
"step": 403500 |
|
}, |
|
{ |
|
"epoch": 8.45, |
|
"learning_rate": 7.753842936317056e-06, |
|
"loss": 0.4007, |
|
"step": 404000 |
|
}, |
|
{ |
|
"epoch": 8.46, |
|
"learning_rate": 7.701558088465964e-06, |
|
"loss": 0.4185, |
|
"step": 404500 |
|
}, |
|
{ |
|
"epoch": 8.47, |
|
"learning_rate": 7.64927324061487e-06, |
|
"loss": 0.4448, |
|
"step": 405000 |
|
}, |
|
{ |
|
"epoch": 8.48, |
|
"learning_rate": 7.596988392763778e-06, |
|
"loss": 0.4314, |
|
"step": 405500 |
|
}, |
|
{ |
|
"epoch": 8.49, |
|
"learning_rate": 7.544703544912685e-06, |
|
"loss": 0.4176, |
|
"step": 406000 |
|
}, |
|
{ |
|
"epoch": 8.5, |
|
"learning_rate": 7.492418697061591e-06, |
|
"loss": 0.4242, |
|
"step": 406500 |
|
}, |
|
{ |
|
"epoch": 8.51, |
|
"learning_rate": 7.4401338492105e-06, |
|
"loss": 0.4259, |
|
"step": 407000 |
|
}, |
|
{ |
|
"epoch": 8.52, |
|
"learning_rate": 7.387849001359406e-06, |
|
"loss": 0.3979, |
|
"step": 407500 |
|
}, |
|
{ |
|
"epoch": 8.53, |
|
"learning_rate": 7.335564153508313e-06, |
|
"loss": 0.4005, |
|
"step": 408000 |
|
}, |
|
{ |
|
"epoch": 8.54, |
|
"learning_rate": 7.283279305657221e-06, |
|
"loss": 0.4435, |
|
"step": 408500 |
|
}, |
|
{ |
|
"epoch": 8.55, |
|
"learning_rate": 7.2309944578061285e-06, |
|
"loss": 0.4414, |
|
"step": 409000 |
|
}, |
|
{ |
|
"epoch": 8.56, |
|
"learning_rate": 7.178709609955036e-06, |
|
"loss": 0.4335, |
|
"step": 409500 |
|
}, |
|
{ |
|
"epoch": 8.57, |
|
"learning_rate": 7.126424762103942e-06, |
|
"loss": 0.4032, |
|
"step": 410000 |
|
}, |
|
{ |
|
"epoch": 8.59, |
|
"learning_rate": 7.074139914252851e-06, |
|
"loss": 0.4205, |
|
"step": 410500 |
|
}, |
|
{ |
|
"epoch": 8.6, |
|
"learning_rate": 7.021855066401757e-06, |
|
"loss": 0.4307, |
|
"step": 411000 |
|
}, |
|
{ |
|
"epoch": 8.61, |
|
"learning_rate": 6.969570218550664e-06, |
|
"loss": 0.4257, |
|
"step": 411500 |
|
}, |
|
{ |
|
"epoch": 8.62, |
|
"learning_rate": 6.917285370699572e-06, |
|
"loss": 0.3947, |
|
"step": 412000 |
|
}, |
|
{ |
|
"epoch": 8.63, |
|
"learning_rate": 6.865000522848479e-06, |
|
"loss": 0.4222, |
|
"step": 412500 |
|
}, |
|
{ |
|
"epoch": 8.64, |
|
"learning_rate": 6.8127156749973854e-06, |
|
"loss": 0.4179, |
|
"step": 413000 |
|
}, |
|
{ |
|
"epoch": 8.65, |
|
"learning_rate": 6.7604308271462926e-06, |
|
"loss": 0.4095, |
|
"step": 413500 |
|
}, |
|
{ |
|
"epoch": 8.66, |
|
"learning_rate": 6.7081459792952005e-06, |
|
"loss": 0.4268, |
|
"step": 414000 |
|
}, |
|
{ |
|
"epoch": 8.67, |
|
"learning_rate": 6.655861131444108e-06, |
|
"loss": 0.4201, |
|
"step": 414500 |
|
}, |
|
{ |
|
"epoch": 8.68, |
|
"learning_rate": 6.603576283593015e-06, |
|
"loss": 0.4435, |
|
"step": 415000 |
|
}, |
|
{ |
|
"epoch": 8.69, |
|
"learning_rate": 6.551291435741923e-06, |
|
"loss": 0.4201, |
|
"step": 415500 |
|
}, |
|
{ |
|
"epoch": 8.7, |
|
"learning_rate": 6.49900658789083e-06, |
|
"loss": 0.4152, |
|
"step": 416000 |
|
}, |
|
{ |
|
"epoch": 8.71, |
|
"learning_rate": 6.446721740039736e-06, |
|
"loss": 0.4389, |
|
"step": 416500 |
|
}, |
|
{ |
|
"epoch": 8.72, |
|
"learning_rate": 6.394436892188645e-06, |
|
"loss": 0.4174, |
|
"step": 417000 |
|
}, |
|
{ |
|
"epoch": 8.73, |
|
"learning_rate": 6.342152044337551e-06, |
|
"loss": 0.4222, |
|
"step": 417500 |
|
}, |
|
{ |
|
"epoch": 8.74, |
|
"learning_rate": 6.289867196486458e-06, |
|
"loss": 0.4301, |
|
"step": 418000 |
|
}, |
|
{ |
|
"epoch": 8.75, |
|
"learning_rate": 6.2375823486353655e-06, |
|
"loss": 0.4279, |
|
"step": 418500 |
|
}, |
|
{ |
|
"epoch": 8.76, |
|
"learning_rate": 6.185297500784273e-06, |
|
"loss": 0.4346, |
|
"step": 419000 |
|
}, |
|
{ |
|
"epoch": 8.77, |
|
"learning_rate": 6.1330126529331806e-06, |
|
"loss": 0.4296, |
|
"step": 419500 |
|
}, |
|
{ |
|
"epoch": 8.78, |
|
"learning_rate": 6.080727805082088e-06, |
|
"loss": 0.4194, |
|
"step": 420000 |
|
}, |
|
{ |
|
"epoch": 8.79, |
|
"learning_rate": 6.028442957230995e-06, |
|
"loss": 0.412, |
|
"step": 420500 |
|
}, |
|
{ |
|
"epoch": 8.8, |
|
"learning_rate": 5.976158109379902e-06, |
|
"loss": 0.4128, |
|
"step": 421000 |
|
}, |
|
{ |
|
"epoch": 8.82, |
|
"learning_rate": 5.923873261528809e-06, |
|
"loss": 0.4182, |
|
"step": 421500 |
|
}, |
|
{ |
|
"epoch": 8.83, |
|
"learning_rate": 5.871588413677716e-06, |
|
"loss": 0.4338, |
|
"step": 422000 |
|
}, |
|
{ |
|
"epoch": 8.84, |
|
"learning_rate": 5.819303565826624e-06, |
|
"loss": 0.4509, |
|
"step": 422500 |
|
}, |
|
{ |
|
"epoch": 8.85, |
|
"learning_rate": 5.7670187179755304e-06, |
|
"loss": 0.4219, |
|
"step": 423000 |
|
}, |
|
{ |
|
"epoch": 8.86, |
|
"learning_rate": 5.714733870124438e-06, |
|
"loss": 0.4517, |
|
"step": 423500 |
|
}, |
|
{ |
|
"epoch": 8.87, |
|
"learning_rate": 5.6624490222733455e-06, |
|
"loss": 0.4208, |
|
"step": 424000 |
|
}, |
|
{ |
|
"epoch": 8.88, |
|
"learning_rate": 5.610164174422253e-06, |
|
"loss": 0.4269, |
|
"step": 424500 |
|
}, |
|
{ |
|
"epoch": 8.89, |
|
"learning_rate": 5.557879326571161e-06, |
|
"loss": 0.4002, |
|
"step": 425000 |
|
}, |
|
{ |
|
"epoch": 8.9, |
|
"learning_rate": 5.505594478720067e-06, |
|
"loss": 0.4041, |
|
"step": 425500 |
|
}, |
|
{ |
|
"epoch": 8.91, |
|
"learning_rate": 5.453309630868975e-06, |
|
"loss": 0.4152, |
|
"step": 426000 |
|
}, |
|
{ |
|
"epoch": 8.92, |
|
"learning_rate": 5.401024783017881e-06, |
|
"loss": 0.4241, |
|
"step": 426500 |
|
}, |
|
{ |
|
"epoch": 8.93, |
|
"learning_rate": 5.348739935166789e-06, |
|
"loss": 0.4164, |
|
"step": 427000 |
|
}, |
|
{ |
|
"epoch": 8.94, |
|
"learning_rate": 5.296455087315696e-06, |
|
"loss": 0.4242, |
|
"step": 427500 |
|
}, |
|
{ |
|
"epoch": 8.95, |
|
"learning_rate": 5.244170239464603e-06, |
|
"loss": 0.4456, |
|
"step": 428000 |
|
}, |
|
{ |
|
"epoch": 8.96, |
|
"learning_rate": 5.1918853916135105e-06, |
|
"loss": 0.4229, |
|
"step": 428500 |
|
}, |
|
{ |
|
"epoch": 8.97, |
|
"learning_rate": 5.139600543762418e-06, |
|
"loss": 0.4097, |
|
"step": 429000 |
|
}, |
|
{ |
|
"epoch": 8.98, |
|
"learning_rate": 5.0873156959113256e-06, |
|
"loss": 0.4169, |
|
"step": 429500 |
|
}, |
|
{ |
|
"epoch": 8.99, |
|
"learning_rate": 5.035030848060232e-06, |
|
"loss": 0.4466, |
|
"step": 430000 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"learning_rate": 4.98274600020914e-06, |
|
"loss": 0.4345, |
|
"step": 430500 |
|
}, |
|
{ |
|
"epoch": 9.01, |
|
"learning_rate": 4.930461152358047e-06, |
|
"loss": 0.4037, |
|
"step": 431000 |
|
}, |
|
{ |
|
"epoch": 9.02, |
|
"learning_rate": 4.878176304506954e-06, |
|
"loss": 0.4221, |
|
"step": 431500 |
|
}, |
|
{ |
|
"epoch": 9.03, |
|
"learning_rate": 4.825891456655861e-06, |
|
"loss": 0.3757, |
|
"step": 432000 |
|
}, |
|
{ |
|
"epoch": 9.05, |
|
"learning_rate": 4.773606608804768e-06, |
|
"loss": 0.4079, |
|
"step": 432500 |
|
}, |
|
{ |
|
"epoch": 9.06, |
|
"learning_rate": 4.721321760953676e-06, |
|
"loss": 0.4045, |
|
"step": 433000 |
|
}, |
|
{ |
|
"epoch": 9.07, |
|
"learning_rate": 4.669036913102583e-06, |
|
"loss": 0.4178, |
|
"step": 433500 |
|
}, |
|
{ |
|
"epoch": 9.08, |
|
"learning_rate": 4.6167520652514905e-06, |
|
"loss": 0.404, |
|
"step": 434000 |
|
}, |
|
{ |
|
"epoch": 9.09, |
|
"learning_rate": 4.564467217400398e-06, |
|
"loss": 0.4363, |
|
"step": 434500 |
|
}, |
|
{ |
|
"epoch": 9.1, |
|
"learning_rate": 4.512182369549305e-06, |
|
"loss": 0.4086, |
|
"step": 435000 |
|
}, |
|
{ |
|
"epoch": 9.11, |
|
"learning_rate": 4.459897521698212e-06, |
|
"loss": 0.4141, |
|
"step": 435500 |
|
}, |
|
{ |
|
"epoch": 9.12, |
|
"learning_rate": 4.40761267384712e-06, |
|
"loss": 0.4174, |
|
"step": 436000 |
|
}, |
|
{ |
|
"epoch": 9.13, |
|
"learning_rate": 4.355327825996026e-06, |
|
"loss": 0.4218, |
|
"step": 436500 |
|
}, |
|
{ |
|
"epoch": 9.14, |
|
"learning_rate": 4.303042978144934e-06, |
|
"loss": 0.404, |
|
"step": 437000 |
|
}, |
|
{ |
|
"epoch": 9.15, |
|
"learning_rate": 4.250758130293841e-06, |
|
"loss": 0.415, |
|
"step": 437500 |
|
}, |
|
{ |
|
"epoch": 9.16, |
|
"learning_rate": 4.198473282442748e-06, |
|
"loss": 0.4141, |
|
"step": 438000 |
|
}, |
|
{ |
|
"epoch": 9.17, |
|
"learning_rate": 4.1461884345916555e-06, |
|
"loss": 0.3927, |
|
"step": 438500 |
|
}, |
|
{ |
|
"epoch": 9.18, |
|
"learning_rate": 4.0939035867405626e-06, |
|
"loss": 0.4302, |
|
"step": 439000 |
|
}, |
|
{ |
|
"epoch": 9.19, |
|
"learning_rate": 4.0416187388894705e-06, |
|
"loss": 0.4193, |
|
"step": 439500 |
|
}, |
|
{ |
|
"epoch": 9.2, |
|
"learning_rate": 3.989333891038377e-06, |
|
"loss": 0.4089, |
|
"step": 440000 |
|
}, |
|
{ |
|
"epoch": 9.21, |
|
"learning_rate": 3.937049043187285e-06, |
|
"loss": 0.4003, |
|
"step": 440500 |
|
}, |
|
{ |
|
"epoch": 9.22, |
|
"learning_rate": 3.884764195336191e-06, |
|
"loss": 0.4011, |
|
"step": 441000 |
|
}, |
|
{ |
|
"epoch": 9.23, |
|
"learning_rate": 3.832479347485099e-06, |
|
"loss": 0.406, |
|
"step": 441500 |
|
}, |
|
{ |
|
"epoch": 9.24, |
|
"learning_rate": 3.7801944996340066e-06, |
|
"loss": 0.416, |
|
"step": 442000 |
|
}, |
|
{ |
|
"epoch": 9.25, |
|
"learning_rate": 3.7279096517829133e-06, |
|
"loss": 0.4303, |
|
"step": 442500 |
|
}, |
|
{ |
|
"epoch": 9.26, |
|
"learning_rate": 3.675624803931821e-06, |
|
"loss": 0.3928, |
|
"step": 443000 |
|
}, |
|
{ |
|
"epoch": 9.28, |
|
"learning_rate": 3.6233399560807275e-06, |
|
"loss": 0.4058, |
|
"step": 443500 |
|
}, |
|
{ |
|
"epoch": 9.29, |
|
"learning_rate": 3.571055108229635e-06, |
|
"loss": 0.4145, |
|
"step": 444000 |
|
}, |
|
{ |
|
"epoch": 9.3, |
|
"learning_rate": 3.5187702603785426e-06, |
|
"loss": 0.405, |
|
"step": 444500 |
|
}, |
|
{ |
|
"epoch": 9.31, |
|
"learning_rate": 3.4664854125274497e-06, |
|
"loss": 0.4074, |
|
"step": 445000 |
|
}, |
|
{ |
|
"epoch": 9.32, |
|
"learning_rate": 3.4142005646763573e-06, |
|
"loss": 0.4068, |
|
"step": 445500 |
|
}, |
|
{ |
|
"epoch": 9.33, |
|
"learning_rate": 3.361915716825264e-06, |
|
"loss": 0.4059, |
|
"step": 446000 |
|
}, |
|
{ |
|
"epoch": 9.34, |
|
"learning_rate": 3.3096308689741715e-06, |
|
"loss": 0.4371, |
|
"step": 446500 |
|
}, |
|
{ |
|
"epoch": 9.35, |
|
"learning_rate": 3.257346021123079e-06, |
|
"loss": 0.4203, |
|
"step": 447000 |
|
}, |
|
{ |
|
"epoch": 9.36, |
|
"learning_rate": 3.2050611732719858e-06, |
|
"loss": 0.407, |
|
"step": 447500 |
|
}, |
|
{ |
|
"epoch": 9.37, |
|
"learning_rate": 3.1527763254208933e-06, |
|
"loss": 0.4107, |
|
"step": 448000 |
|
}, |
|
{ |
|
"epoch": 9.38, |
|
"learning_rate": 3.1004914775698004e-06, |
|
"loss": 0.4242, |
|
"step": 448500 |
|
}, |
|
{ |
|
"epoch": 9.39, |
|
"learning_rate": 3.0482066297187076e-06, |
|
"loss": 0.4185, |
|
"step": 449000 |
|
}, |
|
{ |
|
"epoch": 9.4, |
|
"learning_rate": 2.9959217818676147e-06, |
|
"loss": 0.4028, |
|
"step": 449500 |
|
}, |
|
{ |
|
"epoch": 9.41, |
|
"learning_rate": 2.9436369340165222e-06, |
|
"loss": 0.3876, |
|
"step": 450000 |
|
}, |
|
{ |
|
"epoch": 9.42, |
|
"learning_rate": 2.8913520861654294e-06, |
|
"loss": 0.3924, |
|
"step": 450500 |
|
}, |
|
{ |
|
"epoch": 9.43, |
|
"learning_rate": 2.839067238314337e-06, |
|
"loss": 0.4232, |
|
"step": 451000 |
|
}, |
|
{ |
|
"epoch": 9.44, |
|
"learning_rate": 2.786782390463244e-06, |
|
"loss": 0.4339, |
|
"step": 451500 |
|
}, |
|
{ |
|
"epoch": 9.45, |
|
"learning_rate": 2.734497542612151e-06, |
|
"loss": 0.4155, |
|
"step": 452000 |
|
}, |
|
{ |
|
"epoch": 9.46, |
|
"learning_rate": 2.6822126947610583e-06, |
|
"loss": 0.4212, |
|
"step": 452500 |
|
}, |
|
{ |
|
"epoch": 9.47, |
|
"learning_rate": 2.6299278469099654e-06, |
|
"loss": 0.4205, |
|
"step": 453000 |
|
}, |
|
{ |
|
"epoch": 9.48, |
|
"learning_rate": 2.577642999058873e-06, |
|
"loss": 0.4055, |
|
"step": 453500 |
|
}, |
|
{ |
|
"epoch": 9.49, |
|
"learning_rate": 2.52535815120778e-06, |
|
"loss": 0.4073, |
|
"step": 454000 |
|
}, |
|
{ |
|
"epoch": 9.51, |
|
"learning_rate": 2.4730733033566876e-06, |
|
"loss": 0.4086, |
|
"step": 454500 |
|
}, |
|
{ |
|
"epoch": 9.52, |
|
"learning_rate": 2.4207884555055947e-06, |
|
"loss": 0.4472, |
|
"step": 455000 |
|
}, |
|
{ |
|
"epoch": 9.53, |
|
"learning_rate": 2.368503607654502e-06, |
|
"loss": 0.4278, |
|
"step": 455500 |
|
}, |
|
{ |
|
"epoch": 9.54, |
|
"learning_rate": 2.316218759803409e-06, |
|
"loss": 0.4158, |
|
"step": 456000 |
|
}, |
|
{ |
|
"epoch": 9.55, |
|
"learning_rate": 2.2639339119523165e-06, |
|
"loss": 0.4119, |
|
"step": 456500 |
|
}, |
|
{ |
|
"epoch": 9.56, |
|
"learning_rate": 2.2116490641012236e-06, |
|
"loss": 0.3938, |
|
"step": 457000 |
|
}, |
|
{ |
|
"epoch": 9.57, |
|
"learning_rate": 2.1593642162501308e-06, |
|
"loss": 0.4163, |
|
"step": 457500 |
|
}, |
|
{ |
|
"epoch": 9.58, |
|
"learning_rate": 2.107079368399038e-06, |
|
"loss": 0.3945, |
|
"step": 458000 |
|
}, |
|
{ |
|
"epoch": 9.59, |
|
"learning_rate": 2.054794520547945e-06, |
|
"loss": 0.4338, |
|
"step": 458500 |
|
}, |
|
{ |
|
"epoch": 9.6, |
|
"learning_rate": 2.0025096726968526e-06, |
|
"loss": 0.4274, |
|
"step": 459000 |
|
}, |
|
{ |
|
"epoch": 9.61, |
|
"learning_rate": 1.95022482484576e-06, |
|
"loss": 0.4125, |
|
"step": 459500 |
|
}, |
|
{ |
|
"epoch": 9.62, |
|
"learning_rate": 1.897939976994667e-06, |
|
"loss": 0.4229, |
|
"step": 460000 |
|
}, |
|
{ |
|
"epoch": 9.63, |
|
"learning_rate": 1.8456551291435743e-06, |
|
"loss": 0.4063, |
|
"step": 460500 |
|
}, |
|
{ |
|
"epoch": 9.64, |
|
"learning_rate": 1.7933702812924815e-06, |
|
"loss": 0.4322, |
|
"step": 461000 |
|
}, |
|
{ |
|
"epoch": 9.65, |
|
"learning_rate": 1.7410854334413886e-06, |
|
"loss": 0.41, |
|
"step": 461500 |
|
}, |
|
{ |
|
"epoch": 9.66, |
|
"learning_rate": 1.6888005855902961e-06, |
|
"loss": 0.3928, |
|
"step": 462000 |
|
}, |
|
{ |
|
"epoch": 9.67, |
|
"learning_rate": 1.6365157377392033e-06, |
|
"loss": 0.4201, |
|
"step": 462500 |
|
}, |
|
{ |
|
"epoch": 9.68, |
|
"learning_rate": 1.5842308898881106e-06, |
|
"loss": 0.4173, |
|
"step": 463000 |
|
}, |
|
{ |
|
"epoch": 9.69, |
|
"learning_rate": 1.5319460420370177e-06, |
|
"loss": 0.417, |
|
"step": 463500 |
|
}, |
|
{ |
|
"epoch": 9.7, |
|
"learning_rate": 1.479661194185925e-06, |
|
"loss": 0.3899, |
|
"step": 464000 |
|
}, |
|
{ |
|
"epoch": 9.71, |
|
"learning_rate": 1.4273763463348322e-06, |
|
"loss": 0.419, |
|
"step": 464500 |
|
}, |
|
{ |
|
"epoch": 9.72, |
|
"learning_rate": 1.3750914984837395e-06, |
|
"loss": 0.4127, |
|
"step": 465000 |
|
}, |
|
{ |
|
"epoch": 9.74, |
|
"learning_rate": 1.3228066506326468e-06, |
|
"loss": 0.4289, |
|
"step": 465500 |
|
}, |
|
{ |
|
"epoch": 9.75, |
|
"learning_rate": 1.270521802781554e-06, |
|
"loss": 0.4348, |
|
"step": 466000 |
|
}, |
|
{ |
|
"epoch": 9.76, |
|
"learning_rate": 1.218236954930461e-06, |
|
"loss": 0.3801, |
|
"step": 466500 |
|
}, |
|
{ |
|
"epoch": 9.77, |
|
"learning_rate": 1.1659521070793684e-06, |
|
"loss": 0.4107, |
|
"step": 467000 |
|
}, |
|
{ |
|
"epoch": 9.78, |
|
"learning_rate": 1.1136672592282758e-06, |
|
"loss": 0.4241, |
|
"step": 467500 |
|
}, |
|
{ |
|
"epoch": 9.79, |
|
"learning_rate": 1.061382411377183e-06, |
|
"loss": 0.4128, |
|
"step": 468000 |
|
}, |
|
{ |
|
"epoch": 9.8, |
|
"learning_rate": 1.0090975635260902e-06, |
|
"loss": 0.4336, |
|
"step": 468500 |
|
}, |
|
{ |
|
"epoch": 9.81, |
|
"learning_rate": 9.568127156749973e-07, |
|
"loss": 0.4109, |
|
"step": 469000 |
|
}, |
|
{ |
|
"epoch": 9.82, |
|
"learning_rate": 9.045278678239048e-07, |
|
"loss": 0.3986, |
|
"step": 469500 |
|
}, |
|
{ |
|
"epoch": 9.83, |
|
"learning_rate": 8.522430199728119e-07, |
|
"loss": 0.4232, |
|
"step": 470000 |
|
}, |
|
{ |
|
"epoch": 9.84, |
|
"learning_rate": 7.999581721217191e-07, |
|
"loss": 0.4168, |
|
"step": 470500 |
|
}, |
|
{ |
|
"epoch": 9.85, |
|
"learning_rate": 7.476733242706265e-07, |
|
"loss": 0.4261, |
|
"step": 471000 |
|
}, |
|
{ |
|
"epoch": 9.86, |
|
"learning_rate": 6.953884764195336e-07, |
|
"loss": 0.399, |
|
"step": 471500 |
|
}, |
|
{ |
|
"epoch": 9.87, |
|
"learning_rate": 6.431036285684409e-07, |
|
"loss": 0.416, |
|
"step": 472000 |
|
}, |
|
{ |
|
"epoch": 9.88, |
|
"learning_rate": 5.908187807173481e-07, |
|
"loss": 0.4144, |
|
"step": 472500 |
|
}, |
|
{ |
|
"epoch": 9.89, |
|
"learning_rate": 5.385339328662554e-07, |
|
"loss": 0.3898, |
|
"step": 473000 |
|
}, |
|
{ |
|
"epoch": 9.9, |
|
"learning_rate": 4.862490850151626e-07, |
|
"loss": 0.392, |
|
"step": 473500 |
|
}, |
|
{ |
|
"epoch": 9.91, |
|
"learning_rate": 4.339642371640699e-07, |
|
"loss": 0.425, |
|
"step": 474000 |
|
}, |
|
{ |
|
"epoch": 9.92, |
|
"learning_rate": 3.816793893129771e-07, |
|
"loss": 0.4121, |
|
"step": 474500 |
|
}, |
|
{ |
|
"epoch": 9.93, |
|
"learning_rate": 3.293945414618844e-07, |
|
"loss": 0.4141, |
|
"step": 475000 |
|
}, |
|
{ |
|
"epoch": 9.94, |
|
"learning_rate": 2.771096936107916e-07, |
|
"loss": 0.4202, |
|
"step": 475500 |
|
}, |
|
{ |
|
"epoch": 9.96, |
|
"learning_rate": 2.2482484575969887e-07, |
|
"loss": 0.4116, |
|
"step": 476000 |
|
}, |
|
{ |
|
"epoch": 9.97, |
|
"learning_rate": 1.725399979086061e-07, |
|
"loss": 0.4211, |
|
"step": 476500 |
|
}, |
|
{ |
|
"epoch": 9.98, |
|
"learning_rate": 1.2025515005751333e-07, |
|
"loss": 0.3955, |
|
"step": 477000 |
|
}, |
|
{ |
|
"epoch": 9.99, |
|
"learning_rate": 6.797030220642058e-08, |
|
"loss": 0.4151, |
|
"step": 477500 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"learning_rate": 1.5685454355327827e-08, |
|
"loss": 0.4201, |
|
"step": 478000 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"step": 478150, |
|
"total_flos": 2.0234881444387553e+18, |
|
"train_runtime": 143076.1453, |
|
"train_samples_per_second": 3.342 |
|
} |
|
], |
|
"max_steps": 478150, |
|
"num_train_epochs": 10, |
|
"total_flos": 2.0234881444387553e+18, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|