{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 3.0, |
|
"global_step": 228315, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.989050215710751e-05, |
|
"loss": 3.3993, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.978100431421501e-05, |
|
"loss": 2.3797, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9671506471322515e-05, |
|
"loss": 2.0579, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.9562008628430026e-05, |
|
"loss": 1.8511, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.9452510785537524e-05, |
|
"loss": 1.7398, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.9343012942645035e-05, |
|
"loss": 1.6751, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.923351509975254e-05, |
|
"loss": 1.588, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.9124017256860043e-05, |
|
"loss": 1.5222, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.901451941396755e-05, |
|
"loss": 1.4736, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.890502157107505e-05, |
|
"loss": 1.426, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.8795523728182556e-05, |
|
"loss": 1.3883, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.868602588529006e-05, |
|
"loss": 1.3851, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.8576528042397565e-05, |
|
"loss": 1.3285, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.846703019950507e-05, |
|
"loss": 1.3257, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.835753235661258e-05, |
|
"loss": 1.3013, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.824803451372008e-05, |
|
"loss": 1.2689, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.813853667082759e-05, |
|
"loss": 1.23, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.802903882793509e-05, |
|
"loss": 1.2311, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.79195409850426e-05, |
|
"loss": 1.2157, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.78100431421501e-05, |
|
"loss": 1.215, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.770054529925761e-05, |
|
"loss": 1.1977, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.759104745636511e-05, |
|
"loss": 1.1763, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.7481549613472614e-05, |
|
"loss": 1.1651, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.7372051770580125e-05, |
|
"loss": 1.1662, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.726255392768762e-05, |
|
"loss": 1.1588, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.7153056084795134e-05, |
|
"loss": 1.1328, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.704355824190264e-05, |
|
"loss": 1.131, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.693406039901014e-05, |
|
"loss": 1.1043, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.682456255611765e-05, |
|
"loss": 1.115, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.671506471322515e-05, |
|
"loss": 1.0963, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.6605566870332655e-05, |
|
"loss": 1.0741, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.649606902744016e-05, |
|
"loss": 1.0785, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.6386571184547664e-05, |
|
"loss": 1.0727, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.627707334165517e-05, |
|
"loss": 1.062, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.616757549876268e-05, |
|
"loss": 1.0622, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.605807765587018e-05, |
|
"loss": 1.0508, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.594857981297769e-05, |
|
"loss": 1.0475, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.583908197008519e-05, |
|
"loss": 1.0473, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.5729584127192696e-05, |
|
"loss": 1.0391, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.56200862843002e-05, |
|
"loss": 1.0312, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.551058844140771e-05, |
|
"loss": 1.0299, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.540109059851521e-05, |
|
"loss": 1.0074, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.5291592755622713e-05, |
|
"loss": 1.0091, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.5182094912730225e-05, |
|
"loss": 1.0185, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.507259706983772e-05, |
|
"loss": 1.0254, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.496309922694523e-05, |
|
"loss": 0.9965, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.485360138405274e-05, |
|
"loss": 0.9907, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.474410354116024e-05, |
|
"loss": 0.9807, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.4634605698267746e-05, |
|
"loss": 0.9948, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.452510785537525e-05, |
|
"loss": 0.9755, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.4415610012482754e-05, |
|
"loss": 0.9694, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.4306112169590266e-05, |
|
"loss": 0.9679, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.419661432669776e-05, |
|
"loss": 0.9725, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.408711648380527e-05, |
|
"loss": 0.968, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.397761864091278e-05, |
|
"loss": 0.9499, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.3868120798020276e-05, |
|
"loss": 0.9607, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.375862295512779e-05, |
|
"loss": 0.9559, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.364912511223529e-05, |
|
"loss": 0.95, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.3539627269342795e-05, |
|
"loss": 0.9471, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.34301294264503e-05, |
|
"loss": 0.9421, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.332063158355781e-05, |
|
"loss": 0.9585, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.321113374066531e-05, |
|
"loss": 0.9294, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.310163589777282e-05, |
|
"loss": 0.9322, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.2992138054880324e-05, |
|
"loss": 0.9391, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.288264021198782e-05, |
|
"loss": 0.9266, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.277314236909533e-05, |
|
"loss": 0.9237, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.2663644526202836e-05, |
|
"loss": 0.9295, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.255414668331034e-05, |
|
"loss": 0.9301, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.2444648840417845e-05, |
|
"loss": 0.9313, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.233515099752535e-05, |
|
"loss": 0.9167, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.2225653154632854e-05, |
|
"loss": 0.9261, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.2116155311740365e-05, |
|
"loss": 0.9047, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.200665746884786e-05, |
|
"loss": 0.9168, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.189715962595537e-05, |
|
"loss": 0.9028, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.178766178306288e-05, |
|
"loss": 0.9196, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.1678163940170375e-05, |
|
"loss": 0.8912, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.1568666097277886e-05, |
|
"loss": 0.8921, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.145916825438539e-05, |
|
"loss": 0.8876, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.1349670411492895e-05, |
|
"loss": 0.8979, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.12401725686004e-05, |
|
"loss": 0.8922, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.113067472570791e-05, |
|
"loss": 0.901, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.102117688281541e-05, |
|
"loss": 0.902, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.091167903992292e-05, |
|
"loss": 0.8889, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.080218119703042e-05, |
|
"loss": 0.8792, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.069268335413793e-05, |
|
"loss": 0.8885, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.058318551124543e-05, |
|
"loss": 0.8766, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.0473687668352936e-05, |
|
"loss": 0.8659, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.036418982546044e-05, |
|
"loss": 0.8783, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.0254691982567944e-05, |
|
"loss": 0.8667, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.014519413967545e-05, |
|
"loss": 0.8818, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.003569629678295e-05, |
|
"loss": 0.8583, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.9926198453890464e-05, |
|
"loss": 0.8621, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.981670061099796e-05, |
|
"loss": 0.8712, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.970720276810547e-05, |
|
"loss": 0.8714, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.9597704925212977e-05, |
|
"loss": 0.8653, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.948820708232048e-05, |
|
"loss": 0.8444, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.9378709239427985e-05, |
|
"loss": 0.8528, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.926921139653549e-05, |
|
"loss": 0.8345, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.9159713553642994e-05, |
|
"loss": 0.8529, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.90502157107505e-05, |
|
"loss": 0.8392, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.894071786785801e-05, |
|
"loss": 0.8466, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.8831220024965507e-05, |
|
"loss": 0.8285, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.872172218207302e-05, |
|
"loss": 0.8471, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.861222433918052e-05, |
|
"loss": 0.8435, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.8502726496288026e-05, |
|
"loss": 0.8534, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.839322865339553e-05, |
|
"loss": 0.8354, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.8283730810503035e-05, |
|
"loss": 0.839, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.817423296761054e-05, |
|
"loss": 0.8225, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.806473512471804e-05, |
|
"loss": 0.8385, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.795523728182555e-05, |
|
"loss": 0.8514, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.784573943893305e-05, |
|
"loss": 0.8337, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.773624159604056e-05, |
|
"loss": 0.8305, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.762674375314806e-05, |
|
"loss": 0.83, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.751724591025557e-05, |
|
"loss": 0.8237, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.7407748067363076e-05, |
|
"loss": 0.8267, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.729825022447058e-05, |
|
"loss": 0.8254, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.7188752381578084e-05, |
|
"loss": 0.8202, |
|
"step": 58500 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.707925453868559e-05, |
|
"loss": 0.8276, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.696975669579309e-05, |
|
"loss": 0.8158, |
|
"step": 59500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.68602588529006e-05, |
|
"loss": 0.8193, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.675076101000811e-05, |
|
"loss": 0.8144, |
|
"step": 60500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.6641263167115606e-05, |
|
"loss": 0.8105, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.653176532422312e-05, |
|
"loss": 0.8044, |
|
"step": 61500 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.642226748133062e-05, |
|
"loss": 0.8026, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.6312769638438125e-05, |
|
"loss": 0.8094, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.620327179554563e-05, |
|
"loss": 0.8001, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.6093773952653134e-05, |
|
"loss": 0.8104, |
|
"step": 63500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.598427610976064e-05, |
|
"loss": 0.8184, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.587477826686814e-05, |
|
"loss": 0.8181, |
|
"step": 64500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.576528042397565e-05, |
|
"loss": 0.7975, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.565578258108315e-05, |
|
"loss": 0.8061, |
|
"step": 65500 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.554628473819066e-05, |
|
"loss": 0.7999, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.5436786895298166e-05, |
|
"loss": 0.7834, |
|
"step": 66500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.532728905240567e-05, |
|
"loss": 0.8048, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.5217791209513175e-05, |
|
"loss": 0.7945, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.510829336662068e-05, |
|
"loss": 0.7981, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.4998795523728183e-05, |
|
"loss": 0.7879, |
|
"step": 68500 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.4889297680835694e-05, |
|
"loss": 0.802, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.477979983794319e-05, |
|
"loss": 0.7892, |
|
"step": 69500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.4670301995050696e-05, |
|
"loss": 0.7863, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.456080415215821e-05, |
|
"loss": 0.7839, |
|
"step": 70500 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.4451306309265705e-05, |
|
"loss": 0.7878, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.4341808466373216e-05, |
|
"loss": 0.7682, |
|
"step": 71500 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.423231062348072e-05, |
|
"loss": 0.7871, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.4122812780588224e-05, |
|
"loss": 0.8045, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.401331493769573e-05, |
|
"loss": 0.7681, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.390381709480323e-05, |
|
"loss": 0.7796, |
|
"step": 73500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.379431925191074e-05, |
|
"loss": 0.785, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.368482140901824e-05, |
|
"loss": 0.7655, |
|
"step": 74500 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.357532356612575e-05, |
|
"loss": 0.7747, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.346582572323325e-05, |
|
"loss": 0.7753, |
|
"step": 75500 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.335632788034076e-05, |
|
"loss": 0.7705, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.3246830037448265e-05, |
|
"loss": 0.7246, |
|
"step": 76500 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.313733219455577e-05, |
|
"loss": 0.7176, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.3027834351663274e-05, |
|
"loss": 0.7044, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.291833650877078e-05, |
|
"loss": 0.7052, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.280883866587828e-05, |
|
"loss": 0.7027, |
|
"step": 78500 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.2699340822985794e-05, |
|
"loss": 0.697, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.258984298009329e-05, |
|
"loss": 0.7019, |
|
"step": 79500 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.2480345137200795e-05, |
|
"loss": 0.6969, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.2370847294308306e-05, |
|
"loss": 0.7061, |
|
"step": 80500 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.2261349451415804e-05, |
|
"loss": 0.6972, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.2151851608523315e-05, |
|
"loss": 0.6854, |
|
"step": 81500 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.204235376563082e-05, |
|
"loss": 0.6884, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.1932855922738324e-05, |
|
"loss": 0.704, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.182335807984583e-05, |
|
"loss": 0.7074, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.171386023695334e-05, |
|
"loss": 0.7139, |
|
"step": 83500 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.1604362394060836e-05, |
|
"loss": 0.6983, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.149486455116835e-05, |
|
"loss": 0.6929, |
|
"step": 84500 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.138536670827585e-05, |
|
"loss": 0.6915, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.127586886538335e-05, |
|
"loss": 0.7004, |
|
"step": 85500 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.116637102249086e-05, |
|
"loss": 0.6891, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.1056873179598365e-05, |
|
"loss": 0.7021, |
|
"step": 86500 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.094737533670587e-05, |
|
"loss": 0.6856, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.083787749381337e-05, |
|
"loss": 0.7055, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.072837965092088e-05, |
|
"loss": 0.6814, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.061888180802838e-05, |
|
"loss": 0.6953, |
|
"step": 88500 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.050938396513589e-05, |
|
"loss": 0.6996, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.0399886122243394e-05, |
|
"loss": 0.6818, |
|
"step": 89500 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.02903882793509e-05, |
|
"loss": 0.6958, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.0180890436458402e-05, |
|
"loss": 0.6873, |
|
"step": 90500 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 3.0071392593565906e-05, |
|
"loss": 0.6942, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 2.9961894750673414e-05, |
|
"loss": 0.6985, |
|
"step": 91500 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 2.9852396907780915e-05, |
|
"loss": 0.6741, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 2.9742899064888423e-05, |
|
"loss": 0.692, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 2.9633401221995927e-05, |
|
"loss": 0.6888, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 2.9523903379103435e-05, |
|
"loss": 0.6871, |
|
"step": 93500 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 2.941440553621094e-05, |
|
"loss": 0.6827, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 2.9304907693318447e-05, |
|
"loss": 0.6914, |
|
"step": 94500 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 2.9195409850425947e-05, |
|
"loss": 0.7024, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 2.9085912007533455e-05, |
|
"loss": 0.6818, |
|
"step": 95500 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 2.897641416464096e-05, |
|
"loss": 0.6865, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 2.886691632174846e-05, |
|
"loss": 0.6911, |
|
"step": 96500 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 2.8757418478855968e-05, |
|
"loss": 0.6765, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 2.8647920635963472e-05, |
|
"loss": 0.6815, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 2.853842279307098e-05, |
|
"loss": 0.693, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 2.842892495017848e-05, |
|
"loss": 0.6775, |
|
"step": 98500 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 2.831942710728599e-05, |
|
"loss": 0.6855, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.8209929264393493e-05, |
|
"loss": 0.6839, |
|
"step": 99500 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.8100431421501e-05, |
|
"loss": 0.6716, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 2.79909335786085e-05, |
|
"loss": 0.6725, |
|
"step": 100500 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.7881435735716006e-05, |
|
"loss": 0.6747, |
|
"step": 101000 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.7771937892823513e-05, |
|
"loss": 0.6954, |
|
"step": 101500 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 2.7662440049931014e-05, |
|
"loss": 0.6846, |
|
"step": 102000 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.7552942207038525e-05, |
|
"loss": 0.6812, |
|
"step": 102500 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.7443444364146026e-05, |
|
"loss": 0.6867, |
|
"step": 103000 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.7333946521253534e-05, |
|
"loss": 0.6763, |
|
"step": 103500 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.7224448678361038e-05, |
|
"loss": 0.6709, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.7114950835468546e-05, |
|
"loss": 0.681, |
|
"step": 104500 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.7005452992576047e-05, |
|
"loss": 0.6749, |
|
"step": 105000 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.6895955149683554e-05, |
|
"loss": 0.6754, |
|
"step": 105500 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.678645730679106e-05, |
|
"loss": 0.6796, |
|
"step": 106000 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2.667695946389856e-05, |
|
"loss": 0.6686, |
|
"step": 106500 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.6567461621006067e-05, |
|
"loss": 0.6747, |
|
"step": 107000 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.645796377811357e-05, |
|
"loss": 0.6749, |
|
"step": 107500 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 2.634846593522108e-05, |
|
"loss": 0.6705, |
|
"step": 108000 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 2.623896809232858e-05, |
|
"loss": 0.6768, |
|
"step": 108500 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 2.6129470249436088e-05, |
|
"loss": 0.6705, |
|
"step": 109000 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 2.6019972406543592e-05, |
|
"loss": 0.666, |
|
"step": 109500 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 2.59104745636511e-05, |
|
"loss": 0.6593, |
|
"step": 110000 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 2.58009767207586e-05, |
|
"loss": 0.674, |
|
"step": 110500 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 2.5691478877866108e-05, |
|
"loss": 0.6629, |
|
"step": 111000 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 2.5581981034973612e-05, |
|
"loss": 0.676, |
|
"step": 111500 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 2.5472483192081113e-05, |
|
"loss": 0.6726, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 2.5362985349188624e-05, |
|
"loss": 0.673, |
|
"step": 112500 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 2.5253487506296125e-05, |
|
"loss": 0.6647, |
|
"step": 113000 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 2.5143989663403633e-05, |
|
"loss": 0.6673, |
|
"step": 113500 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.5034491820511137e-05, |
|
"loss": 0.6712, |
|
"step": 114000 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.492499397761864e-05, |
|
"loss": 0.6691, |
|
"step": 114500 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.4815496134726146e-05, |
|
"loss": 0.676, |
|
"step": 115000 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.470599829183365e-05, |
|
"loss": 0.6632, |
|
"step": 115500 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.4596500448941158e-05, |
|
"loss": 0.6691, |
|
"step": 116000 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.4487002606048662e-05, |
|
"loss": 0.6546, |
|
"step": 116500 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.4377504763156166e-05, |
|
"loss": 0.6648, |
|
"step": 117000 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.4268006920263674e-05, |
|
"loss": 0.6516, |
|
"step": 117500 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.4158509077371178e-05, |
|
"loss": 0.6646, |
|
"step": 118000 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.4049011234478682e-05, |
|
"loss": 0.6734, |
|
"step": 118500 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.3939513391586187e-05, |
|
"loss": 0.6593, |
|
"step": 119000 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.3830015548693694e-05, |
|
"loss": 0.6588, |
|
"step": 119500 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.3720517705801195e-05, |
|
"loss": 0.6703, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.36110198629087e-05, |
|
"loss": 0.6453, |
|
"step": 120500 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.3501522020016207e-05, |
|
"loss": 0.6664, |
|
"step": 121000 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.339202417712371e-05, |
|
"loss": 0.6475, |
|
"step": 121500 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.3282526334231216e-05, |
|
"loss": 0.6472, |
|
"step": 122000 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 2.3173028491338723e-05, |
|
"loss": 0.6458, |
|
"step": 122500 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 2.3063530648446228e-05, |
|
"loss": 0.6548, |
|
"step": 123000 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 2.2954032805553732e-05, |
|
"loss": 0.6606, |
|
"step": 123500 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 2.2844534962661236e-05, |
|
"loss": 0.6513, |
|
"step": 124000 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.2735037119768744e-05, |
|
"loss": 0.6618, |
|
"step": 124500 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.2625539276876245e-05, |
|
"loss": 0.6514, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 2.251604143398375e-05, |
|
"loss": 0.6496, |
|
"step": 125500 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.2406543591091257e-05, |
|
"loss": 0.6475, |
|
"step": 126000 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.229704574819876e-05, |
|
"loss": 0.6481, |
|
"step": 126500 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 2.2187547905306265e-05, |
|
"loss": 0.6553, |
|
"step": 127000 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 2.2078050062413773e-05, |
|
"loss": 0.6493, |
|
"step": 127500 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 2.1968552219521277e-05, |
|
"loss": 0.6519, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 2.185905437662878e-05, |
|
"loss": 0.6516, |
|
"step": 128500 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.1749556533736286e-05, |
|
"loss": 0.6335, |
|
"step": 129000 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.1640058690843793e-05, |
|
"loss": 0.6562, |
|
"step": 129500 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.1530560847951298e-05, |
|
"loss": 0.6515, |
|
"step": 130000 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.14210630050588e-05, |
|
"loss": 0.6519, |
|
"step": 130500 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.1311565162166306e-05, |
|
"loss": 0.6459, |
|
"step": 131000 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.120206731927381e-05, |
|
"loss": 0.6395, |
|
"step": 131500 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.1092569476381315e-05, |
|
"loss": 0.636, |
|
"step": 132000 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.0983071633488823e-05, |
|
"loss": 0.6371, |
|
"step": 132500 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.0873573790596327e-05, |
|
"loss": 0.6407, |
|
"step": 133000 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.076407594770383e-05, |
|
"loss": 0.6411, |
|
"step": 133500 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 2.0654578104811335e-05, |
|
"loss": 0.6305, |
|
"step": 134000 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 2.0545080261918843e-05, |
|
"loss": 0.638, |
|
"step": 134500 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 2.0435582419026347e-05, |
|
"loss": 0.6374, |
|
"step": 135000 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 2.0326084576133848e-05, |
|
"loss": 0.6438, |
|
"step": 135500 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 2.0216586733241356e-05, |
|
"loss": 0.6386, |
|
"step": 136000 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 2.010708889034886e-05, |
|
"loss": 0.635, |
|
"step": 136500 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.9997591047456364e-05, |
|
"loss": 0.644, |
|
"step": 137000 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.9888093204563872e-05, |
|
"loss": 0.6425, |
|
"step": 137500 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.9778595361671376e-05, |
|
"loss": 0.644, |
|
"step": 138000 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.966909751877888e-05, |
|
"loss": 0.633, |
|
"step": 138500 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.9559599675886385e-05, |
|
"loss": 0.634, |
|
"step": 139000 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.9450101832993893e-05, |
|
"loss": 0.6326, |
|
"step": 139500 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.9340603990101397e-05, |
|
"loss": 0.6342, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.92311061472089e-05, |
|
"loss": 0.6322, |
|
"step": 140500 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.9121608304316405e-05, |
|
"loss": 0.6271, |
|
"step": 141000 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.901211046142391e-05, |
|
"loss": 0.6322, |
|
"step": 141500 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.8902612618531414e-05, |
|
"loss": 0.6252, |
|
"step": 142000 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.879311477563892e-05, |
|
"loss": 0.6372, |
|
"step": 142500 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.8683616932746426e-05, |
|
"loss": 0.629, |
|
"step": 143000 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.857411908985393e-05, |
|
"loss": 0.6364, |
|
"step": 143500 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.8464621246961434e-05, |
|
"loss": 0.6246, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.8355123404068942e-05, |
|
"loss": 0.6383, |
|
"step": 144500 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.8245625561176446e-05, |
|
"loss": 0.6194, |
|
"step": 145000 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.813612771828395e-05, |
|
"loss": 0.628, |
|
"step": 145500 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.802662987539146e-05, |
|
"loss": 0.6415, |
|
"step": 146000 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.791713203249896e-05, |
|
"loss": 0.6312, |
|
"step": 146500 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.7807634189606464e-05, |
|
"loss": 0.6335, |
|
"step": 147000 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.769813634671397e-05, |
|
"loss": 0.6393, |
|
"step": 147500 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.7588638503821475e-05, |
|
"loss": 0.6331, |
|
"step": 148000 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.747914066092898e-05, |
|
"loss": 0.6232, |
|
"step": 148500 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.7369642818036484e-05, |
|
"loss": 0.6232, |
|
"step": 149000 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.7260144975143992e-05, |
|
"loss": 0.6184, |
|
"step": 149500 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.7150647132251496e-05, |
|
"loss": 0.6224, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.7041149289359e-05, |
|
"loss": 0.6267, |
|
"step": 150500 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.6931651446466508e-05, |
|
"loss": 0.6205, |
|
"step": 151000 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.682215360357401e-05, |
|
"loss": 0.6284, |
|
"step": 151500 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.6712655760681513e-05, |
|
"loss": 0.6135, |
|
"step": 152000 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.660315791778902e-05, |
|
"loss": 0.5744, |
|
"step": 152500 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.6493660074896525e-05, |
|
"loss": 0.5435, |
|
"step": 153000 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.638416223200403e-05, |
|
"loss": 0.5451, |
|
"step": 153500 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.6274664389111534e-05, |
|
"loss": 0.5414, |
|
"step": 154000 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.616516654621904e-05, |
|
"loss": 0.5592, |
|
"step": 154500 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.6055668703326546e-05, |
|
"loss": 0.5435, |
|
"step": 155000 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.594617086043405e-05, |
|
"loss": 0.539, |
|
"step": 155500 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.5836673017541557e-05, |
|
"loss": 0.5514, |
|
"step": 156000 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.5727175174649062e-05, |
|
"loss": 0.5431, |
|
"step": 156500 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.5617677331756563e-05, |
|
"loss": 0.5465, |
|
"step": 157000 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.550817948886407e-05, |
|
"loss": 0.5413, |
|
"step": 157500 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.5398681645971575e-05, |
|
"loss": 0.5478, |
|
"step": 158000 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.528918380307908e-05, |
|
"loss": 0.5514, |
|
"step": 158500 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.5179685960186585e-05, |
|
"loss": 0.5491, |
|
"step": 159000 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.507018811729409e-05, |
|
"loss": 0.543, |
|
"step": 159500 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.4960690274401595e-05, |
|
"loss": 0.5492, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.4851192431509101e-05, |
|
"loss": 0.5456, |
|
"step": 160500 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.4741694588616605e-05, |
|
"loss": 0.5318, |
|
"step": 161000 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.4632196745724111e-05, |
|
"loss": 0.5401, |
|
"step": 161500 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.4522698902831616e-05, |
|
"loss": 0.5485, |
|
"step": 162000 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.4413201059939118e-05, |
|
"loss": 0.5403, |
|
"step": 162500 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.4303703217046624e-05, |
|
"loss": 0.5438, |
|
"step": 163000 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.4194205374154128e-05, |
|
"loss": 0.5508, |
|
"step": 163500 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.4084707531261634e-05, |
|
"loss": 0.5468, |
|
"step": 164000 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.397520968836914e-05, |
|
"loss": 0.5458, |
|
"step": 164500 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.3865711845476645e-05, |
|
"loss": 0.5391, |
|
"step": 165000 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.375621400258415e-05, |
|
"loss": 0.5371, |
|
"step": 165500 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.3646716159691655e-05, |
|
"loss": 0.5578, |
|
"step": 166000 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.3537218316799161e-05, |
|
"loss": 0.5453, |
|
"step": 166500 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.3427720473906665e-05, |
|
"loss": 0.549, |
|
"step": 167000 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.3318222631014168e-05, |
|
"loss": 0.5402, |
|
"step": 167500 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.3208724788121674e-05, |
|
"loss": 0.5485, |
|
"step": 168000 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.3099226945229178e-05, |
|
"loss": 0.5424, |
|
"step": 168500 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.2989729102336684e-05, |
|
"loss": 0.5416, |
|
"step": 169000 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.288023125944419e-05, |
|
"loss": 0.5443, |
|
"step": 169500 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.2770733416551694e-05, |
|
"loss": 0.5434, |
|
"step": 170000 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.26612355736592e-05, |
|
"loss": 0.5505, |
|
"step": 170500 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.2551737730766704e-05, |
|
"loss": 0.5425, |
|
"step": 171000 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.2442239887874209e-05, |
|
"loss": 0.5465, |
|
"step": 171500 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.2332742044981715e-05, |
|
"loss": 0.5428, |
|
"step": 172000 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.2223244202089219e-05, |
|
"loss": 0.5274, |
|
"step": 172500 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.2113746359196725e-05, |
|
"loss": 0.5355, |
|
"step": 173000 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.200424851630423e-05, |
|
"loss": 0.536, |
|
"step": 173500 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.1894750673411735e-05, |
|
"loss": 0.535, |
|
"step": 174000 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.178525283051924e-05, |
|
"loss": 0.5394, |
|
"step": 174500 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.1675754987626744e-05, |
|
"loss": 0.5428, |
|
"step": 175000 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.156625714473425e-05, |
|
"loss": 0.534, |
|
"step": 175500 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.1456759301841754e-05, |
|
"loss": 0.5369, |
|
"step": 176000 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.134726145894926e-05, |
|
"loss": 0.5296, |
|
"step": 176500 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.1237763616056764e-05, |
|
"loss": 0.5391, |
|
"step": 177000 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.1128265773164269e-05, |
|
"loss": 0.5324, |
|
"step": 177500 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.1018767930271775e-05, |
|
"loss": 0.5474, |
|
"step": 178000 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.0909270087379279e-05, |
|
"loss": 0.5328, |
|
"step": 178500 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.0799772244486785e-05, |
|
"loss": 0.5289, |
|
"step": 179000 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.0690274401594289e-05, |
|
"loss": 0.542, |
|
"step": 179500 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.0580776558701793e-05, |
|
"loss": 0.5307, |
|
"step": 180000 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.04712787158093e-05, |
|
"loss": 0.5342, |
|
"step": 180500 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.0361780872916804e-05, |
|
"loss": 0.5404, |
|
"step": 181000 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.025228303002431e-05, |
|
"loss": 0.5333, |
|
"step": 181500 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.0142785187131814e-05, |
|
"loss": 0.5283, |
|
"step": 182000 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.0033287344239318e-05, |
|
"loss": 0.5318, |
|
"step": 182500 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 9.923789501346824e-06, |
|
"loss": 0.5371, |
|
"step": 183000 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 9.814291658454328e-06, |
|
"loss": 0.5296, |
|
"step": 183500 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 9.704793815561834e-06, |
|
"loss": 0.5281, |
|
"step": 184000 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 9.59529597266934e-06, |
|
"loss": 0.5222, |
|
"step": 184500 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 9.485798129776843e-06, |
|
"loss": 0.5345, |
|
"step": 185000 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 9.376300286884349e-06, |
|
"loss": 0.5397, |
|
"step": 185500 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 9.266802443991853e-06, |
|
"loss": 0.5283, |
|
"step": 186000 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 9.157304601099359e-06, |
|
"loss": 0.5303, |
|
"step": 186500 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 9.047806758206865e-06, |
|
"loss": 0.5374, |
|
"step": 187000 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 8.938308915314368e-06, |
|
"loss": 0.5374, |
|
"step": 187500 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 8.828811072421874e-06, |
|
"loss": 0.5311, |
|
"step": 188000 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 8.719313229529378e-06, |
|
"loss": 0.5275, |
|
"step": 188500 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 8.609815386636884e-06, |
|
"loss": 0.5344, |
|
"step": 189000 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 8.50031754374439e-06, |
|
"loss": 0.534, |
|
"step": 189500 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 8.390819700851892e-06, |
|
"loss": 0.5294, |
|
"step": 190000 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 8.281321857959398e-06, |
|
"loss": 0.534, |
|
"step": 190500 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 8.171824015066903e-06, |
|
"loss": 0.5225, |
|
"step": 191000 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 8.062326172174409e-06, |
|
"loss": 0.5418, |
|
"step": 191500 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 7.952828329281915e-06, |
|
"loss": 0.5368, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 7.843330486389419e-06, |
|
"loss": 0.5217, |
|
"step": 192500 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 7.733832643496923e-06, |
|
"loss": 0.5265, |
|
"step": 193000 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 7.624334800604428e-06, |
|
"loss": 0.5238, |
|
"step": 193500 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.5148369577119335e-06, |
|
"loss": 0.5262, |
|
"step": 194000 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 7.405339114819439e-06, |
|
"loss": 0.5293, |
|
"step": 194500 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 7.295841271926944e-06, |
|
"loss": 0.5428, |
|
"step": 195000 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 7.186343429034448e-06, |
|
"loss": 0.539, |
|
"step": 195500 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 7.076845586141953e-06, |
|
"loss": 0.5341, |
|
"step": 196000 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 6.967347743249458e-06, |
|
"loss": 0.53, |
|
"step": 196500 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 6.857849900356963e-06, |
|
"loss": 0.5349, |
|
"step": 197000 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 6.7483520574644685e-06, |
|
"loss": 0.5317, |
|
"step": 197500 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 6.638854214571973e-06, |
|
"loss": 0.5134, |
|
"step": 198000 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 6.529356371679478e-06, |
|
"loss": 0.5158, |
|
"step": 198500 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 6.419858528786983e-06, |
|
"loss": 0.5242, |
|
"step": 199000 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 6.310360685894488e-06, |
|
"loss": 0.53, |
|
"step": 199500 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 6.200862843001993e-06, |
|
"loss": 0.5203, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 6.091365000109498e-06, |
|
"loss": 0.5177, |
|
"step": 200500 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 5.9818671572170035e-06, |
|
"loss": 0.5282, |
|
"step": 201000 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.872369314324508e-06, |
|
"loss": 0.5346, |
|
"step": 201500 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.762871471432013e-06, |
|
"loss": 0.5176, |
|
"step": 202000 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 5.653373628539518e-06, |
|
"loss": 0.5269, |
|
"step": 202500 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.543875785647023e-06, |
|
"loss": 0.51, |
|
"step": 203000 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.434377942754528e-06, |
|
"loss": 0.5168, |
|
"step": 203500 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.324880099862033e-06, |
|
"loss": 0.526, |
|
"step": 204000 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.215382256969538e-06, |
|
"loss": 0.5166, |
|
"step": 204500 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.105884414077043e-06, |
|
"loss": 0.509, |
|
"step": 205000 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.996386571184548e-06, |
|
"loss": 0.5235, |
|
"step": 205500 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.886888728292053e-06, |
|
"loss": 0.5145, |
|
"step": 206000 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.777390885399558e-06, |
|
"loss": 0.5147, |
|
"step": 206500 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.6678930425070625e-06, |
|
"loss": 0.5306, |
|
"step": 207000 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.558395199614568e-06, |
|
"loss": 0.5204, |
|
"step": 207500 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.448897356722073e-06, |
|
"loss": 0.5216, |
|
"step": 208000 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.339399513829578e-06, |
|
"loss": 0.5091, |
|
"step": 208500 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.229901670937083e-06, |
|
"loss": 0.5138, |
|
"step": 209000 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.120403828044587e-06, |
|
"loss": 0.5177, |
|
"step": 209500 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.010905985152092e-06, |
|
"loss": 0.5238, |
|
"step": 210000 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.901408142259598e-06, |
|
"loss": 0.5305, |
|
"step": 210500 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.7919102993671026e-06, |
|
"loss": 0.5243, |
|
"step": 211000 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.6824124564746078e-06, |
|
"loss": 0.5042, |
|
"step": 211500 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.5729146135821125e-06, |
|
"loss": 0.5218, |
|
"step": 212000 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.4634167706896176e-06, |
|
"loss": 0.5204, |
|
"step": 212500 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.3539189277971227e-06, |
|
"loss": 0.5219, |
|
"step": 213000 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.2444210849046274e-06, |
|
"loss": 0.5216, |
|
"step": 213500 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.1349232420121326e-06, |
|
"loss": 0.5129, |
|
"step": 214000 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 3.0254253991196377e-06, |
|
"loss": 0.5211, |
|
"step": 214500 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.9159275562271424e-06, |
|
"loss": 0.5137, |
|
"step": 215000 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.8064297133346475e-06, |
|
"loss": 0.5144, |
|
"step": 215500 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.6969318704421526e-06, |
|
"loss": 0.5143, |
|
"step": 216000 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.5874340275496573e-06, |
|
"loss": 0.5239, |
|
"step": 216500 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.4779361846571625e-06, |
|
"loss": 0.5175, |
|
"step": 217000 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.3684383417646676e-06, |
|
"loss": 0.5144, |
|
"step": 217500 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.2589404988721723e-06, |
|
"loss": 0.5212, |
|
"step": 218000 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.1494426559796774e-06, |
|
"loss": 0.518, |
|
"step": 218500 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 2.039944813087182e-06, |
|
"loss": 0.5135, |
|
"step": 219000 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.9304469701946872e-06, |
|
"loss": 0.5264, |
|
"step": 219500 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.8209491273021924e-06, |
|
"loss": 0.5051, |
|
"step": 220000 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.7114512844096973e-06, |
|
"loss": 0.5202, |
|
"step": 220500 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.6019534415172022e-06, |
|
"loss": 0.5173, |
|
"step": 221000 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.492455598624707e-06, |
|
"loss": 0.5164, |
|
"step": 221500 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.382957755732212e-06, |
|
"loss": 0.5071, |
|
"step": 222000 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.2734599128397171e-06, |
|
"loss": 0.5152, |
|
"step": 222500 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.163962069947222e-06, |
|
"loss": 0.5046, |
|
"step": 223000 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.054464227054727e-06, |
|
"loss": 0.5181, |
|
"step": 223500 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 9.44966384162232e-07, |
|
"loss": 0.5052, |
|
"step": 224000 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 8.35468541269737e-07, |
|
"loss": 0.509, |
|
"step": 224500 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 7.25970698377242e-07, |
|
"loss": 0.5114, |
|
"step": 225000 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 6.164728554847469e-07, |
|
"loss": 0.514, |
|
"step": 225500 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.06975012592252e-07, |
|
"loss": 0.5077, |
|
"step": 226000 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.9747716969975693e-07, |
|
"loss": 0.5119, |
|
"step": 226500 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.879793268072619e-07, |
|
"loss": 0.5088, |
|
"step": 227000 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.784814839147669e-07, |
|
"loss": 0.5137, |
|
"step": 227500 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 6.898364102227187e-08, |
|
"loss": 0.5164, |
|
"step": 228000 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 228315, |
|
"total_flos": 2.563380766380196e+17, |
|
"train_loss": 0.7300223506724237, |
|
"train_runtime": 64975.7743, |
|
"train_samples_per_second": 35.138, |
|
"train_steps_per_second": 3.514 |
|
} |
|
], |
|
"max_steps": 228315, |
|
"num_train_epochs": 3, |
|
"total_flos": 2.563380766380196e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |