{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.5696761816173477,
  "global_step": 756000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "learning_rate": 4.998269757295396e-05, "loss": 2.8529, "step": 500 },
    { "epoch": 0.0, "learning_rate": 4.996539514590791e-05, "loss": 2.3953, "step": 1000 },
    { "epoch": 0.0, "learning_rate": 4.994809271886186e-05, "loss": 2.0905, "step": 1500 },
    { "epoch": 0.0, "learning_rate": 4.993079029181581e-05, "loss": 1.672, "step": 2000 },
    { "epoch": 0.01, "learning_rate": 4.991348786476977e-05, "loss": 1.4671, "step": 2500 },
    { "epoch": 0.01, "learning_rate": 4.989618543772373e-05, "loss": 1.3692, "step": 3000 },
    { "epoch": 0.01, "learning_rate": 4.987888301067767e-05, "loss": 1.3064, "step": 3500 },
    { "epoch": 0.01, "learning_rate": 4.986158058363163e-05, "loss": 1.2556, "step": 4000 },
    { "epoch": 0.01, "learning_rate": 4.984427815658558e-05, "loss": 1.2133, "step": 4500 },
    { "epoch": 0.01, "learning_rate": 4.982697572953954e-05, "loss": 1.1793, "step": 5000 },
    { "epoch": 0.01, "learning_rate": 4.9809673302493495e-05, "loss": 1.1548, "step": 5500 },
    { "epoch": 0.01, "learning_rate": 4.979237087544744e-05, "loss": 1.1329, "step": 6000 },
    { "epoch": 0.01, "learning_rate": 4.97750684484014e-05, "loss": 1.1147, "step": 6500 },
    { "epoch": 0.01, "learning_rate": 4.9757766021355354e-05, "loss": 1.0988, "step": 7000 },
    { "epoch": 0.02, "learning_rate": 4.9740463594309305e-05, "loss": 1.0875, "step": 7500 },
    { "epoch": 0.02, "learning_rate": 4.9723161167263256e-05, "loss": 1.074, "step": 8000 },
    { "epoch": 0.02, "learning_rate": 4.9705858740217207e-05, "loss": 1.062, "step": 8500 },
    { "epoch": 0.02, "learning_rate": 4.9688556313171164e-05, "loss": 1.0505, "step": 9000 },
    { "epoch": 0.02, "learning_rate": 4.967125388612512e-05, "loss": 1.0386, "step": 9500 },
    { "epoch": 0.02, "learning_rate": 4.9653951459079066e-05, "loss": 1.0334, "step": 10000 },
    { "epoch": 0.02, "learning_rate": 4.963664903203302e-05, "loss": 1.0225, "step": 10500 },
    { "epoch": 0.02, "learning_rate": 4.9619346604986974e-05, "loss": 1.0151, "step": 11000 },
    { "epoch": 0.02, "learning_rate": 4.960204417794093e-05, "loss": 1.0073, "step": 11500 },
    { "epoch": 0.02, "learning_rate": 4.958474175089489e-05, "loss": 0.9995, "step": 12000 },
    { "epoch": 0.03, "learning_rate": 4.956743932384883e-05, "loss": 0.9931, "step": 12500 },
    { "epoch": 0.03, "learning_rate": 4.955013689680279e-05, "loss": 0.987, "step": 13000 },
    { "epoch": 0.03, "learning_rate": 4.953283446975674e-05, "loss": 0.9855, "step": 13500 },
    { "epoch": 0.03, "learning_rate": 4.95155320427107e-05, "loss": 0.9781, "step": 14000 },
    { "epoch": 0.03, "learning_rate": 4.949822961566465e-05, "loss": 0.975, "step": 14500 },
    { "epoch": 0.03, "learning_rate": 4.94809271886186e-05, "loss": 0.9656, "step": 15000 },
    { "epoch": 0.03, "learning_rate": 4.946362476157256e-05, "loss": 0.9622, "step": 15500 },
    { "epoch": 0.03, "learning_rate": 4.944632233452651e-05, "loss": 0.9595, "step": 16000 },
    { "epoch": 0.03, "learning_rate": 4.942901990748047e-05, "loss": 0.9548, "step": 16500 },
    { "epoch": 0.04, "learning_rate": 4.941171748043442e-05, "loss": 0.9504, "step": 17000 },
    { "epoch": 0.04, "learning_rate": 4.939441505338837e-05, "loss": 0.946, "step": 17500 },
    { "epoch": 0.04, "learning_rate": 4.9377112626342326e-05, "loss": 0.9376, "step": 18000 },
    { "epoch": 0.04, "learning_rate": 4.9359810199296284e-05, "loss": 0.9376, "step": 18500 },
    { "epoch": 0.04, "learning_rate": 4.934250777225023e-05, "loss": 0.9326, "step": 19000 },
    { "epoch": 0.04, "learning_rate": 4.9325205345204185e-05, "loss": 0.9279, "step": 19500 },
    { "epoch": 0.04, "learning_rate": 4.9307902918158136e-05, "loss": 0.9285, "step": 20000 },
    { "epoch": 0.04, "learning_rate": 4.9290600491112093e-05, "loss": 0.9256, "step": 20500 },
    { "epoch": 0.04, "learning_rate": 4.9273298064066044e-05, "loss": 0.9209, "step": 21000 },
    { "epoch": 0.04, "learning_rate": 4.9255995637019995e-05, "loss": 0.9163, "step": 21500 },
    { "epoch": 0.05, "learning_rate": 4.923869320997395e-05, "loss": 0.9134, "step": 22000 },
    { "epoch": 0.05, "learning_rate": 4.9221390782927903e-05, "loss": 0.9129, "step": 22500 },
    { "epoch": 0.05, "learning_rate": 4.920408835588186e-05, "loss": 0.9094, "step": 23000 },
    { "epoch": 0.05, "learning_rate": 4.918678592883581e-05, "loss": 0.9038, "step": 23500 },
    { "epoch": 0.05, "learning_rate": 4.916948350178976e-05, "loss": 0.9096, "step": 24000 },
    { "epoch": 0.05, "learning_rate": 4.915218107474372e-05, "loss": 0.9048, "step": 24500 },
    { "epoch": 0.05, "learning_rate": 4.913487864769767e-05, "loss": 0.8989, "step": 25000 },
    { "epoch": 0.05, "learning_rate": 4.911757622065162e-05, "loss": 0.8963, "step": 25500 },
    { "epoch": 0.05, "learning_rate": 4.910027379360558e-05, "loss": 0.8994, "step": 26000 },
    { "epoch": 0.06, "learning_rate": 4.908297136655953e-05, "loss": 0.8913, "step": 26500 },
    { "epoch": 0.06, "learning_rate": 4.906566893951349e-05, "loss": 0.8912, "step": 27000 },
    { "epoch": 0.06, "learning_rate": 4.904836651246744e-05, "loss": 0.8877, "step": 27500 },
    { "epoch": 0.06, "learning_rate": 4.903106408542139e-05, "loss": 0.8891, "step": 28000 },
    { "epoch": 0.06, "learning_rate": 4.901376165837535e-05, "loss": 0.8837, "step": 28500 },
    { "epoch": 0.06, "learning_rate": 4.89964592313293e-05, "loss": 0.8786, "step": 29000 },
    { "epoch": 0.06, "learning_rate": 4.8979156804283255e-05, "loss": 0.8785, "step": 29500 },
    { "epoch": 0.06, "learning_rate": 4.8961854377237206e-05, "loss": 0.8798, "step": 30000 },
    { "epoch": 0.06, "learning_rate": 4.894455195019116e-05, "loss": 0.8761, "step": 30500 },
    { "epoch": 0.06, "learning_rate": 4.8927249523145114e-05, "loss": 0.8739, "step": 31000 },
    { "epoch": 0.07, "learning_rate": 4.8909947096099065e-05, "loss": 0.8718, "step": 31500 },
    { "epoch": 0.07, "learning_rate": 4.8892644669053016e-05, "loss": 0.8711, "step": 32000 },
    { "epoch": 0.07, "learning_rate": 4.8875342242006974e-05, "loss": 0.8694, "step": 32500 },
    { "epoch": 0.07, "learning_rate": 4.8858039814960924e-05, "loss": 0.8676, "step": 33000 },
    { "epoch": 0.07, "learning_rate": 4.884073738791488e-05, "loss": 0.865, "step": 33500 },
    { "epoch": 0.07, "learning_rate": 4.882343496086883e-05, "loss": 0.8607, "step": 34000 },
    { "epoch": 0.07, "learning_rate": 4.8806132533822784e-05, "loss": 0.859, "step": 34500 },
    { "epoch": 0.07, "learning_rate": 4.878883010677674e-05, "loss": 0.863, "step": 35000 },
    { "epoch": 0.07, "learning_rate": 4.877152767973069e-05, "loss": 0.8618, "step": 35500 },
    { "epoch": 0.07, "learning_rate": 4.875422525268465e-05, "loss": 0.8585, "step": 36000 },
    { "epoch": 0.08, "learning_rate": 4.87369228256386e-05, "loss": 0.8588, "step": 36500 },
    { "epoch": 0.08, "learning_rate": 4.871962039859255e-05, "loss": 0.8581, "step": 37000 },
    { "epoch": 0.08, "learning_rate": 4.870231797154651e-05, "loss": 0.8541, "step": 37500 },
    { "epoch": 0.08, "learning_rate": 4.868501554450046e-05, "loss": 0.8515, "step": 38000 },
    { "epoch": 0.08, "learning_rate": 4.866771311745442e-05, "loss": 0.8512, "step": 38500 },
    { "epoch": 0.08, "learning_rate": 4.865041069040837e-05, "loss": 0.8508, "step": 39000 },
    { "epoch": 0.08, "learning_rate": 4.863310826336232e-05, "loss": 0.8493, "step": 39500 },
    { "epoch": 0.08, "learning_rate": 4.8615805836316276e-05, "loss": 0.8456, "step": 40000 },
    { "epoch": 0.08, "learning_rate": 4.859850340927023e-05, "loss": 0.845, "step": 40500 },
    { "epoch": 0.09, "learning_rate": 4.858120098222418e-05, "loss": 0.8439, "step": 41000 },
    { "epoch": 0.09, "learning_rate": 4.8563898555178135e-05, "loss": 0.843, "step": 41500 },
    { "epoch": 0.09, "learning_rate": 4.8546596128132086e-05, "loss": 0.8414, "step": 42000 },
    { "epoch": 0.09, "learning_rate": 4.8529293701086044e-05, "loss": 0.8401, "step": 42500 },
    { "epoch": 0.09, "learning_rate": 4.8511991274039995e-05, "loss": 0.8395, "step": 43000 },
    { "epoch": 0.09, "learning_rate": 4.8494688846993945e-05, "loss": 0.8404, "step": 43500 },
    { "epoch": 0.09, "learning_rate": 4.84773864199479e-05, "loss": 0.8395, "step": 44000 },
    { "epoch": 0.09, "learning_rate": 4.8460083992901854e-05, "loss": 0.8342, "step": 44500 },
    { "epoch": 0.09, "learning_rate": 4.844278156585581e-05, "loss": 0.837, "step": 45000 },
    { "epoch": 0.09, "learning_rate": 4.842547913880976e-05, "loss": 0.8376, "step": 45500 },
    { "epoch": 0.1, "learning_rate": 4.840817671176371e-05, "loss": 0.8338, "step": 46000 },
    { "epoch": 0.1, "learning_rate": 4.839087428471767e-05, "loss": 0.829, "step": 46500 },
    { "epoch": 0.1, "learning_rate": 4.837357185767162e-05, "loss": 0.831, "step": 47000 },
    { "epoch": 0.1, "learning_rate": 4.835626943062557e-05, "loss": 0.8312, "step": 47500 },
    { "epoch": 0.1, "learning_rate": 4.833896700357953e-05, "loss": 0.8275, "step": 48000 },
    { "epoch": 0.1, "learning_rate": 4.832166457653348e-05, "loss": 0.8284, "step": 48500 },
    { "epoch": 0.1, "learning_rate": 4.830436214948744e-05, "loss": 0.8291, "step": 49000 },
    { "epoch": 0.1, "learning_rate": 4.828705972244139e-05, "loss": 0.8278, "step": 49500 },
    { "epoch": 0.1, "learning_rate": 4.826975729539534e-05, "loss": 0.8224, "step": 50000 },
    { "epoch": 0.1, "learning_rate": 4.82524548683493e-05, "loss": 0.8266, "step": 50500 },
    { "epoch": 0.11, "learning_rate": 4.823515244130325e-05, "loss": 0.8224, "step": 51000 },
    { "epoch": 0.11, "learning_rate": 4.8217850014257206e-05, "loss": 0.8193, "step": 51500 },
    { "epoch": 0.11, "learning_rate": 4.8200547587211156e-05, "loss": 0.8193, "step": 52000 },
    { "epoch": 0.11, "learning_rate": 4.818324516016511e-05, "loss": 0.824, "step": 52500 },
    { "epoch": 0.11, "learning_rate": 4.8165942733119065e-05, "loss": 0.8245, "step": 53000 },
    { "epoch": 0.11, "learning_rate": 4.8148640306073015e-05, "loss": 0.8204, "step": 53500 },
    { "epoch": 0.11, "learning_rate": 4.8131337879026966e-05, "loss": 0.8217, "step": 54000 },
    { "epoch": 0.11, "learning_rate": 4.8114035451980924e-05, "loss": 0.8162, "step": 54500 },
    { "epoch": 0.11, "learning_rate": 4.8096733024934875e-05, "loss": 0.8187, "step": 55000 },
    { "epoch": 0.12, "learning_rate": 4.807943059788883e-05, "loss": 0.8161, "step": 55500 },
    { "epoch": 0.12, "learning_rate": 4.806212817084278e-05, "loss": 0.8158, "step": 56000 },
    { "epoch": 0.12, "learning_rate": 4.8044825743796734e-05, "loss": 0.815, "step": 56500 },
    { "epoch": 0.12, "learning_rate": 4.802752331675069e-05, "loss": 0.8144, "step": 57000 },
    { "epoch": 0.12, "learning_rate": 4.801022088970464e-05, "loss": 0.8111, "step": 57500 },
    { "epoch": 0.12, "learning_rate": 4.79929184626586e-05, "loss": 0.8122, "step": 58000 },
    { "epoch": 0.12, "learning_rate": 4.797561603561255e-05, "loss": 0.8077, "step": 58500 },
    { "epoch": 0.12, "learning_rate": 4.79583136085665e-05, "loss": 0.8111, "step": 59000 },
    { "epoch": 0.12, "learning_rate": 4.794101118152046e-05, "loss": 0.8113, "step": 59500 },
    { "epoch": 0.12, "learning_rate": 4.792370875447441e-05, "loss": 0.81, "step": 60000 },
    { "epoch": 0.13, "learning_rate": 4.790640632742837e-05, "loss": 0.8167, "step": 60500 },
    { "epoch": 0.13, "learning_rate": 4.788910390038232e-05, "loss": 0.8092, "step": 61000 },
    { "epoch": 0.13, "learning_rate": 4.787180147333627e-05, "loss": 0.8052, "step": 61500 },
    { "epoch": 0.13, "learning_rate": 4.7854499046290226e-05, "loss": 0.8035, "step": 62000 },
    { "epoch": 0.13, "learning_rate": 4.783719661924418e-05, "loss": 0.8066, "step": 62500 },
    { "epoch": 0.13, "learning_rate": 4.781989419219813e-05, "loss": 0.8046, "step": 63000 },
    { "epoch": 0.13, "learning_rate": 4.7802591765152086e-05, "loss": 0.8043, "step": 63500 },
    { "epoch": 0.13, "learning_rate": 4.7785289338106036e-05, "loss": 0.8023, "step": 64000 },
    { "epoch": 0.13, "learning_rate": 4.7767986911059994e-05, "loss": 0.8048, "step": 64500 },
    { "epoch": 0.13, "learning_rate": 4.775068448401394e-05, "loss": 0.8039, "step": 65000 },
    { "epoch": 0.14, "learning_rate": 4.7733382056967896e-05, "loss": 0.8056, "step": 65500 },
    { "epoch": 0.14, "learning_rate": 4.771607962992185e-05, "loss": 0.801, "step": 66000 },
    { "epoch": 0.14, "learning_rate": 4.7698777202875804e-05, "loss": 0.7974, "step": 66500 },
    { "epoch": 0.14, "learning_rate": 4.768147477582976e-05, "loss": 0.7979, "step": 67000 },
    { "epoch": 0.14, "learning_rate": 4.7664172348783706e-05, "loss": 0.7978, "step": 67500 },
    { "epoch": 0.14, "learning_rate": 4.764686992173766e-05, "loss": 0.8001, "step": 68000 },
    { "epoch": 0.14, "learning_rate": 4.762956749469162e-05, "loss": 0.7976, "step": 68500 },
    { "epoch": 0.14, "learning_rate": 4.761226506764557e-05, "loss": 0.8022, "step": 69000 },
    { "epoch": 0.14, "learning_rate": 4.759496264059952e-05, "loss": 0.7929, "step": 69500 },
    { "epoch": 0.15, "learning_rate": 4.757766021355348e-05, "loss": 0.7953, "step": 70000 },
    { "epoch": 0.15, "learning_rate": 4.756035778650743e-05, "loss": 0.7933, "step": 70500 },
    { "epoch": 0.15, "learning_rate": 4.754305535946139e-05, "loss": 0.7926, "step": 71000 },
    { "epoch": 0.15, "learning_rate": 4.752575293241534e-05, "loss": 0.7925, "step": 71500 },
    { "epoch": 0.15, "learning_rate": 4.750845050536929e-05, "loss": 0.795, "step": 72000 },
    { "epoch": 0.15, "learning_rate": 4.749114807832325e-05, "loss": 0.7901, "step": 72500 },
    { "epoch": 0.15, "learning_rate": 4.74738456512772e-05, "loss": 0.7915, "step": 73000 },
    { "epoch": 0.15, "learning_rate": 4.7456543224231156e-05, "loss": 0.7932, "step": 73500 },
    { "epoch": 0.15, "learning_rate": 4.74392407971851e-05, "loss": 0.7913, "step": 74000 },
    { "epoch": 0.15, "learning_rate": 4.742193837013906e-05, "loss": 0.7921, "step": 74500 },
    { "epoch": 0.16, "learning_rate": 4.7404635943093015e-05, "loss": 0.7902, "step": 75000 },
    { "epoch": 0.16, "learning_rate": 4.7387333516046966e-05, "loss": 0.7885, "step": 75500 },
    { "epoch": 0.16, "learning_rate": 4.737003108900092e-05, "loss": 0.7925, "step": 76000 },
    { "epoch": 0.16, "learning_rate": 4.735272866195487e-05, "loss": 0.7888, "step": 76500 },
    { "epoch": 0.16, "learning_rate": 4.7335426234908825e-05, "loss": 0.7879, "step": 77000 },
    { "epoch": 0.16, "learning_rate": 4.731812380786278e-05, "loss": 0.7894, "step": 77500 },
    { "epoch": 0.16, "learning_rate": 4.730082138081673e-05, "loss": 0.787, "step": 78000 },
    { "epoch": 0.16, "learning_rate": 4.7283518953770684e-05, "loss": 0.7846, "step": 78500 },
    { "epoch": 0.16, "learning_rate": 4.7266216526724635e-05, "loss": 0.7878, "step": 79000 },
    { "epoch": 0.17, "learning_rate": 4.724891409967859e-05, "loss": 0.7859, "step": 79500 },
    { "epoch": 0.17, "learning_rate": 4.723161167263255e-05, "loss": 0.784, "step": 80000 },
    { "epoch": 0.17, "learning_rate": 4.7214309245586494e-05, "loss": 0.7876, "step": 80500 },
    { "epoch": 0.17, "learning_rate": 4.719700681854045e-05, "loss": 0.7856, "step": 81000 },
    { "epoch": 0.17, "learning_rate": 4.717970439149441e-05, "loss": 0.785, "step": 81500 },
    { "epoch": 0.17, "learning_rate": 4.716240196444836e-05, "loss": 0.7831, "step": 82000 },
    { "epoch": 0.17, "learning_rate": 4.714509953740232e-05, "loss": 0.7856, "step": 82500 },
    { "epoch": 0.17, "learning_rate": 4.712779711035626e-05, "loss": 0.7805, "step": 83000 },
    { "epoch": 0.17, "learning_rate": 4.711049468331022e-05, "loss": 0.7826, "step": 83500 },
    { "epoch": 0.17, "learning_rate": 4.709319225626418e-05, "loss": 0.7822, "step": 84000 },
    { "epoch": 0.18, "learning_rate": 4.707588982921813e-05, "loss": 0.7811, "step": 84500 },
    { "epoch": 0.18, "learning_rate": 4.705858740217208e-05, "loss": 0.781, "step": 85000 },
    { "epoch": 0.18, "learning_rate": 4.704128497512603e-05, "loss": 0.7836, "step": 85500 },
    { "epoch": 0.18, "learning_rate": 4.702398254807999e-05, "loss": 0.7795, "step": 86000 },
    { "epoch": 0.18, "learning_rate": 4.7006680121033944e-05, "loss": 0.7805, "step": 86500 },
    { "epoch": 0.18, "learning_rate": 4.6989377693987895e-05, "loss": 0.7783, "step": 87000 },
    { "epoch": 0.18, "learning_rate": 4.6972075266941846e-05, "loss": 0.7777, "step": 87500 },
    { "epoch": 0.18, "learning_rate": 4.69547728398958e-05, "loss": 0.7762, "step": 88000 },
    { "epoch": 0.18, "learning_rate": 4.6937470412849754e-05, "loss": 0.778, "step": 88500 },
    { "epoch": 0.18, "learning_rate": 4.692016798580371e-05, "loss": 0.7771, "step": 89000 },
    { "epoch": 0.19, "learning_rate": 4.6902865558757656e-05, "loss": 0.7751, "step": 89500 },
    { "epoch": 0.19, "learning_rate": 4.6885563131711613e-05, "loss": 0.7801, "step": 90000 },
    { "epoch": 0.19, "learning_rate": 4.6868260704665564e-05, "loss": 0.7757, "step": 90500 },
    { "epoch": 0.19, "learning_rate": 4.685095827761952e-05, "loss": 0.774, "step": 91000 },
    { "epoch": 0.19, "learning_rate": 4.683365585057347e-05, "loss": 0.774, "step": 91500 },
    { "epoch": 0.19, "learning_rate": 4.681635342352742e-05, "loss": 0.7757, "step": 92000 },
    { "epoch": 0.19, "learning_rate": 4.679905099648138e-05, "loss": 0.7743, "step": 92500 },
    { "epoch": 0.19, "learning_rate": 4.678174856943534e-05, "loss": 0.7753, "step": 93000 },
    { "epoch": 0.19, "learning_rate": 4.676444614238929e-05, "loss": 0.7702, "step": 93500 },
    { "epoch": 0.2, "learning_rate": 4.674714371534324e-05, "loss": 0.7765, "step": 94000 },
    { "epoch": 0.2, "learning_rate": 4.672984128829719e-05, "loss": 0.7732, "step": 94500 },
    { "epoch": 0.2, "learning_rate": 4.671253886125115e-05, "loss": 0.7707, "step": 95000 },
    { "epoch": 0.2, "learning_rate": 4.6695236434205106e-05, "loss": 0.7707, "step": 95500 },
    { "epoch": 0.2, "learning_rate": 4.667793400715905e-05, "loss": 0.7742, "step": 96000 },
    { "epoch": 0.2, "learning_rate": 4.666063158011301e-05, "loss": 0.7722, "step": 96500 },
    { "epoch": 0.2, "learning_rate": 4.664332915306696e-05, "loss": 0.771, "step": 97000 },
    { "epoch": 0.2, "learning_rate": 4.6626026726020916e-05, "loss": 0.7725, "step": 97500 },
    { "epoch": 0.2, "learning_rate": 4.6608724298974874e-05, "loss": 0.7682, "step": 98000 },
    { "epoch": 0.2, "learning_rate": 4.659142187192882e-05, "loss": 0.7698, "step": 98500 },
    { "epoch": 0.21, "learning_rate": 4.6574119444882775e-05, "loss": 0.7699, "step": 99000 },
    { "epoch": 0.21, "learning_rate": 4.6556817017836726e-05, "loss": 0.7701, "step": 99500 },
    { "epoch": 0.21, "learning_rate": 4.6539514590790684e-05, "loss": 0.7707, "step": 100000 },
    { "epoch": 0.21, "learning_rate": 4.6522212163744634e-05, "loss": 0.7673, "step": 100500 },
    { "epoch": 0.21, "learning_rate": 4.6504909736698585e-05, "loss": 0.7688, "step": 101000 },
    { "epoch": 0.21, "learning_rate": 4.648760730965254e-05, "loss": 0.7683, "step": 101500 },
    { "epoch": 0.21, "learning_rate": 4.6470304882606494e-05, "loss": 0.7664, "step": 102000 },
    { "epoch": 0.21, "learning_rate": 4.6453002455560444e-05, "loss": 0.7686, "step": 102500 },
    { "epoch": 0.21, "learning_rate": 4.64357000285144e-05, "loss": 0.7631, "step": 103000 },
    { "epoch": 0.21, "learning_rate": 4.641839760146835e-05, "loss": 0.766, "step": 103500 },
    { "epoch": 0.22, "learning_rate": 4.640109517442231e-05, "loss": 0.763, "step": 104000 },
    { "epoch": 0.22, "learning_rate": 4.638379274737627e-05, "loss": 0.7652, "step": 104500 },
    { "epoch": 0.22, "learning_rate": 4.636649032033021e-05, "loss": 0.7643, "step": 105000 },
    { "epoch": 0.22, "learning_rate": 4.634918789328417e-05, "loss": 0.7684, "step": 105500 },
    { "epoch": 0.22, "learning_rate": 4.633188546623812e-05, "loss": 0.7648, "step": 106000 },
    { "epoch": 0.22, "learning_rate": 4.631458303919208e-05, "loss": 0.7646, "step": 106500 },
    { "epoch": 0.22, "learning_rate": 4.629728061214603e-05, "loss": 0.7632, "step": 107000 },
    { "epoch": 0.22, "learning_rate": 4.627997818509998e-05, "loss": 0.7637, "step": 107500 },
    { "epoch": 0.22, "learning_rate": 4.626267575805394e-05, "loss": 0.7655, "step": 108000 },
    { "epoch": 0.23, "learning_rate": 4.624537333100789e-05, "loss": 0.763, "step": 108500 },
    { "epoch": 0.23, "learning_rate": 4.6228070903961845e-05, "loss": 0.7623, "step": 109000 },
    { "epoch": 0.23, "learning_rate": 4.6210768476915796e-05, "loss": 0.7644, "step": 109500 },
    { "epoch": 0.23, "learning_rate": 4.619346604986975e-05, "loss": 0.7602, "step": 110000 },
    { "epoch": 0.23, "learning_rate": 4.6176163622823704e-05, "loss": 0.7603, "step": 110500 },
    { "epoch": 0.23, "learning_rate": 4.6158861195777655e-05, "loss": 0.7602, "step": 111000 },
    { "epoch": 0.23, "learning_rate": 4.6141558768731606e-05, "loss": 0.7619, "step": 111500 },
    { "epoch": 0.23, "learning_rate": 4.6124256341685564e-05, "loss": 0.7615, "step": 112000 },
    { "epoch": 0.23, "learning_rate": 4.6106953914639514e-05, "loss": 0.7613, "step": 112500 },
    { "epoch": 0.23, "learning_rate": 4.608965148759347e-05, "loss": 0.758, "step": 113000 },
    { "epoch": 0.24, "learning_rate": 4.607234906054742e-05, "loss": 0.7649, "step": 113500 },
    { "epoch": 0.24, "learning_rate": 4.6055046633501374e-05, "loss": 0.7627, "step": 114000 },
    { "epoch": 0.24, "learning_rate": 4.603774420645533e-05, "loss": 0.7607, "step": 114500 },
    { "epoch": 0.24, "learning_rate": 4.602044177940928e-05, "loss": 0.7582, "step": 115000 },
    { "epoch": 0.24, "learning_rate": 4.600313935236324e-05, "loss": 0.7613, "step": 115500 },
    { "epoch": 0.24, "learning_rate": 4.598583692531719e-05, "loss": 0.7597, "step": 116000 },
    { "epoch": 0.24, "learning_rate": 4.596853449827114e-05, "loss": 0.7553, "step": 116500 },
    { "epoch": 0.24, "learning_rate": 4.59512320712251e-05, "loss": 0.759, "step": 117000 },
    { "epoch": 0.24, "learning_rate": 4.593392964417905e-05, "loss": 0.7578, "step": 117500 },
    { "epoch": 0.25, "learning_rate": 4.5916627217133e-05, "loss": 0.7563, "step": 118000 },
    { "epoch": 0.25, "learning_rate": 4.589932479008696e-05, "loss": 0.7547, "step": 118500 },
    { "epoch": 0.25, "learning_rate": 4.588202236304091e-05, "loss": 0.7567, "step": 119000 },
    { "epoch": 0.25, "learning_rate": 4.5864719935994866e-05, "loss": 0.7622, "step": 119500 },
    { "epoch": 0.25, "learning_rate": 4.584741750894882e-05, "loss": 0.7592, "step": 120000 },
    { "epoch": 0.25, "learning_rate": 4.583011508190277e-05, "loss": 0.7524, "step": 120500 },
    { "epoch": 0.25, "learning_rate": 4.5812812654856725e-05, "loss": 0.7589, "step": 121000 },
    { "epoch": 0.25, "learning_rate": 4.5795510227810676e-05, "loss": 0.7546, "step": 121500 },
    { "epoch": 0.25, "learning_rate": 4.5778207800764634e-05, "loss": 0.7544, "step": 122000 },
    { "epoch": 0.25, "learning_rate": 4.5760905373718585e-05, "loss": 0.7557, "step": 122500 },
    { "epoch": 0.26, "learning_rate": 4.5743602946672535e-05, "loss": 0.7542, "step": 123000 },
    { "epoch": 0.26, "learning_rate": 4.572630051962649e-05, "loss": 0.7563, "step": 123500 },
    { "epoch": 0.26, "learning_rate": 4.5708998092580444e-05, "loss": 0.7552, "step": 124000 },
    { "epoch": 0.26, "learning_rate": 4.5691695665534395e-05, "loss": 0.752, "step": 124500 },
    { "epoch": 0.26, "learning_rate": 4.567439323848835e-05, "loss": 0.7533, "step": 125000 },
    { "epoch": 0.26, "learning_rate": 4.56570908114423e-05, "loss": 0.7568, "step": 125500 },
    { "epoch": 0.26, "learning_rate": 4.563978838439626e-05, "loss": 0.7557, "step": 126000 },
    { "epoch": 0.26, "learning_rate": 4.562248595735021e-05, "loss": 0.7518, "step": 126500 },
    { "epoch": 0.26, "learning_rate": 4.560518353030416e-05, "loss": 0.7511, "step": 127000 },
    { "epoch": 0.26, "learning_rate": 4.558788110325812e-05, "loss": 0.7521, "step": 127500 },
    { "epoch": 0.27, "learning_rate": 4.557057867621207e-05, "loss": 0.7536, "step": 128000 },
    { "epoch": 0.27, "learning_rate": 4.555327624916603e-05, "loss": 0.7497, "step": 128500 },
    { "epoch": 0.27, "learning_rate": 4.553597382211998e-05, "loss": 0.7507, "step": 129000 },
    { "epoch": 0.27, "learning_rate": 4.551867139507393e-05, "loss": 0.7483, "step": 129500 },
    { "epoch": 0.27, "learning_rate": 4.550136896802789e-05, "loss": 0.7484, "step": 130000 },
    { "epoch": 0.27, "learning_rate": 4.548406654098184e-05, "loss": 0.7508, "step": 130500 },
    { "epoch": 0.27, "learning_rate": 4.5466764113935796e-05, "loss": 0.749, "step": 131000 },
    { "epoch": 0.27, "learning_rate": 4.5449461686889746e-05, "loss": 0.7509, "step": 131500 },
    { "epoch": 0.27, "learning_rate": 4.54321592598437e-05, "loss": 0.7543, "step": 132000 },
    { "epoch": 0.28, "learning_rate": 4.5414856832797655e-05, "loss": 0.7492, "step": 132500 },
    { "epoch": 0.28, "learning_rate": 4.5397554405751606e-05, "loss": 0.7485, "step": 133000 },
    { "epoch": 0.28, "learning_rate": 4.5380251978705556e-05, "loss": 0.7489, "step": 133500 },
    { "epoch": 0.28, "learning_rate": 4.5362949551659514e-05, "loss": 0.7515, "step": 134000 },
    { "epoch": 0.28, "learning_rate": 4.5345647124613465e-05, "loss": 0.7492, "step": 134500 },
    { "epoch": 0.28, "learning_rate": 4.532834469756742e-05, "loss": 0.7486, "step": 135000 },
    { "epoch": 0.28, "learning_rate": 4.531104227052137e-05, "loss": 0.7482, "step": 135500 },
    { "epoch": 0.28, "learning_rate": 4.5293739843475324e-05, "loss": 0.7462, "step": 136000 },
    { "epoch": 0.28, "learning_rate": 4.527643741642928e-05, "loss": 0.7432, "step": 136500 },
    { "epoch": 0.28, "learning_rate": 4.525913498938323e-05, "loss": 0.7454, "step": 137000 },
    { "epoch": 0.29, "learning_rate": 4.524183256233719e-05, "loss": 0.7498, "step": 137500 },
    { "epoch": 0.29, "learning_rate": 4.522453013529114e-05, "loss": 0.7431, "step": 138000 },
    { "epoch": 0.29, "learning_rate": 4.520722770824509e-05, "loss": 0.7475, "step": 138500 },
    { "epoch": 0.29, "learning_rate": 4.518992528119905e-05, "loss": 0.7482, "step": 139000 },
    { "epoch": 0.29, "learning_rate": 4.5172622854153e-05, "loss": 0.741, "step": 139500 },
    { "epoch": 0.29, "learning_rate": 4.515532042710695e-05, "loss": 0.7451, "step": 140000 },
    { "epoch": 0.29, "learning_rate": 4.513801800006091e-05, "loss": 0.747, "step": 140500 },
    { "epoch": 0.29, "learning_rate": 4.512071557301486e-05, "loss": 0.7433, "step": 141000 },
    { "epoch": 0.29, "learning_rate": 4.5103413145968817e-05, "loss": 0.7445, "step": 141500 },
    { "epoch": 0.29, "learning_rate": 4.508611071892277e-05, "loss": 0.7464, "step": 142000 },
    { "epoch": 0.3, "learning_rate": 4.506880829187672e-05, "loss": 0.7445, "step": 142500 },
    { "epoch": 0.3, "learning_rate": 4.5051505864830676e-05, "loss": 0.7465, "step": 143000 },
    { "epoch": 0.3, "learning_rate": 4.5034203437784627e-05, "loss": 0.7442, "step": 143500 },
    { "epoch": 0.3, "learning_rate": 4.5016901010738584e-05, "loss": 0.7444, "step": 144000 },
    { "epoch": 0.3, "learning_rate": 4.4999598583692535e-05, "loss": 0.7452, "step": 144500 },
    { "epoch": 0.3, "learning_rate": 4.4982296156646486e-05, "loss": 0.7492, "step": 145000 },
    { "epoch": 0.3, "learning_rate": 4.496499372960044e-05, "loss": 0.7428, "step": 145500 },
    { "epoch": 0.3, "learning_rate": 4.4947691302554394e-05, "loss": 0.7416, "step": 146000 },
    { "epoch": 0.3, "learning_rate": 4.493038887550835e-05, "loss": 0.7431, "step": 146500 },
    { "epoch": 0.31, "learning_rate": 4.49130864484623e-05, "loss": 0.7451, "step": 147000 },
    { "epoch": 0.31, "learning_rate": 4.489578402141625e-05, "loss": 0.7441, "step": 147500 },
    { "epoch": 0.31, "learning_rate": 4.487848159437021e-05, "loss": 0.7434, "step": 148000 },
    { "epoch": 0.31, "learning_rate": 4.486117916732416e-05, "loss": 0.7429, "step": 148500 },
    { "epoch": 0.31, "learning_rate": 4.484387674027811e-05, "loss": 0.7402, "step": 149000 },
    { "epoch": 0.31, "learning_rate": 4.482657431323207e-05, "loss": 0.7403, "step": 149500 },
    { "epoch": 0.31, "learning_rate": 4.480927188618602e-05, "loss": 0.74, "step": 150000 },
    { "epoch": 0.31, "learning_rate": 4.479196945913998e-05, "loss": 0.7417, "step": 150500 },
    { "epoch": 0.31, "learning_rate": 4.477466703209392e-05, "loss": 0.7424, "step": 151000 },
    { "epoch": 0.31, "learning_rate": 4.475736460504788e-05, "loss": 0.7426, "step": 151500 },
    { "epoch": 0.32, "learning_rate": 4.474006217800184e-05, "loss": 0.739, "step": 152000 },
    { "epoch": 0.32, "learning_rate": 4.472275975095579e-05, "loss": 0.7382, "step": 152500 },
    { "epoch": 0.32, "learning_rate": 4.4705457323909746e-05, "loss": 0.7391, "step": 153000 },
    { "epoch": 0.32, "learning_rate": 4.468815489686369e-05, "loss": 0.7398, "step": 153500 },
    { "epoch": 0.32, "learning_rate": 4.467085246981765e-05, "loss": 0.7412, "step": 154000 },
    { "epoch": 0.32, "learning_rate": 4.4653550042771605e-05, "loss": 0.7394, "step": 154500 },
    { "epoch": 0.32, "learning_rate": 4.4636247615725556e-05, "loss": 0.7393, "step": 155000 },
    { "epoch": 0.32, "learning_rate": 4.461894518867951e-05, "loss": 0.7386, "step": 155500 },
    { "epoch": 0.32, "learning_rate": 4.4601642761633464e-05, "loss": 0.7384, "step": 156000 },
    { "epoch": 0.32, "learning_rate": 4.4584340334587415e-05, "loss": 0.7416, "step": 156500 },
    { "epoch": 0.33, "learning_rate": 4.456703790754137e-05, "loss": 0.7376, "step": 157000 },
    { "epoch": 0.33, "learning_rate": 4.454973548049532e-05, "loss": 0.7368, "step": 157500 },
    { "epoch": 0.33, "learning_rate": 4.4532433053449274e-05, "loss": 0.7374, "step": 158000 },
    { "epoch": 0.33, "learning_rate": 4.451513062640323e-05, "loss": 0.7425, "step": 158500 },
    { "epoch": 0.33, "learning_rate": 4.449782819935718e-05, "loss": 0.7384, "step": 159000 },
    { "epoch": 0.33, "learning_rate": 4.448052577231114e-05, "loss": 0.7376, "step": 159500 },
    { "epoch": 0.33, "learning_rate": 4.4463223345265084e-05, "loss": 0.738, "step": 160000 },
    { "epoch": 0.33, "learning_rate": 4.444592091821904e-05, "loss": 0.7409, "step": 160500 },
    { "epoch": 0.33, "learning_rate": 4.4428618491173e-05, "loss": 0.7379, "step": 161000 },
    { "epoch": 0.34, "learning_rate": 4.441131606412695e-05, "loss": 0.7364, "step": 161500 },
    { "epoch": 0.34, "learning_rate": 4.43940136370809e-05, "loss": 0.7382, "step": 162000 },
    { "epoch": 0.34, "learning_rate": 4.437671121003485e-05, "loss": 0.7371, "step": 162500 },
    { "epoch": 0.34, "learning_rate": 4.435940878298881e-05, "loss": 0.7337, "step": 163000 },
    { "epoch": 0.34, "learning_rate": 4.434210635594277e-05, "loss": 0.7358, "step": 163500 },
    { "epoch": 0.34, "learning_rate": 4.432480392889672e-05, "loss": 0.7358, "step": 164000 },
    { "epoch": 0.34, "learning_rate": 4.430750150185067e-05, "loss": 0.7337, "step": 164500 },
    { "epoch": 0.34, "learning_rate": 4.429019907480462e-05, "loss": 0.7362, "step": 165000 },
    { "epoch": 0.34, "learning_rate": 4.427289664775858e-05, "loss": 0.7326, "step": 165500 },
    { "epoch": 0.34, "learning_rate": 4.4255594220712534e-05, "loss": 0.7357, "step": 166000 },
    { "epoch": 0.35, "learning_rate": 4.423829179366648e-05, "loss": 0.735, "step": 166500 },
    { "epoch": 0.35, "learning_rate": 4.4220989366620436e-05, "loss": 0.7366, "step": 167000 },
    { "epoch": 0.35, "learning_rate": 4.4203686939574394e-05, "loss": 0.7352, "step": 167500 },
    { "epoch": 0.35, "learning_rate": 4.4186384512528344e-05, "loss": 0.7381, "step": 168000 },
    { "epoch": 0.35, "learning_rate": 4.41690820854823e-05, "loss": 0.7354, "step": 168500 },
    { "epoch": 0.35, "learning_rate": 4.4151779658436246e-05, "loss": 0.7362, "step": 169000 },
    { "epoch": 0.35, "learning_rate": 4.4134477231390203e-05, "loss": 0.7346, "step": 169500 },
    { "epoch": 0.35, "learning_rate": 4.411717480434416e-05, "loss": 0.7321, "step": 170000 },
    { "epoch": 0.35, "learning_rate": 4.409987237729811e-05, "loss": 0.732, "step": 170500 },
    { "epoch": 0.36, "learning_rate": 4.408256995025206e-05, "loss": 0.7351, "step": 171000 },
    { "epoch": 0.36, "learning_rate": 4.4065267523206013e-05, "loss": 0.738, "step": 171500 },
    { "epoch": 0.36, "learning_rate": 4.404796509615997e-05, "loss": 0.735, "step": 172000 },
    { "epoch": 0.36, "learning_rate": 4.403066266911393e-05, "loss": 0.7344, "step": 172500 },
    { "epoch": 0.36, "learning_rate": 4.401336024206787e-05, "loss": 0.7299, "step": 173000 },
    { "epoch": 0.36, "learning_rate": 4.399605781502183e-05, "loss": 0.7318, "step": 173500 },
    { "epoch": 0.36, "learning_rate": 4.397875538797578e-05, "loss": 0.7329, "step": 174000 },
    { "epoch": 0.36, "learning_rate": 4.396145296092974e-05, "loss": 0.7308, "step": 174500 },
    { "epoch": 0.36, "learning_rate": 4.3944150533883696e-05, "loss": 0.7307, "step": 175000 },
    { "epoch": 0.36, "learning_rate": 4.392684810683764e-05, "loss": 0.731, "step": 175500 },
    { "epoch": 0.37, "learning_rate": 4.39095456797916e-05, "loss": 0.7324, "step": 176000 },
    { "epoch": 0.37, "learning_rate": 4.389224325274555e-05, "loss": 0.7361, "step": 176500 },
    { "epoch": 0.37, "learning_rate": 4.3874940825699506e-05, "loss": 0.7347, "step": 177000 },
    { "epoch": 0.37, "learning_rate": 4.385763839865346e-05, "loss": 0.7328, "step": 177500 },
    { "epoch": 0.37, "learning_rate": 4.384033597160741e-05, "loss": 0.7287, "step": 178000 },
    { "epoch": 0.37, "learning_rate": 4.3823033544561365e-05, "loss": 0.7317, "step": 178500 },
    { "epoch": 0.37, "learning_rate": 4.380573111751532e-05, "loss": 0.7326, "step": 179000 },
    { "epoch": 0.37, "learning_rate": 4.3788428690469274e-05, "loss": 0.7324, "step": 179500 },
    { "epoch": 0.37, "learning_rate": 4.3771126263423224e-05, "loss": 0.7319, "step": 180000 },
    { "epoch": 0.37, "learning_rate": 4.3753823836377175e-05, "loss": 0.7338, "step": 180500 },
    { "epoch": 0.38, "learning_rate": 4.373652140933113e-05, "loss": 0.7324, "step": 181000 },
    { "epoch": 0.38, "learning_rate": 4.371921898228509e-05, "loss": 0.7341, "step": 181500 },
    { "epoch": 0.38, "learning_rate": 4.3701916555239034e-05, "loss": 0.7282, "step": 182000 },
    { "epoch": 0.38, "learning_rate": 4.368461412819299e-05, "loss": 0.7329, "step": 182500 },
    { "epoch": 0.38, "learning_rate": 4.366731170114694e-05, "loss": 0.7307, "step": 183000 },
    { "epoch": 0.38, "learning_rate": 4.36500092741009e-05, "loss": 0.7295, "step": 183500 },
    { "epoch": 0.38, "learning_rate": 4.363270684705485e-05, "loss": 0.7281, "step": 184000 },
    { "epoch": 0.38, "learning_rate": 4.36154044200088e-05, "loss": 0.73, "step": 184500 },
    { "epoch": 0.38, "learning_rate": 4.359810199296276e-05, "loss": 0.7307, "step": 185000 },
    { "epoch": 0.39, "learning_rate": 4.358079956591671e-05, "loss": 0.7268, "step": 185500 },
    { "epoch": 0.39, "learning_rate": 4.356349713887067e-05, "loss": 0.7281, "step": 186000 },
    { "epoch": 0.39, "learning_rate": 4.354619471182462e-05, "loss": 0.7274, "step": 186500 },
    { "epoch": 0.39, "learning_rate": 4.352889228477857e-05, "loss": 0.7291, "step": 187000 },
    { "epoch": 0.39, "learning_rate": 4.351158985773253e-05, "loss": 0.7303, "step": 187500 },
    { "epoch": 0.39, "learning_rate": 4.349428743068648e-05, "loss": 0.7282, "step": 188000 },
    { "epoch": 0.39, "learning_rate": 4.347698500364043e-05, "loss": 0.7278, "step": 188500 },
    { "epoch": 0.39, "learning_rate": 4.3459682576594386e-05, "loss": 0.7307, "step": 189000 },
    { "epoch": 0.39, "learning_rate": 4.344238014954834e-05, "loss": 0.7314, "step": 189500 },
    { "epoch": 0.39, "learning_rate": 4.3425077722502295e-05, "loss": 0.7284, "step": 190000 },
    { "epoch": 0.4, "learning_rate": 4.340777529545625e-05, "loss": 0.7318, "step": 190500 },
    { "epoch": 0.4, "learning_rate": 4.3390472868410196e-05, "loss": 0.7323, "step": 191000 },
    { "epoch": 0.4, "learning_rate": 4.3373170441364154e-05, "loss": 0.726, "step": 191500 },
    { "epoch": 0.4, "learning_rate": 4.3355868014318105e-05, "loss": 0.7276, "step": 192000 },
    { "epoch": 0.4, "learning_rate": 4.333856558727206e-05, "loss": 0.7252, "step": 192500 },
    { "epoch": 0.4, "learning_rate": 4.332126316022601e-05, "loss": 0.7254, "step": 193000 },
    { "epoch": 0.4, "learning_rate": 4.3303960733179964e-05, "loss": 0.7282, "step": 193500 },
    { "epoch": 0.4, "learning_rate": 4.328665830613392e-05, "loss": 0.7261, "step": 194000 },
    { "epoch": 0.4, "learning_rate": 4.326935587908787e-05, "loss": 0.7287, "step": 194500 },
    { "epoch": 0.4, "learning_rate": 4.325205345204182e-05, "loss": 0.7259, "step": 195000 },
    { "epoch": 0.41, "learning_rate": 4.323475102499578e-05, "loss": 0.7282, "step": 195500 },
    { "epoch": 0.41, "learning_rate": 4.321744859794973e-05, "loss": 0.7254, "step": 196000 },
    { "epoch": 0.41, "learning_rate": 4.320014617090369e-05, "loss": 0.7271, "step": 196500 },
    { "epoch": 0.41, "learning_rate": 4.318284374385764e-05, "loss": 0.7235, "step": 197000 },
    { "epoch": 0.41, "learning_rate": 4.316554131681159e-05, "loss": 0.7242, "step": 197500 },
    { "epoch": 0.41, "learning_rate": 4.314823888976555e-05, "loss": 0.7259, "step": 198000 },
    { "epoch": 0.41, "learning_rate": 4.31309364627195e-05, "loss": 0.7251, "step": 198500 },
    { "epoch": 0.41, "learning_rate": 4.3113634035673456e-05, "loss": 0.7254, "step": 199000 },
    { "epoch": 0.41, "learning_rate": 4.309633160862741e-05, "loss": 0.7232, "step": 199500 },
    { "epoch": 0.42, "learning_rate": 4.307902918158136e-05, "loss": 0.7235, "step": 200000 },
    { "epoch": 0.42, "learning_rate": 4.3061726754535316e-05, "loss": 0.7233, "step": 200500 },
    { "epoch": 0.42, "learning_rate": 4.3044424327489266e-05, "loss": 0.7229, "step": 201000 },
    { "epoch": 0.42, "learning_rate": 4.3027121900443224e-05, "loss": 0.7228, "step": 201500 },
    { "epoch": 0.42, "learning_rate": 4.3009819473397175e-05, "loss": 0.7254, "step": 202000 },
    { "epoch": 0.42, "learning_rate": 4.2992517046351125e-05, "loss": 0.725, "step": 202500 },
    { "epoch": 0.42, "learning_rate": 4.297521461930508e-05, "loss": 0.7247, "step": 203000 },
    { "epoch": 0.42, "learning_rate": 4.2957912192259034e-05, "loss": 0.7229, "step": 203500 },
    { "epoch": 0.42, "learning_rate": 4.2940609765212985e-05, "loss": 0.7233, "step": 204000 },
    { "epoch": 0.42, "learning_rate": 4.292330733816694e-05, "loss": 0.7203, "step": 204500 },
    { "epoch": 0.43, "learning_rate": 4.290600491112089e-05, "loss": 0.7258, "step": 205000 },
    { "epoch": 0.43, "learning_rate": 4.288870248407485e-05, "loss": 0.723, "step": 205500 },
    { "epoch": 0.43, "learning_rate": 4.28714000570288e-05, "loss": 0.7224, "step": 206000 },
    { "epoch": 0.43, "learning_rate": 4.285409762998275e-05, "loss": 0.7254, "step": 206500 },
    { "epoch": 0.43, "learning_rate": 4.283679520293671e-05, "loss": 0.7237, "step": 207000 },
    { "epoch": 0.43, "learning_rate": 4.281949277589066e-05, "loss": 0.7216, "step": 207500 },
    { "epoch": 0.43, "learning_rate": 4.280219034884462e-05, "loss": 0.7216, "step": 208000 },
    { "epoch": 0.43, "learning_rate": 4.278488792179857e-05, "loss": 0.7225, "step": 208500 },
    { "epoch": 0.43, "learning_rate": 4.276758549475252e-05, "loss": 0.722, "step": 209000 },
    { "epoch": 0.43, "learning_rate": 4.275028306770648e-05, "loss": 0.7215, "step": 209500 },
    { "epoch": 0.44, "learning_rate": 4.273298064066043e-05, "loss": 0.7224, "step": 210000 },
    { "epoch": 0.44, "learning_rate": 4.271567821361438e-05, "loss": 0.7201, "step": 210500 },
    { "epoch": 0.44, "learning_rate": 4.2698375786568336e-05, "loss": 0.7197, "step": 211000 },
    { "epoch": 0.44, "learning_rate": 4.268107335952229e-05, "loss": 0.7205, "step": 211500 },
    { "epoch": 0.44, "learning_rate": 4.2663770932476245e-05, "loss": 0.7191, "step": 212000 },
    { "epoch": 0.44, "learning_rate": 4.2646468505430196e-05, "loss": 0.7233, "step": 212500 },
    { "epoch": 0.44, "learning_rate": 4.2629166078384146e-05, "loss": 0.7181, "step": 213000 },
    { "epoch": 0.44, "learning_rate": 4.2611863651338104e-05, "loss": 0.7211, "step": 213500 },
    { "epoch": 0.44, "learning_rate": 4.2594561224292055e-05, "loss": 0.7217, "step": 214000 },
    { "epoch": 0.45, "learning_rate": 4.257725879724601e-05, "loss": 0.7223, "step": 214500 },
    { "epoch": 0.45, "learning_rate": 4.255995637019996e-05, "loss": 0.7218, "step": 215000 },
    { "epoch": 0.45, "learning_rate": 4.2542653943153914e-05, "loss": 0.7212, "step": 215500 },
    { "epoch": 0.45, "learning_rate": 4.252535151610787e-05, "loss": 0.7201, "step": 216000 },
    { "epoch": 0.45, "learning_rate": 4.250804908906182e-05, "loss": 0.7171, "step": 216500 },
    { "epoch": 0.45, "learning_rate": 4.249074666201578e-05, "loss": 0.7244, "step": 217000 },
    { "epoch": 0.45, "learning_rate": 4.247344423496973e-05, "loss": 0.7177, "step": 217500 },
    { "epoch": 0.45, "learning_rate": 4.245614180792368e-05, "loss": 0.7195, "step": 218000 },
    { "epoch": 0.45, "learning_rate": 4.243883938087764e-05, "loss": 0.717, "step": 218500 },
    { "epoch": 0.45, "learning_rate": 4.242153695383159e-05, "loss": 0.7194, "step": 219000 },
    { "epoch": 0.46, "learning_rate": 4.240423452678554e-05, "loss": 0.72, "step": 219500 },
    { "epoch": 0.46, "learning_rate": 4.23869320997395e-05, "loss": 0.7252, "step": 220000 },
    { "epoch": 0.46, "learning_rate": 4.236962967269345e-05, "loss": 0.7195, "step": 220500 },
    { "epoch": 0.46, "learning_rate": 4.2352327245647407e-05, "loss": 0.7186, "step": 221000 },
    { "epoch": 0.46, "learning_rate": 4.233502481860136e-05, "loss": 0.7183, "step": 221500 },
    { "epoch": 0.46, "learning_rate": 4.231772239155531e-05, "loss": 0.7199, "step": 222000 },
    { "epoch": 0.46, "learning_rate": 4.2300419964509266e-05, "loss": 0.7185, "step": 222500 },
    { "epoch": 0.46, "learning_rate": 4.2283117537463217e-05, "loss": 0.7179, "step": 223000 },
    { "epoch": 0.46, "learning_rate": 4.2265815110417174e-05, "loss": 0.7185, "step": 223500 },
    { "epoch": 0.47, "learning_rate": 4.2248512683371125e-05, "loss": 0.7201, "step": 224000 },
    { "epoch": 0.47, "learning_rate": 4.2231210256325076e-05, "loss": 0.7178, "step": 224500 },
    { "epoch": 0.47, "learning_rate": 4.221390782927903e-05, "loss": 0.7179, "step": 225000 },
    { "epoch": 0.47, "learning_rate": 4.2196605402232984e-05, "loss": 0.7196, "step": 225500 },
    { "epoch": 0.47, "learning_rate": 4.2179302975186935e-05, "loss": 0.7172, "step": 226000 },
    { "epoch": 0.47, "learning_rate": 4.216200054814089e-05, "loss": 0.7179, "step": 226500 },
    { "epoch": 0.47, "learning_rate": 4.214469812109484e-05, "loss": 0.7194, "step": 227000 },
    { "epoch": 0.47, "learning_rate": 4.21273956940488e-05, "loss": 0.7179, "step": 227500 },
    { "epoch": 0.47, "learning_rate": 4.211009326700275e-05, "loss": 0.7179, "step": 228000 },
    { "epoch": 0.47, "learning_rate": 4.20927908399567e-05, "loss": 0.7139, "step": 228500 },
    { "epoch": 0.48, "learning_rate": 4.207548841291066e-05, "loss": 0.7156, "step": 229000 },
    { "epoch": 0.48, "learning_rate": 4.205818598586461e-05, "loss": 0.7157, "step": 229500 },
    { "epoch": 0.48, "learning_rate": 4.204088355881857e-05, "loss": 0.7157, "step": 230000 },
    { "epoch": 0.48, "learning_rate": 4.202358113177252e-05, "loss": 0.7201, "step": 230500 },
    { "epoch": 0.48, "learning_rate": 4.200627870472647e-05, "loss": 0.7166, "step": 231000 },
    { "epoch": 0.48, "learning_rate": 4.198897627768043e-05, "loss": 0.716, "step": 231500 },
    { "epoch": 0.48, "learning_rate": 4.197167385063438e-05, "loss": 0.7194, "step": 232000 },
    { "epoch": 0.48, "learning_rate": 4.195437142358833e-05, "loss": 0.717, "step": 232500 },
    { "epoch": 0.48, "learning_rate": 4.193706899654229e-05, "loss": 0.7136, "step": 233000 },
    { "epoch": 0.48, "learning_rate": 4.191976656949624e-05, "loss": 0.7163, "step": 233500 },
    { "epoch": 0.49, "learning_rate": 4.1902464142450195e-05, "loss": 0.7172, "step": 234000 },
    { "epoch": 0.49, "learning_rate": 4.1885161715404146e-05, "loss": 0.7158, "step": 234500 },
    { "epoch": 0.49, "learning_rate": 4.18678592883581e-05, "loss": 0.7176, "step": 235000 },
    { "epoch": 0.49, "learning_rate": 4.1850556861312054e-05, "loss": 0.7139, "step": 235500 },
    { "epoch": 0.49, "learning_rate": 4.1833254434266005e-05, "loss": 0.7185, "step": 236000 },
    { "epoch": 0.49, "learning_rate": 4.181595200721996e-05, "loss": 0.7138, "step": 236500 },
    { "epoch": 0.49, "learning_rate": 4.179864958017391e-05, "loss": 0.7171, "step": 237000 },
    { "epoch": 0.49, "learning_rate": 4.1781347153127864e-05, "loss": 0.7193, "step": 237500 },
    { "epoch": 0.49, "learning_rate": 4.176404472608182e-05, "loss": 0.7183, "step": 238000 },
    { "epoch": 0.5, "learning_rate": 4.174674229903577e-05, "loss": 0.7184, "step": 238500 },
    { "epoch": 0.5, "learning_rate": 4.172943987198973e-05, "loss": 0.7173, "step": 239000 },
    { "epoch": 0.5, "learning_rate": 4.1712137444943674e-05, "loss": 0.717, "step": 239500 },
    { "epoch": 0.5, "learning_rate": 4.169483501789763e-05, "loss": 0.7115, "step": 240000 },
    { "epoch": 0.5, "learning_rate": 4.167753259085159e-05, "loss": 0.7124, "step": 240500 },
    { "epoch": 0.5, "learning_rate": 4.166023016380554e-05, "loss": 0.7157, "step": 241000 },
    { "epoch": 0.5, "learning_rate": 4.164292773675949e-05, "loss": 0.7142, "step": 241500 },
    { "epoch": 0.5, "learning_rate": 4.162562530971345e-05, "loss": 0.7142, "step": 242000 },
    { "epoch": 0.5, "learning_rate": 4.16083228826674e-05, "loss": 0.7148, "step": 242500 },
    { "epoch": 0.5, "learning_rate": 4.159102045562136e-05, "loss": 0.7122, "step": 243000 },
    { "epoch": 0.51, "learning_rate": 4.15737180285753e-05, "loss": 0.7118, "step": 243500 },
    { "epoch": 0.51, "learning_rate": 4.155641560152926e-05, "loss": 0.7149, "step": 244000 },
    { "epoch": 0.51, "learning_rate": 4.1539113174483216e-05, "loss": 0.7145, "step": 244500 },
    { "epoch": 0.51, "learning_rate": 4.152181074743717e-05, "loss": 0.7131, "step": 245000 },
    { "epoch": 0.51, "learning_rate": 4.1504508320391124e-05, "loss": 0.7137, "step": 245500 },
    { "epoch": 0.51, "learning_rate": 4.148720589334507e-05, "loss": 0.7131, "step": 246000 },
    { "epoch": 0.51, "learning_rate": 4.1469903466299026e-05, "loss": 0.715, "step": 246500 },
    { "epoch": 0.51, "learning_rate": 4.1452601039252984e-05, "loss": 0.7137, "step": 247000 },
    { "epoch": 0.51, "learning_rate": 4.1435298612206934e-05, "loss": 0.7131, "step": 247500 },
    { "epoch": 0.51, "learning_rate": 4.1417996185160885e-05, "loss": 0.7131, "step": 248000 },
    { "epoch": 0.52, "learning_rate": 4.1400693758114836e-05, "loss": 0.7128, "step": 248500 },
    { "epoch": 0.52, "learning_rate": 4.1383391331068794e-05, "loss": 0.715, "step": 249000 },
    { "epoch": 0.52, "learning_rate": 4.136608890402275e-05, "loss": 0.7149, "step": 249500 },
    { "epoch": 0.52, "learning_rate": 4.13487864769767e-05, "loss": 0.7122, "step": 250000 },
    { "epoch": 0.52, "learning_rate": 4.133148404993065e-05, "loss": 0.7136, "step": 250500 },
    { "epoch": 0.52, "learning_rate": 4.1314181622884603e-05, "loss": 0.7108, "step": 251000 },
    { "epoch": 0.52, "learning_rate": 4.129687919583856e-05, "loss": 0.7133, "step": 251500 },
    { "epoch": 0.52, "learning_rate": 4.127957676879252e-05, "loss": 0.7151, "step": 252000 },
    { "epoch": 0.52, "learning_rate": 4.126227434174646e-05, "loss": 0.7166, "step": 252500 },
    { "epoch": 0.53, "learning_rate": 4.124497191470042e-05, "loss": 0.7122, "step": 253000 },
    { "epoch": 0.53, "learning_rate": 4.122766948765438e-05, "loss": 0.7154, "step": 253500 },
    { "epoch": 0.53, "learning_rate": 4.121036706060833e-05, "loss": 0.7124, "step": 254000 },
    { "epoch": 0.53, "learning_rate": 4.119306463356228e-05, "loss": 0.7121, "step": 254500 },
    { "epoch": 0.53, "learning_rate": 4.117576220651623e-05, "loss": 0.7108, "step": 255000 },
    { "epoch": 0.53, "learning_rate": 4.115845977947019e-05, "loss": 0.716, "step": 255500 },
    { "epoch": 0.53, "learning_rate": 4.1141157352424145e-05, "loss": 0.7137, "step": 256000 },
    { "epoch": 0.53, "learning_rate": 4.1123854925378096e-05, "loss": 0.712, "step": 256500 },
    { "epoch": 0.53, "learning_rate": 4.110655249833205e-05, "loss": 0.7096, "step": 257000 },
    { "epoch": 0.53, "learning_rate": 4.1089250071286e-05, "loss": 0.7144, "step": 257500 },
    { "epoch": 0.54, "learning_rate": 4.1071947644239955e-05, "loss": 0.7132, "step": 258000 },
    { "epoch": 0.54, "learning_rate": 4.105464521719391e-05, "loss": 0.7105, "step": 258500 },
    { "epoch": 0.54, "learning_rate": 4.103734279014786e-05, "loss": 0.7082, "step": 259000 },
    { "epoch": 0.54, "learning_rate": 4.1020040363101814e-05, "loss": 0.7101, "step": 259500 },
    { "epoch": 0.54, "learning_rate": 4.1002737936055765e-05, "loss": 0.7089, "step": 260000 },
    { "epoch": 0.54, "learning_rate": 4.098543550900972e-05, "loss": 0.7088, "step": 260500 },
    { "epoch": 0.54, "learning_rate": 4.096813308196368e-05, "loss": 0.7128, "step": 261000 },
    { "epoch": 0.54, "learning_rate": 4.0950830654917624e-05, "loss": 0.7103, "step": 261500 },
    { "epoch": 0.54, "learning_rate": 4.093352822787158e-05, "loss": 0.7126, "step": 262000 },
    { "epoch": 0.55, "learning_rate": 4.091622580082553e-05, "loss": 0.7137, "step": 262500 },
    { "epoch": 0.55, "learning_rate": 4.089892337377949e-05,
|
"loss": 0.7093, |
|
"step": 263000 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.088162094673344e-05, |
|
"loss": 0.708, |
|
"step": 263500 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.086431851968739e-05, |
|
"loss": 0.7103, |
|
"step": 264000 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.084701609264135e-05, |
|
"loss": 0.71, |
|
"step": 264500 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.082971366559531e-05, |
|
"loss": 0.7127, |
|
"step": 265000 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.081241123854925e-05, |
|
"loss": 0.7088, |
|
"step": 265500 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.079510881150321e-05, |
|
"loss": 0.7087, |
|
"step": 266000 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.077780638445716e-05, |
|
"loss": 0.7078, |
|
"step": 266500 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.076050395741112e-05, |
|
"loss": 0.711, |
|
"step": 267000 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.0743201530365075e-05, |
|
"loss": 0.7097, |
|
"step": 267500 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.072589910331902e-05, |
|
"loss": 0.7068, |
|
"step": 268000 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.0708596676272976e-05, |
|
"loss": 0.709, |
|
"step": 268500 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.069129424922693e-05, |
|
"loss": 0.7079, |
|
"step": 269000 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.0673991822180885e-05, |
|
"loss": 0.708, |
|
"step": 269500 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.0656689395134835e-05, |
|
"loss": 0.7073, |
|
"step": 270000 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.0639386968088786e-05, |
|
"loss": 0.7082, |
|
"step": 270500 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.0622084541042744e-05, |
|
"loss": 0.7078, |
|
"step": 271000 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.0604782113996695e-05, |
|
"loss": 0.7044, |
|
"step": 271500 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.058747968695065e-05, |
|
"loss": 0.7074, |
|
"step": 272000 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.05701772599046e-05, |
|
"loss": 0.7086, |
|
"step": 272500 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.0552874832858554e-05, |
|
"loss": 0.707, |
|
"step": 273000 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.053557240581251e-05, |
|
"loss": 0.7045, |
|
"step": 273500 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.051826997876646e-05, |
|
"loss": 0.7074, |
|
"step": 274000 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.050096755172041e-05, |
|
"loss": 0.7056, |
|
"step": 274500 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.048366512467437e-05, |
|
"loss": 0.7061, |
|
"step": 275000 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.046636269762832e-05, |
|
"loss": 0.705, |
|
"step": 275500 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.044906027058228e-05, |
|
"loss": 0.7063, |
|
"step": 276000 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.0431757843536236e-05, |
|
"loss": 0.7087, |
|
"step": 276500 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.041445541649018e-05, |
|
"loss": 0.7069, |
|
"step": 277000 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.039715298944414e-05, |
|
"loss": 0.707, |
|
"step": 277500 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.037985056239809e-05, |
|
"loss": 0.7063, |
|
"step": 278000 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.0362548135352046e-05, |
|
"loss": 0.7088, |
|
"step": 278500 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.0345245708306e-05, |
|
"loss": 0.7041, |
|
"step": 279000 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.032794328125995e-05, |
|
"loss": 0.7062, |
|
"step": 279500 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.0310640854213906e-05, |
|
"loss": 0.7066, |
|
"step": 280000 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.0293338427167856e-05, |
|
"loss": 0.7058, |
|
"step": 280500 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.027603600012181e-05, |
|
"loss": 0.7072, |
|
"step": 281000 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.0258733573075765e-05, |
|
"loss": 0.7093, |
|
"step": 281500 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.0241431146029716e-05, |
|
"loss": 0.7062, |
|
"step": 282000 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.022412871898367e-05, |
|
"loss": 0.7063, |
|
"step": 282500 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.0206826291937624e-05, |
|
"loss": 0.7064, |
|
"step": 283000 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.0189523864891575e-05, |
|
"loss": 0.7047, |
|
"step": 283500 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.017222143784553e-05, |
|
"loss": 0.7075, |
|
"step": 284000 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.015491901079948e-05, |
|
"loss": 0.7065, |
|
"step": 284500 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.013761658375344e-05, |
|
"loss": 0.701, |
|
"step": 285000 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.012031415670739e-05, |
|
"loss": 0.7071, |
|
"step": 285500 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.010301172966134e-05, |
|
"loss": 0.7043, |
|
"step": 286000 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.00857093026153e-05, |
|
"loss": 0.7066, |
|
"step": 286500 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.006840687556925e-05, |
|
"loss": 0.7066, |
|
"step": 287000 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.005110444852321e-05, |
|
"loss": 0.7052, |
|
"step": 287500 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.003380202147716e-05, |
|
"loss": 0.7077, |
|
"step": 288000 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.001649959443111e-05, |
|
"loss": 0.7045, |
|
"step": 288500 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.999919716738507e-05, |
|
"loss": 0.7046, |
|
"step": 289000 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.998189474033902e-05, |
|
"loss": 0.7035, |
|
"step": 289500 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.996459231329297e-05, |
|
"loss": 0.704, |
|
"step": 290000 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.9947289886246927e-05, |
|
"loss": 0.7048, |
|
"step": 290500 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.992998745920088e-05, |
|
"loss": 0.7057, |
|
"step": 291000 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.9912685032154835e-05, |
|
"loss": 0.7046, |
|
"step": 291500 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.9895382605108786e-05, |
|
"loss": 0.7051, |
|
"step": 292000 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.9878080178062736e-05, |
|
"loss": 0.7015, |
|
"step": 292500 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.9860777751016694e-05, |
|
"loss": 0.7031, |
|
"step": 293000 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.9843475323970645e-05, |
|
"loss": 0.706, |
|
"step": 293500 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.98261728969246e-05, |
|
"loss": 0.7017, |
|
"step": 294000 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.980887046987855e-05, |
|
"loss": 0.7027, |
|
"step": 294500 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.9791568042832504e-05, |
|
"loss": 0.7043, |
|
"step": 295000 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.977426561578646e-05, |
|
"loss": 0.707, |
|
"step": 295500 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.975696318874041e-05, |
|
"loss": 0.7039, |
|
"step": 296000 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.973966076169436e-05, |
|
"loss": 0.7039, |
|
"step": 296500 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.972235833464832e-05, |
|
"loss": 0.7056, |
|
"step": 297000 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.970505590760227e-05, |
|
"loss": 0.7066, |
|
"step": 297500 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.968775348055623e-05, |
|
"loss": 0.7069, |
|
"step": 298000 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.967045105351018e-05, |
|
"loss": 0.705, |
|
"step": 298500 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.965314862646413e-05, |
|
"loss": 0.7062, |
|
"step": 299000 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.963584619941809e-05, |
|
"loss": 0.7031, |
|
"step": 299500 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.961854377237204e-05, |
|
"loss": 0.7028, |
|
"step": 300000 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.9601241345326e-05, |
|
"loss": 0.7007, |
|
"step": 300500 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.958393891827995e-05, |
|
"loss": 0.7035, |
|
"step": 301000 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.95666364912339e-05, |
|
"loss": 0.6988, |
|
"step": 301500 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.9549334064187856e-05, |
|
"loss": 0.6997, |
|
"step": 302000 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.953203163714181e-05, |
|
"loss": 0.7025, |
|
"step": 302500 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.951472921009576e-05, |
|
"loss": 0.7059, |
|
"step": 303000 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.9497426783049715e-05, |
|
"loss": 0.7001, |
|
"step": 303500 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.9480124356003666e-05, |
|
"loss": 0.7035, |
|
"step": 304000 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.946282192895762e-05, |
|
"loss": 0.7031, |
|
"step": 304500 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.9445519501911574e-05, |
|
"loss": 0.7032, |
|
"step": 305000 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.9428217074865525e-05, |
|
"loss": 0.7046, |
|
"step": 305500 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.941091464781948e-05, |
|
"loss": 0.7014, |
|
"step": 306000 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.939361222077343e-05, |
|
"loss": 0.7028, |
|
"step": 306500 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.937630979372739e-05, |
|
"loss": 0.7017, |
|
"step": 307000 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.935900736668134e-05, |
|
"loss": 0.704, |
|
"step": 307500 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.934170493963529e-05, |
|
"loss": 0.7001, |
|
"step": 308000 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.932440251258925e-05, |
|
"loss": 0.7021, |
|
"step": 308500 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.93071000855432e-05, |
|
"loss": 0.7044, |
|
"step": 309000 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.928979765849716e-05, |
|
"loss": 0.7034, |
|
"step": 309500 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.927249523145111e-05, |
|
"loss": 0.7024, |
|
"step": 310000 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.925519280440506e-05, |
|
"loss": 0.7039, |
|
"step": 310500 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.923789037735902e-05, |
|
"loss": 0.7054, |
|
"step": 311000 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.922058795031297e-05, |
|
"loss": 0.7036, |
|
"step": 311500 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.920328552326692e-05, |
|
"loss": 0.699, |
|
"step": 312000 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.918598309622088e-05, |
|
"loss": 0.7025, |
|
"step": 312500 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.916868066917483e-05, |
|
"loss": 0.7024, |
|
"step": 313000 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.9151378242128785e-05, |
|
"loss": 0.703, |
|
"step": 313500 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.913407581508273e-05, |
|
"loss": 0.6986, |
|
"step": 314000 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.911677338803669e-05, |
|
"loss": 0.7024, |
|
"step": 314500 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.9099470960990644e-05, |
|
"loss": 0.7017, |
|
"step": 315000 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.9082168533944595e-05, |
|
"loss": 0.7004, |
|
"step": 315500 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.906486610689855e-05, |
|
"loss": 0.7005, |
|
"step": 316000 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.9047563679852503e-05, |
|
"loss": 0.7025, |
|
"step": 316500 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.9030261252806454e-05, |
|
"loss": 0.7, |
|
"step": 317000 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.901295882576041e-05, |
|
"loss": 0.7022, |
|
"step": 317500 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.899565639871436e-05, |
|
"loss": 0.6958, |
|
"step": 318000 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.8978353971668313e-05, |
|
"loss": 0.7018, |
|
"step": 318500 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.896105154462227e-05, |
|
"loss": 0.7028, |
|
"step": 319000 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.894374911757622e-05, |
|
"loss": 0.6997, |
|
"step": 319500 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.892644669053018e-05, |
|
"loss": 0.7023, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.890914426348413e-05, |
|
"loss": 0.6995, |
|
"step": 320500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.889184183643808e-05, |
|
"loss": 0.6987, |
|
"step": 321000 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.887453940939204e-05, |
|
"loss": 0.6987, |
|
"step": 321500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.885723698234599e-05, |
|
"loss": 0.7028, |
|
"step": 322000 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.883993455529995e-05, |
|
"loss": 0.7013, |
|
"step": 322500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.882263212825389e-05, |
|
"loss": 0.6988, |
|
"step": 323000 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.880532970120785e-05, |
|
"loss": 0.6989, |
|
"step": 323500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.8788027274161806e-05, |
|
"loss": 0.7001, |
|
"step": 324000 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.877072484711576e-05, |
|
"loss": 0.7, |
|
"step": 324500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.875342242006971e-05, |
|
"loss": 0.6978, |
|
"step": 325000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.873611999302366e-05, |
|
"loss": 0.6977, |
|
"step": 325500 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.8718817565977616e-05, |
|
"loss": 0.7004, |
|
"step": 326000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.8701515138931574e-05, |
|
"loss": 0.7009, |
|
"step": 326500 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.8684212711885524e-05, |
|
"loss": 0.6988, |
|
"step": 327000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.8666910284839475e-05, |
|
"loss": 0.7002, |
|
"step": 327500 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.864960785779343e-05, |
|
"loss": 0.7003, |
|
"step": 328000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.8632305430747384e-05, |
|
"loss": 0.7012, |
|
"step": 328500 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.861500300370134e-05, |
|
"loss": 0.6975, |
|
"step": 329000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.8597700576655285e-05, |
|
"loss": 0.6992, |
|
"step": 329500 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.858039814960924e-05, |
|
"loss": 0.6955, |
|
"step": 330000 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.85630957225632e-05, |
|
"loss": 0.6982, |
|
"step": 330500 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.854579329551715e-05, |
|
"loss": 0.6977, |
|
"step": 331000 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.852849086847111e-05, |
|
"loss": 0.7023, |
|
"step": 331500 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.851118844142505e-05, |
|
"loss": 0.7031, |
|
"step": 332000 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.849388601437901e-05, |
|
"loss": 0.698, |
|
"step": 332500 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.847658358733297e-05, |
|
"loss": 0.6974, |
|
"step": 333000 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.845928116028692e-05, |
|
"loss": 0.6984, |
|
"step": 333500 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.844197873324087e-05, |
|
"loss": 0.6949, |
|
"step": 334000 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.842467630619482e-05, |
|
"loss": 0.6974, |
|
"step": 334500 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.840737387914878e-05, |
|
"loss": 0.696, |
|
"step": 335000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.8390071452102735e-05, |
|
"loss": 0.6978, |
|
"step": 335500 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.8372769025056686e-05, |
|
"loss": 0.6987, |
|
"step": 336000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.835546659801064e-05, |
|
"loss": 0.6974, |
|
"step": 336500 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.833816417096459e-05, |
|
"loss": 0.6989, |
|
"step": 337000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.8320861743918545e-05, |
|
"loss": 0.6997, |
|
"step": 337500 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.83035593168725e-05, |
|
"loss": 0.6982, |
|
"step": 338000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.828625688982645e-05, |
|
"loss": 0.6989, |
|
"step": 338500 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.8268954462780405e-05, |
|
"loss": 0.6938, |
|
"step": 339000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.825165203573436e-05, |
|
"loss": 0.6975, |
|
"step": 339500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.823434960868831e-05, |
|
"loss": 0.6945, |
|
"step": 340000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.8217047181642264e-05, |
|
"loss": 0.698, |
|
"step": 340500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.8199744754596215e-05, |
|
"loss": 0.6962, |
|
"step": 341000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.818244232755017e-05, |
|
"loss": 0.6969, |
|
"step": 341500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.816513990050413e-05, |
|
"loss": 0.6971, |
|
"step": 342000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.814783747345808e-05, |
|
"loss": 0.6965, |
|
"step": 342500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.813053504641203e-05, |
|
"loss": 0.6893, |
|
"step": 343000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.811323261936598e-05, |
|
"loss": 0.6975, |
|
"step": 343500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.809593019231994e-05, |
|
"loss": 0.6978, |
|
"step": 344000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.80786277652739e-05, |
|
"loss": 0.6972, |
|
"step": 344500 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.806132533822784e-05, |
|
"loss": 0.6947, |
|
"step": 345000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.80440229111818e-05, |
|
"loss": 0.6951, |
|
"step": 345500 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.802672048413575e-05, |
|
"loss": 0.6948, |
|
"step": 346000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.800941805708971e-05, |
|
"loss": 0.6982, |
|
"step": 346500 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.7992115630043665e-05, |
|
"loss": 0.695, |
|
"step": 347000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.797481320299761e-05, |
|
"loss": 0.6963, |
|
"step": 347500 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.7957510775951566e-05, |
|
"loss": 0.6958, |
|
"step": 348000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.794020834890552e-05, |
|
"loss": 0.6963, |
|
"step": 348500 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.7922905921859475e-05, |
|
"loss": 0.6944, |
|
"step": 349000 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.7905603494813425e-05, |
|
"loss": 0.6924, |
|
"step": 349500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.7888301067767376e-05, |
|
"loss": 0.696, |
|
"step": 350000 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.7870998640721334e-05, |
|
"loss": 0.6968, |
|
"step": 350500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.785369621367529e-05, |
|
"loss": 0.6939, |
|
"step": 351000 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.7836393786629235e-05, |
|
"loss": 0.6961, |
|
"step": 351500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.781909135958319e-05, |
|
"loss": 0.6965, |
|
"step": 352000 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.7801788932537144e-05, |
|
"loss": 0.6946, |
|
"step": 352500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.77844865054911e-05, |
|
"loss": 0.6949, |
|
"step": 353000 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.776718407844506e-05, |
|
"loss": 0.6943, |
|
"step": 353500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.7749881651399e-05, |
|
"loss": 0.6971, |
|
"step": 354000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.773257922435296e-05, |
|
"loss": 0.6944, |
|
"step": 354500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.771527679730691e-05, |
|
"loss": 0.696, |
|
"step": 355000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.769797437026087e-05, |
|
"loss": 0.6944, |
|
"step": 355500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.768067194321482e-05, |
|
"loss": 0.6947, |
|
"step": 356000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.766336951616877e-05, |
|
"loss": 0.6968, |
|
"step": 356500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.764606708912273e-05, |
|
"loss": 0.6951, |
|
"step": 357000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.762876466207668e-05, |
|
"loss": 0.6945, |
|
"step": 357500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.7611462235030636e-05, |
|
"loss": 0.6945, |
|
"step": 358000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.759415980798459e-05, |
|
"loss": 0.6926, |
|
"step": 358500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.757685738093854e-05, |
|
"loss": 0.6965, |
|
"step": 359000 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.7559554953892496e-05, |
|
"loss": 0.6948, |
|
"step": 359500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.7542252526846446e-05, |
|
"loss": 0.6941, |
|
"step": 360000 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.75249500998004e-05, |
|
"loss": 0.6918, |
|
"step": 360500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.7507647672754355e-05, |
|
"loss": 0.6922, |
|
"step": 361000 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.7490345245708306e-05, |
|
"loss": 0.6945, |
|
"step": 361500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.747304281866226e-05, |
|
"loss": 0.6972, |
|
"step": 362000 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.7455740391616214e-05, |
|
"loss": 0.6923, |
|
"step": 362500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.7438437964570165e-05, |
|
"loss": 0.6946, |
|
"step": 363000 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.742113553752412e-05, |
|
"loss": 0.692, |
|
"step": 363500 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.740383311047807e-05, |
|
"loss": 0.6934, |
|
"step": 364000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.738653068343203e-05, |
|
"loss": 0.6938, |
|
"step": 364500 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.736922825638598e-05, |
|
"loss": 0.6944, |
|
"step": 365000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.735192582933993e-05, |
|
"loss": 0.6936, |
|
"step": 365500 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.733462340229389e-05, |
|
"loss": 0.6951, |
|
"step": 366000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.731732097524784e-05, |
|
"loss": 0.6938, |
|
"step": 366500 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.730001854820179e-05, |
|
"loss": 0.6928, |
|
"step": 367000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.728271612115575e-05, |
|
"loss": 0.6913, |
|
"step": 367500 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.72654136941097e-05, |
|
"loss": 0.6955, |
|
"step": 368000 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.724811126706366e-05, |
|
"loss": 0.6886, |
|
"step": 368500 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.723080884001761e-05, |
|
"loss": 0.6944, |
|
"step": 369000 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.721350641297156e-05, |
|
"loss": 0.6942, |
|
"step": 369500 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.7196203985925517e-05, |
|
"loss": 0.6946, |
|
"step": 370000 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.717890155887947e-05, |
|
"loss": 0.6934, |
|
"step": 370500 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.7161599131833425e-05, |
|
"loss": 0.6929, |
|
"step": 371000 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.7144296704787376e-05, |
|
"loss": 0.693, |
|
"step": 371500 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.7126994277741327e-05, |
|
"loss": 0.6918, |
|
"step": 372000 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.7109691850695284e-05, |
|
"loss": 0.6926, |
|
"step": 372500 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.7092389423649235e-05, |
|
"loss": 0.6937, |
|
"step": 373000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.7075086996603186e-05, |
|
"loss": 0.695, |
|
"step": 373500 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.705778456955714e-05, |
|
"loss": 0.6922, |
|
"step": 374000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.7040482142511094e-05, |
|
"loss": 0.6917, |
|
"step": 374500 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.702317971546505e-05, |
|
"loss": 0.6938, |
|
"step": 375000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.7005877288419e-05, |
|
"loss": 0.6909, |
|
"step": 375500 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.698857486137295e-05, |
|
"loss": 0.6942, |
|
"step": 376000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.697127243432691e-05, |
|
"loss": 0.6931, |
|
"step": 376500 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.695397000728086e-05, |
|
"loss": 0.6948, |
|
"step": 377000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.693666758023482e-05, |
|
"loss": 0.6937, |
|
"step": 377500 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.691936515318877e-05, |
|
"loss": 0.69, |
|
"step": 378000 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.690206272614272e-05, |
|
"loss": 0.691, |
|
"step": 378500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.688476029909668e-05, |
|
"loss": 0.6935, |
|
"step": 379000 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.686745787205063e-05, |
|
"loss": 0.6907, |
|
"step": 379500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.685015544500459e-05, |
|
"loss": 0.6969, |
|
"step": 380000 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.683285301795854e-05, |
|
"loss": 0.6874, |
|
"step": 380500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.681555059091249e-05, |
|
"loss": 0.6898, |
|
"step": 381000 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.6798248163866446e-05, |
|
"loss": 0.6952, |
|
"step": 381500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.67809457368204e-05, |
|
"loss": 0.6901, |
|
"step": 382000 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.676364330977435e-05, |
|
"loss": 0.6917, |
|
"step": 382500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.6746340882728305e-05, |
|
"loss": 0.6911, |
|
"step": 383000 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.6729038455682256e-05, |
|
"loss": 0.6894, |
|
"step": 383500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.6711736028636213e-05, |
|
"loss": 0.6884, |
|
"step": 384000 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.6694433601590164e-05, |
|
"loss": 0.6952, |
|
"step": 384500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.6677131174544115e-05, |
|
"loss": 0.6892, |
|
"step": 385000 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.665982874749807e-05, |
|
"loss": 0.6908, |
|
"step": 385500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.6642526320452023e-05, |
|
"loss": 0.6895, |
|
"step": 386000 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.662522389340598e-05, |
|
"loss": 0.6931, |
|
"step": 386500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.660792146635993e-05, |
|
"loss": 0.6886, |
|
"step": 387000 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.659061903931388e-05, |
|
"loss": 0.6914, |
|
"step": 387500 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.657331661226784e-05, |
|
"loss": 0.693, |
|
"step": 388000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.655601418522179e-05, |
|
"loss": 0.6895, |
|
"step": 388500 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.653871175817574e-05, |
|
"loss": 0.6906, |
|
"step": 389000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.65214093311297e-05, |
|
"loss": 0.6939, |
|
"step": 389500 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.650410690408365e-05, |
|
"loss": 0.6899, |
|
"step": 390000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.648680447703761e-05, |
|
"loss": 0.6899, |
|
"step": 390500 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.646950204999156e-05, |
|
"loss": 0.6866, |
|
"step": 391000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.645219962294551e-05, |
|
"loss": 0.6906, |
|
"step": 391500 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.643489719589947e-05, |
|
"loss": 0.6913, |
|
"step": 392000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.641759476885342e-05, |
|
"loss": 0.6874, |
|
"step": 392500 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.6400292341807375e-05, |
|
"loss": 0.6887, |
|
"step": 393000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.6382989914761326e-05, |
|
"loss": 0.6923, |
|
"step": 393500 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.636568748771528e-05, |
|
"loss": 0.6912, |
|
"step": 394000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.6348385060669234e-05, |
|
"loss": 0.6916, |
|
"step": 394500 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.6331082633623185e-05, |
|
"loss": 0.6883, |
|
"step": 395000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.6313780206577136e-05, |
|
"loss": 0.6907, |
|
"step": 395500 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.6296477779531094e-05, |
|
"loss": 0.688, |
|
"step": 396000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.6279175352485044e-05, |
|
"loss": 0.6935, |
|
"step": 396500 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.6261872925439e-05, |
|
"loss": 0.6899, |
|
"step": 397000 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.624457049839295e-05, |
|
"loss": 0.6912, |
|
"step": 397500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.6227268071346904e-05, |
|
"loss": 0.6924, |
|
"step": 398000 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.620996564430086e-05, |
|
"loss": 0.6874, |
|
"step": 398500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.619266321725481e-05, |
|
"loss": 0.6869, |
|
"step": 399000 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.617536079020877e-05, |
|
"loss": 0.6907, |
|
"step": 399500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.6158058363162713e-05, |
|
"loss": 0.6898, |
|
"step": 400000 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.614075593611667e-05, |
|
"loss": 0.6914, |
|
"step": 400500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.612345350907063e-05, |
|
"loss": 0.6892, |
|
"step": 401000 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.610615108202458e-05, |
|
"loss": 0.6897, |
|
"step": 401500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.608884865497854e-05, |
|
"loss": 0.6891, |
|
"step": 402000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.607154622793249e-05, |
|
"loss": 0.6895, |
|
"step": 402500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.605424380088644e-05, |
|
"loss": 0.6912, |
|
"step": 403000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.6036941373840396e-05, |
|
"loss": 0.6895, |
|
"step": 403500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.601963894679435e-05, |
|
"loss": 0.6892, |
|
"step": 404000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.60023365197483e-05, |
|
"loss": 0.6862, |
|
"step": 404500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.5985034092702255e-05, |
|
"loss": 0.6856, |
|
"step": 405000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.5967731665656206e-05, |
|
"loss": 0.6891, |
|
"step": 405500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.5950429238610164e-05, |
|
"loss": 0.6886, |
|
"step": 406000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.5933126811564114e-05, |
|
"loss": 0.6884, |
|
"step": 406500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.5915824384518065e-05, |
|
"loss": 0.6897, |
|
"step": 407000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.589852195747202e-05, |
|
"loss": 0.6923, |
|
"step": 407500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.5881219530425974e-05, |
|
"loss": 0.6864, |
|
"step": 408000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.586391710337993e-05, |
|
"loss": 0.6882, |
|
"step": 408500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.5846614676333875e-05, |
|
"loss": 0.6919, |
|
"step": 409000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.582931224928783e-05, |
|
"loss": 0.6871, |
|
"step": 409500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.581200982224179e-05, |
|
"loss": 0.6887, |
|
"step": 410000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.579470739519574e-05, |
|
"loss": 0.6882, |
|
"step": 410500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.577740496814969e-05, |
|
"loss": 0.688, |
|
"step": 411000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.576010254110364e-05, |
|
"loss": 0.6881, |
|
"step": 411500 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.57428001140576e-05, |
|
"loss": 0.6882, |
|
"step": 412000 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.572549768701156e-05, |
|
"loss": 0.6906, |
|
"step": 412500 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.570819525996551e-05, |
|
"loss": 0.6882, |
|
"step": 413000 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.569089283291946e-05, |
|
"loss": 0.687, |
|
"step": 413500 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.567359040587342e-05, |
|
"loss": 0.6896, |
|
"step": 414000 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.565628797882737e-05, |
|
"loss": 0.6911, |
|
"step": 414500 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.5638985551781325e-05, |
|
"loss": 0.6879, |
|
"step": 415000 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.562168312473527e-05, |
|
"loss": 0.6892, |
|
"step": 415500 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.560438069768923e-05, |
|
"loss": 0.6889, |
|
"step": 416000 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.5587078270643185e-05, |
|
"loss": 0.685, |
|
"step": 416500 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.5569775843597135e-05, |
|
"loss": 0.6835, |
|
"step": 417000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.555247341655109e-05, |
|
"loss": 0.6903, |
|
"step": 417500 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.553517098950504e-05, |
|
"loss": 0.6837, |
|
"step": 418000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.5517868562458995e-05, |
|
"loss": 0.6899, |
|
"step": 418500 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.550056613541295e-05, |
|
"loss": 0.6897, |
|
"step": 419000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.54832637083669e-05, |
|
"loss": 0.6879, |
|
"step": 419500 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.5465961281320854e-05, |
|
"loss": 0.6874, |
|
"step": 420000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.5448658854274805e-05, |
|
"loss": 0.6864, |
|
"step": 420500 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.543135642722876e-05, |
|
"loss": 0.6883, |
|
"step": 421000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.541405400018272e-05, |
|
"loss": 0.688, |
|
"step": 421500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.5396751573136664e-05, |
|
"loss": 0.683, |
|
"step": 422000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.537944914609062e-05, |
|
"loss": 0.6851, |
|
"step": 422500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.536214671904457e-05, |
|
"loss": 0.6862, |
|
"step": 423000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.534484429199853e-05, |
|
"loss": 0.6868, |
|
"step": 423500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.532754186495249e-05, |
|
"loss": 0.6852, |
|
"step": 424000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.531023943790643e-05, |
|
"loss": 0.6898, |
|
"step": 424500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.529293701086039e-05, |
|
"loss": 0.686, |
|
"step": 425000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.5275634583814346e-05, |
|
"loss": 0.69, |
|
"step": 425500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.52583321567683e-05, |
|
"loss": 0.6898, |
|
"step": 426000 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.524102972972225e-05, |
|
"loss": 0.6886, |
|
"step": 426500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.52237273026762e-05, |
|
"loss": 0.6886, |
|
"step": 427000 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.5206424875630156e-05, |
|
"loss": 0.6862, |
|
"step": 427500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.5189122448584114e-05, |
|
"loss": 0.6848, |
|
"step": 428000 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.5171820021538065e-05, |
|
"loss": 0.6854, |
|
"step": 428500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.5154517594492016e-05, |
|
"loss": 0.6862, |
|
"step": 429000 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.5137215167445966e-05, |
|
"loss": 0.687, |
|
"step": 429500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.5119912740399924e-05, |
|
"loss": 0.6928, |
|
"step": 430000 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.510261031335388e-05, |
|
"loss": 0.6883, |
|
"step": 430500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.5085307886307826e-05, |
|
"loss": 0.6858, |
|
"step": 431000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.506800545926178e-05, |
|
"loss": 0.6884, |
|
"step": 431500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.5050703032215734e-05, |
|
"loss": 0.6864, |
|
"step": 432000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.503340060516969e-05, |
|
"loss": 0.6876, |
|
"step": 432500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.501609817812364e-05, |
|
"loss": 0.6845, |
|
"step": 433000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.499879575107759e-05, |
|
"loss": 0.6896, |
|
"step": 433500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.498149332403155e-05, |
|
"loss": 0.685, |
|
"step": 434000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.49641908969855e-05, |
|
"loss": 0.6892, |
|
"step": 434500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.494688846993946e-05, |
|
"loss": 0.6848, |
|
"step": 435000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.492958604289341e-05, |
|
"loss": 0.686, |
|
"step": 435500 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.491228361584736e-05, |
|
"loss": 0.6869, |
|
"step": 436000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.489498118880132e-05, |
|
"loss": 0.6859, |
|
"step": 436500 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.4877678761755276e-05, |
|
"loss": 0.6869, |
|
"step": 437000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.486037633470922e-05, |
|
"loss": 0.6863, |
|
"step": 437500 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.484307390766318e-05, |
|
"loss": 0.6865, |
|
"step": 438000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.482577148061713e-05, |
|
"loss": 0.6875, |
|
"step": 438500 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.4808469053571086e-05, |
|
"loss": 0.6861, |
|
"step": 439000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.479116662652504e-05, |
|
"loss": 0.6882, |
|
"step": 439500 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.477386419947899e-05, |
|
"loss": 0.6852, |
|
"step": 440000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.4756561772432945e-05, |
|
"loss": 0.6836, |
|
"step": 440500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.4739259345386896e-05, |
|
"loss": 0.6832, |
|
"step": 441000 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.472195691834085e-05, |
|
"loss": 0.6867, |
|
"step": 441500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.4704654491294804e-05, |
|
"loss": 0.685, |
|
"step": 442000 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.4687352064248755e-05, |
|
"loss": 0.6858, |
|
"step": 442500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.467004963720271e-05, |
|
"loss": 0.6841, |
|
"step": 443000 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.465274721015666e-05, |
|
"loss": 0.6847, |
|
"step": 443500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.4635444783110614e-05, |
|
"loss": 0.6858, |
|
"step": 444000 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.461814235606457e-05, |
|
"loss": 0.6839, |
|
"step": 444500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.460083992901852e-05, |
|
"loss": 0.684, |
|
"step": 445000 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.458353750197248e-05, |
|
"loss": 0.6866, |
|
"step": 445500 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.456623507492643e-05, |
|
"loss": 0.6864, |
|
"step": 446000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.454893264788038e-05, |
|
"loss": 0.684, |
|
"step": 446500 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.453163022083434e-05, |
|
"loss": 0.6844, |
|
"step": 447000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.451432779378829e-05, |
|
"loss": 0.6871, |
|
"step": 447500 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.449702536674225e-05, |
|
"loss": 0.6847, |
|
"step": 448000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.44797229396962e-05, |
|
"loss": 0.6845, |
|
"step": 448500 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.446242051265015e-05, |
|
"loss": 0.6842, |
|
"step": 449000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.444511808560411e-05, |
|
"loss": 0.6866, |
|
"step": 449500 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.442781565855806e-05, |
|
"loss": 0.6845, |
|
"step": 450000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.4410513231512015e-05, |
|
"loss": 0.6835, |
|
"step": 450500 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.4393210804465966e-05, |
|
"loss": 0.6835, |
|
"step": 451000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.4375908377419917e-05, |
|
"loss": 0.6858, |
|
"step": 451500 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.4358605950373874e-05, |
|
"loss": 0.6825, |
|
"step": 452000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.4341303523327825e-05, |
|
"loss": 0.6871, |
|
"step": 452500 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.4324001096281776e-05, |
|
"loss": 0.688, |
|
"step": 453000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.430669866923573e-05, |
|
"loss": 0.6829, |
|
"step": 453500 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.4289396242189684e-05, |
|
"loss": 0.6863, |
|
"step": 454000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.427209381514364e-05, |
|
"loss": 0.6852, |
|
"step": 454500 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.425479138809759e-05, |
|
"loss": 0.6884, |
|
"step": 455000 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.423748896105154e-05, |
|
"loss": 0.6869, |
|
"step": 455500 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.42201865340055e-05, |
|
"loss": 0.6827, |
|
"step": 456000 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.420288410695945e-05, |
|
"loss": 0.6814, |
|
"step": 456500 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.418558167991341e-05, |
|
"loss": 0.6826, |
|
"step": 457000 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.416827925286736e-05, |
|
"loss": 0.6849, |
|
"step": 457500 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.415097682582131e-05, |
|
"loss": 0.6824, |
|
"step": 458000 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.413367439877527e-05, |
|
"loss": 0.683, |
|
"step": 458500 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.411637197172922e-05, |
|
"loss": 0.682, |
|
"step": 459000 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.409906954468317e-05, |
|
"loss": 0.6837, |
|
"step": 459500 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.408176711763713e-05, |
|
"loss": 0.6811, |
|
"step": 460000 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.406446469059108e-05, |
|
"loss": 0.6821, |
|
"step": 460500 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.4047162263545036e-05, |
|
"loss": 0.6816, |
|
"step": 461000 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.402985983649899e-05, |
|
"loss": 0.6826, |
|
"step": 461500 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.401255740945294e-05, |
|
"loss": 0.6811, |
|
"step": 462000 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.3995254982406895e-05, |
|
"loss": 0.6849, |
|
"step": 462500 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.3977952555360846e-05, |
|
"loss": 0.6855, |
|
"step": 463000 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.3960650128314803e-05, |
|
"loss": 0.6849, |
|
"step": 463500 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.3943347701268754e-05, |
|
"loss": 0.6831, |
|
"step": 464000 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.3926045274222705e-05, |
|
"loss": 0.6838, |
|
"step": 464500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.390874284717666e-05, |
|
"loss": 0.6798, |
|
"step": 465000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.3891440420130613e-05, |
|
"loss": 0.6813, |
|
"step": 465500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.3874137993084564e-05, |
|
"loss": 0.6801, |
|
"step": 466000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.385683556603852e-05, |
|
"loss": 0.6828, |
|
"step": 466500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.383953313899247e-05, |
|
"loss": 0.6789, |
|
"step": 467000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.382223071194643e-05, |
|
"loss": 0.68, |
|
"step": 467500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.380492828490038e-05, |
|
"loss": 0.6828, |
|
"step": 468000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.378762585785433e-05, |
|
"loss": 0.6842, |
|
"step": 468500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.377032343080829e-05, |
|
"loss": 0.6823, |
|
"step": 469000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.375302100376224e-05, |
|
"loss": 0.6833, |
|
"step": 469500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.37357185767162e-05, |
|
"loss": 0.684, |
|
"step": 470000 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.371841614967015e-05, |
|
"loss": 0.6805, |
|
"step": 470500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.37011137226241e-05, |
|
"loss": 0.6838, |
|
"step": 471000 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.368381129557806e-05, |
|
"loss": 0.6798, |
|
"step": 471500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.366650886853201e-05, |
|
"loss": 0.681, |
|
"step": 472000 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.3649206441485965e-05, |
|
"loss": 0.6826, |
|
"step": 472500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.3631904014439916e-05, |
|
"loss": 0.6838, |
|
"step": 473000 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.361460158739387e-05, |
|
"loss": 0.6811, |
|
"step": 473500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.3597299160347824e-05, |
|
"loss": 0.6824, |
|
"step": 474000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.3579996733301775e-05, |
|
"loss": 0.6816, |
|
"step": 474500 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.3562694306255726e-05, |
|
"loss": 0.6807, |
|
"step": 475000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.3545391879209684e-05, |
|
"loss": 0.6834, |
|
"step": 475500 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.3528089452163634e-05, |
|
"loss": 0.6829, |
|
"step": 476000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.351078702511759e-05, |
|
"loss": 0.6812, |
|
"step": 476500 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.349348459807154e-05, |
|
"loss": 0.6841, |
|
"step": 477000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.3476182171025494e-05, |
|
"loss": 0.6838, |
|
"step": 477500 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.345887974397945e-05, |
|
"loss": 0.681, |
|
"step": 478000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.34415773169334e-05, |
|
"loss": 0.6794, |
|
"step": 478500 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.342427488988736e-05, |
|
"loss": 0.6809, |
|
"step": 479000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.340697246284131e-05, |
      "loss": 0.6832,
      "step": 479500
    },
    {"epoch": 1.0, "learning_rate": 3.338967003579526e-05, "loss": 0.6812, "step": 480000},
    {"epoch": 1.0, "learning_rate": 3.337236760874922e-05, "loss": 0.683, "step": 480500},
    {"epoch": 1.0, "learning_rate": 3.335506518170317e-05, "loss": 0.6796, "step": 481000},
    {"epoch": 1.0, "learning_rate": 3.333776275465712e-05, "loss": 0.6828, "step": 481500},
    {"epoch": 1.0, "learning_rate": 3.332046032761108e-05, "loss": 0.6783, "step": 482000},
    {"epoch": 1.0, "learning_rate": 3.330315790056503e-05, "loss": 0.6784, "step": 482500},
    {"epoch": 1.0, "learning_rate": 3.3285855473518986e-05, "loss": 0.6806, "step": 483000},
    {"epoch": 1.0, "learning_rate": 3.326855304647294e-05, "loss": 0.6821, "step": 483500},
    {"epoch": 1.0, "learning_rate": 3.325125061942689e-05, "loss": 0.678, "step": 484000},
    {"epoch": 1.01, "learning_rate": 3.3233948192380845e-05, "loss": 0.6828, "step": 484500},
    {"epoch": 1.01, "learning_rate": 3.3216645765334796e-05, "loss": 0.6796, "step": 485000},
    {"epoch": 1.01, "learning_rate": 3.3199343338288754e-05, "loss": 0.6801, "step": 485500},
    {"epoch": 1.01, "learning_rate": 3.31820409112427e-05, "loss": 0.6773, "step": 486000},
    {"epoch": 1.01, "learning_rate": 3.3164738484196655e-05, "loss": 0.676, "step": 486500},
    {"epoch": 1.01, "learning_rate": 3.314743605715061e-05, "loss": 0.676, "step": 487000},
    {"epoch": 1.01, "learning_rate": 3.3130133630104564e-05, "loss": 0.681, "step": 487500},
    {"epoch": 1.01, "learning_rate": 3.311283120305852e-05, "loss": 0.6802, "step": 488000},
    {"epoch": 1.01, "learning_rate": 3.309552877601247e-05, "loss": 0.6765, "step": 488500},
    {"epoch": 1.02, "learning_rate": 3.307822634896642e-05, "loss": 0.6775, "step": 489000},
    {"epoch": 1.02, "learning_rate": 3.306092392192038e-05, "loss": 0.6824, "step": 489500},
    {"epoch": 1.02, "learning_rate": 3.304362149487433e-05, "loss": 0.6765, "step": 490000},
    {"epoch": 1.02, "learning_rate": 3.302631906782828e-05, "loss": 0.6804, "step": 490500},
    {"epoch": 1.02, "learning_rate": 3.300901664078224e-05, "loss": 0.6786, "step": 491000},
    {"epoch": 1.02, "learning_rate": 3.299171421373619e-05, "loss": 0.6803, "step": 491500},
    {"epoch": 1.02, "learning_rate": 3.297441178669015e-05, "loss": 0.6787, "step": 492000},
    {"epoch": 1.02, "learning_rate": 3.295710935964409e-05, "loss": 0.682, "step": 492500},
    {"epoch": 1.02, "learning_rate": 3.293980693259805e-05, "loss": 0.6748, "step": 493000},
    {"epoch": 1.02, "learning_rate": 3.292250450555201e-05, "loss": 0.6849, "step": 493500},
    {"epoch": 1.03, "learning_rate": 3.290520207850596e-05, "loss": 0.6794, "step": 494000},
    {"epoch": 1.03, "learning_rate": 3.2887899651459916e-05, "loss": 0.6777, "step": 494500},
    {"epoch": 1.03, "learning_rate": 3.287059722441386e-05, "loss": 0.6768, "step": 495000},
    {"epoch": 1.03, "learning_rate": 3.285329479736782e-05, "loss": 0.6816, "step": 495500},
    {"epoch": 1.03, "learning_rate": 3.2835992370321775e-05, "loss": 0.6757, "step": 496000},
    {"epoch": 1.03, "learning_rate": 3.2818689943275725e-05, "loss": 0.6771, "step": 496500},
    {"epoch": 1.03, "learning_rate": 3.2801387516229676e-05, "loss": 0.6794, "step": 497000},
    {"epoch": 1.03, "learning_rate": 3.278408508918363e-05, "loss": 0.6757, "step": 497500},
    {"epoch": 1.03, "learning_rate": 3.2766782662137585e-05, "loss": 0.6783, "step": 498000},
    {"epoch": 1.04, "learning_rate": 3.274948023509154e-05, "loss": 0.6833, "step": 498500},
    {"epoch": 1.04, "learning_rate": 3.273217780804549e-05, "loss": 0.6767, "step": 499000},
    {"epoch": 1.04, "learning_rate": 3.2714875380999444e-05, "loss": 0.6809, "step": 499500},
    {"epoch": 1.04, "learning_rate": 3.26975729539534e-05, "loss": 0.6789, "step": 500000},
    {"epoch": 1.04, "learning_rate": 3.268027052690735e-05, "loss": 0.6788, "step": 500500},
    {"epoch": 1.04, "learning_rate": 3.266296809986131e-05, "loss": 0.6774, "step": 501000},
    {"epoch": 1.04, "learning_rate": 3.2645665672815254e-05, "loss": 0.6786, "step": 501500},
    {"epoch": 1.04, "learning_rate": 3.262836324576921e-05, "loss": 0.676, "step": 502000},
    {"epoch": 1.04, "learning_rate": 3.261106081872317e-05, "loss": 0.6776, "step": 502500},
    {"epoch": 1.04, "learning_rate": 3.259375839167712e-05, "loss": 0.6806, "step": 503000},
    {"epoch": 1.05, "learning_rate": 3.257645596463107e-05, "loss": 0.6774, "step": 503500},
    {"epoch": 1.05, "learning_rate": 3.255915353758502e-05, "loss": 0.6787, "step": 504000},
    {"epoch": 1.05, "learning_rate": 3.254185111053898e-05, "loss": 0.6807, "step": 504500},
    {"epoch": 1.05, "learning_rate": 3.2524548683492936e-05, "loss": 0.6822, "step": 505000},
    {"epoch": 1.05, "learning_rate": 3.250724625644689e-05, "loss": 0.6801, "step": 505500},
    {"epoch": 1.05, "learning_rate": 3.248994382940084e-05, "loss": 0.6781, "step": 506000},
    {"epoch": 1.05, "learning_rate": 3.247264140235479e-05, "loss": 0.6755, "step": 506500},
    {"epoch": 1.05, "learning_rate": 3.2455338975308746e-05, "loss": 0.6797, "step": 507000},
    {"epoch": 1.05, "learning_rate": 3.2438036548262704e-05, "loss": 0.6782, "step": 507500},
    {"epoch": 1.05, "learning_rate": 3.242073412121665e-05, "loss": 0.6748, "step": 508000},
    {"epoch": 1.06, "learning_rate": 3.2403431694170606e-05, "loss": 0.6828, "step": 508500},
    {"epoch": 1.06, "learning_rate": 3.2386129267124556e-05, "loss": 0.6777, "step": 509000},
    {"epoch": 1.06, "learning_rate": 3.2368826840078514e-05, "loss": 0.6759, "step": 509500},
    {"epoch": 1.06, "learning_rate": 3.235152441303247e-05, "loss": 0.6751, "step": 510000},
    {"epoch": 1.06, "learning_rate": 3.2334221985986416e-05, "loss": 0.6766, "step": 510500},
    {"epoch": 1.06, "learning_rate": 3.231691955894037e-05, "loss": 0.6756, "step": 511000},
    {"epoch": 1.06, "learning_rate": 3.229961713189433e-05, "loss": 0.6813, "step": 511500},
    {"epoch": 1.06, "learning_rate": 3.228231470484828e-05, "loss": 0.6765, "step": 512000},
    {"epoch": 1.06, "learning_rate": 3.226501227780223e-05, "loss": 0.6756, "step": 512500},
    {"epoch": 1.07, "learning_rate": 3.224770985075618e-05, "loss": 0.6762, "step": 513000},
    {"epoch": 1.07, "learning_rate": 3.223040742371014e-05, "loss": 0.674, "step": 513500},
    {"epoch": 1.07, "learning_rate": 3.22131049966641e-05, "loss": 0.677, "step": 514000},
    {"epoch": 1.07, "learning_rate": 3.219580256961804e-05, "loss": 0.6753, "step": 514500},
    {"epoch": 1.07, "learning_rate": 3.2178500142572e-05, "loss": 0.6803, "step": 515000},
    {"epoch": 1.07, "learning_rate": 3.216119771552595e-05, "loss": 0.6768, "step": 515500},
    {"epoch": 1.07, "learning_rate": 3.214389528847991e-05, "loss": 0.6785, "step": 516000},
    {"epoch": 1.07, "learning_rate": 3.2126592861433866e-05, "loss": 0.6769, "step": 516500},
    {"epoch": 1.07, "learning_rate": 3.210929043438781e-05, "loss": 0.6724, "step": 517000},
    {"epoch": 1.07, "learning_rate": 3.209198800734177e-05, "loss": 0.6771, "step": 517500},
    {"epoch": 1.08, "learning_rate": 3.207468558029572e-05, "loss": 0.6779, "step": 518000},
    {"epoch": 1.08, "learning_rate": 3.2057383153249676e-05, "loss": 0.6746, "step": 518500},
    {"epoch": 1.08, "learning_rate": 3.2040080726203627e-05, "loss": 0.6778, "step": 519000},
    {"epoch": 1.08, "learning_rate": 3.202277829915758e-05, "loss": 0.6775, "step": 519500},
    {"epoch": 1.08, "learning_rate": 3.2005475872111535e-05, "loss": 0.676, "step": 520000},
    {"epoch": 1.08, "learning_rate": 3.1988173445065486e-05, "loss": 0.6789, "step": 520500},
    {"epoch": 1.08, "learning_rate": 3.197087101801944e-05, "loss": 0.6768, "step": 521000},
    {"epoch": 1.08, "learning_rate": 3.1953568590973394e-05, "loss": 0.6766, "step": 521500},
    {"epoch": 1.08, "learning_rate": 3.1936266163927345e-05, "loss": 0.6744, "step": 522000},
    {"epoch": 1.08, "learning_rate": 3.19189637368813e-05, "loss": 0.6772, "step": 522500},
    {"epoch": 1.09, "learning_rate": 3.190166130983526e-05, "loss": 0.6766, "step": 523000},
    {"epoch": 1.09, "learning_rate": 3.1884358882789204e-05, "loss": 0.6766, "step": 523500},
    {"epoch": 1.09, "learning_rate": 3.186705645574316e-05, "loss": 0.675, "step": 524000},
    {"epoch": 1.09, "learning_rate": 3.184975402869711e-05, "loss": 0.6759, "step": 524500},
    {"epoch": 1.09, "learning_rate": 3.183245160165107e-05, "loss": 0.6753, "step": 525000},
    {"epoch": 1.09, "learning_rate": 3.181514917460502e-05, "loss": 0.6825, "step": 525500},
    {"epoch": 1.09, "learning_rate": 3.179784674755897e-05, "loss": 0.6761, "step": 526000},
    {"epoch": 1.09, "learning_rate": 3.178054432051293e-05, "loss": 0.6761, "step": 526500},
    {"epoch": 1.09, "learning_rate": 3.176324189346688e-05, "loss": 0.678, "step": 527000},
    {"epoch": 1.1, "learning_rate": 3.174593946642084e-05, "loss": 0.6751, "step": 527500},
    {"epoch": 1.1, "learning_rate": 3.172863703937479e-05, "loss": 0.6766, "step": 528000},
    {"epoch": 1.1, "learning_rate": 3.171133461232874e-05, "loss": 0.678, "step": 528500},
    {"epoch": 1.1, "learning_rate": 3.16940321852827e-05, "loss": 0.6762, "step": 529000},
    {"epoch": 1.1, "learning_rate": 3.167672975823665e-05, "loss": 0.6756, "step": 529500},
    {"epoch": 1.1, "learning_rate": 3.16594273311906e-05, "loss": 0.6774, "step": 530000},
    {"epoch": 1.1, "learning_rate": 3.1642124904144556e-05, "loss": 0.6753, "step": 530500},
    {"epoch": 1.1, "learning_rate": 3.162482247709851e-05, "loss": 0.6745, "step": 531000},
    {"epoch": 1.1, "learning_rate": 3.1607520050052464e-05, "loss": 0.677, "step": 531500},
    {"epoch": 1.1, "learning_rate": 3.1590217623006415e-05, "loss": 0.677, "step": 532000},
    {"epoch": 1.11, "learning_rate": 3.1572915195960366e-05, "loss": 0.6769, "step": 532500},
    {"epoch": 1.11, "learning_rate": 3.1555612768914323e-05, "loss": 0.6753, "step": 533000},
    {"epoch": 1.11, "learning_rate": 3.1538310341868274e-05, "loss": 0.6764, "step": 533500},
    {"epoch": 1.11, "learning_rate": 3.152100791482223e-05, "loss": 0.677, "step": 534000},
    {"epoch": 1.11, "learning_rate": 3.150370548777618e-05, "loss": 0.6775, "step": 534500},
    {"epoch": 1.11, "learning_rate": 3.148640306073013e-05, "loss": 0.6759, "step": 535000},
    {"epoch": 1.11, "learning_rate": 3.146910063368409e-05, "loss": 0.6793, "step": 535500},
    {"epoch": 1.11, "learning_rate": 3.145179820663804e-05, "loss": 0.6747, "step": 536000},
    {"epoch": 1.11, "learning_rate": 3.1434495779592e-05, "loss": 0.6757, "step": 536500},
    {"epoch": 1.11, "learning_rate": 3.141719335254595e-05, "loss": 0.6767, "step": 537000},
    {"epoch": 1.12, "learning_rate": 3.13998909254999e-05, "loss": 0.6725, "step": 537500},
    {"epoch": 1.12, "learning_rate": 3.138258849845386e-05, "loss": 0.6763, "step": 538000},
    {"epoch": 1.12, "learning_rate": 3.136528607140781e-05, "loss": 0.678, "step": 538500},
    {"epoch": 1.12, "learning_rate": 3.134798364436176e-05, "loss": 0.6779, "step": 539000},
    {"epoch": 1.12, "learning_rate": 3.133068121731572e-05, "loss": 0.6758, "step": 539500},
    {"epoch": 1.12, "learning_rate": 3.131337879026967e-05, "loss": 0.6775, "step": 540000},
    {"epoch": 1.12, "learning_rate": 3.1296076363223626e-05, "loss": 0.673, "step": 540500},
    {"epoch": 1.12, "learning_rate": 3.127877393617758e-05, "loss": 0.6738, "step": 541000},
    {"epoch": 1.12, "learning_rate": 3.126147150913153e-05, "loss": 0.6734, "step": 541500},
    {"epoch": 1.13, "learning_rate": 3.1244169082085485e-05, "loss": 0.6748, "step": 542000},
    {"epoch": 1.13, "learning_rate": 3.1226866655039436e-05, "loss": 0.6771, "step": 542500},
    {"epoch": 1.13, "learning_rate": 3.1209564227993394e-05, "loss": 0.6727, "step": 543000},
    {"epoch": 1.13, "learning_rate": 3.1192261800947344e-05, "loss": 0.6752, "step": 543500},
    {"epoch": 1.13, "learning_rate": 3.1174959373901295e-05, "loss": 0.6759, "step": 544000},
    {"epoch": 1.13, "learning_rate": 3.115765694685525e-05, "loss": 0.6774, "step": 544500},
    {"epoch": 1.13, "learning_rate": 3.1140354519809204e-05, "loss": 0.676, "step": 545000},
    {"epoch": 1.13, "learning_rate": 3.1123052092763154e-05, "loss": 0.6754, "step": 545500},
    {"epoch": 1.13, "learning_rate": 3.110574966571711e-05, "loss": 0.6734, "step": 546000},
    {"epoch": 1.13, "learning_rate": 3.108844723867106e-05, "loss": 0.6748, "step": 546500},
    {"epoch": 1.14, "learning_rate": 3.107114481162502e-05, "loss": 0.6757, "step": 547000},
    {"epoch": 1.14, "learning_rate": 3.105384238457897e-05, "loss": 0.6767, "step": 547500},
    {"epoch": 1.14, "learning_rate": 3.103653995753292e-05, "loss": 0.6741, "step": 548000},
    {"epoch": 1.14, "learning_rate": 3.101923753048688e-05, "loss": 0.6732, "step": 548500},
    {"epoch": 1.14, "learning_rate": 3.100193510344083e-05, "loss": 0.6744, "step": 549000},
    {"epoch": 1.14, "learning_rate": 3.098463267639479e-05, "loss": 0.6791, "step": 549500},
    {"epoch": 1.14, "learning_rate": 3.096733024934874e-05, "loss": 0.6769, "step": 550000},
    {"epoch": 1.14, "learning_rate": 3.095002782230269e-05, "loss": 0.6755, "step": 550500},
    {"epoch": 1.14, "learning_rate": 3.093272539525665e-05, "loss": 0.6746, "step": 551000},
    {"epoch": 1.15, "learning_rate": 3.09154229682106e-05, "loss": 0.6745, "step": 551500},
    {"epoch": 1.15, "learning_rate": 3.089812054116455e-05, "loss": 0.6774, "step": 552000},
    {"epoch": 1.15, "learning_rate": 3.0880818114118506e-05, "loss": 0.6768, "step": 552500},
    {"epoch": 1.15, "learning_rate": 3.086351568707246e-05, "loss": 0.6749, "step": 553000},
    {"epoch": 1.15, "learning_rate": 3.0846213260026415e-05, "loss": 0.6747, "step": 553500},
    {"epoch": 1.15, "learning_rate": 3.0828910832980365e-05, "loss": 0.6756, "step": 554000},
    {"epoch": 1.15, "learning_rate": 3.0811608405934316e-05, "loss": 0.6739, "step": 554500},
    {"epoch": 1.15, "learning_rate": 3.0794305978888274e-05, "loss": 0.6742, "step": 555000},
    {"epoch": 1.15, "learning_rate": 3.0777003551842224e-05, "loss": 0.6781, "step": 555500},
    {"epoch": 1.15, "learning_rate": 3.075970112479618e-05, "loss": 0.6733, "step": 556000},
    {"epoch": 1.16, "learning_rate": 3.074239869775013e-05, "loss": 0.6763, "step": 556500},
    {"epoch": 1.16, "learning_rate": 3.0725096270704084e-05, "loss": 0.6769, "step": 557000},
    {"epoch": 1.16, "learning_rate": 3.070779384365804e-05, "loss": 0.6755, "step": 557500},
    {"epoch": 1.16, "learning_rate": 3.069049141661199e-05, "loss": 0.6722, "step": 558000},
    {"epoch": 1.16, "learning_rate": 3.067318898956595e-05, "loss": 0.6759, "step": 558500},
    {"epoch": 1.16, "learning_rate": 3.06558865625199e-05, "loss": 0.6716, "step": 559000},
    {"epoch": 1.16, "learning_rate": 3.063858413547385e-05, "loss": 0.6743, "step": 559500},
    {"epoch": 1.16, "learning_rate": 3.062128170842781e-05, "loss": 0.675, "step": 560000},
    {"epoch": 1.16, "learning_rate": 3.060397928138176e-05, "loss": 0.6733, "step": 560500},
    {"epoch": 1.16, "learning_rate": 3.058667685433571e-05, "loss": 0.6749, "step": 561000},
    {"epoch": 1.17, "learning_rate": 3.056937442728967e-05, "loss": 0.6767, "step": 561500},
    {"epoch": 1.17, "learning_rate": 3.055207200024362e-05, "loss": 0.6743, "step": 562000},
    {"epoch": 1.17, "learning_rate": 3.0534769573197576e-05, "loss": 0.675, "step": 562500},
    {"epoch": 1.17, "learning_rate": 3.0517467146151524e-05, "loss": 0.6742, "step": 563000},
    {"epoch": 1.17, "learning_rate": 3.0500164719105478e-05, "loss": 0.6753, "step": 563500},
    {"epoch": 1.17, "learning_rate": 3.0482862292059432e-05, "loss": 0.6729, "step": 564000},
    {"epoch": 1.17, "learning_rate": 3.0465559865013386e-05, "loss": 0.6728, "step": 564500},
    {"epoch": 1.17, "learning_rate": 3.0448257437967344e-05, "loss": 0.6763, "step": 565000},
    {"epoch": 1.17, "learning_rate": 3.043095501092129e-05, "loss": 0.6706, "step": 565500},
    {"epoch": 1.18, "learning_rate": 3.0413652583875245e-05, "loss": 0.6726, "step": 566000},
    {"epoch": 1.18, "learning_rate": 3.03963501568292e-05, "loss": 0.6739, "step": 566500},
    {"epoch": 1.18, "learning_rate": 3.0379047729783157e-05, "loss": 0.6711, "step": 567000},
    {"epoch": 1.18, "learning_rate": 3.0361745302737105e-05, "loss": 0.6759, "step": 567500},
    {"epoch": 1.18, "learning_rate": 3.034444287569106e-05, "loss": 0.6739, "step": 568000},
    {"epoch": 1.18, "learning_rate": 3.0327140448645013e-05, "loss": 0.6722, "step": 568500},
    {"epoch": 1.18, "learning_rate": 3.0309838021598967e-05, "loss": 0.6746, "step": 569000},
    {"epoch": 1.18, "learning_rate": 3.0292535594552925e-05, "loss": 0.6759, "step": 569500},
    {"epoch": 1.18, "learning_rate": 3.0275233167506872e-05, "loss": 0.6742, "step": 570000},
    {"epoch": 1.18, "learning_rate": 3.0257930740460826e-05, "loss": 0.6727, "step": 570500},
    {"epoch": 1.19, "learning_rate": 3.024062831341478e-05, "loss": 0.6747, "step": 571000},
    {"epoch": 1.19, "learning_rate": 3.0223325886368735e-05, "loss": 0.6721, "step": 571500},
    {"epoch": 1.19, "learning_rate": 3.0206023459322685e-05, "loss": 0.6725, "step": 572000},
    {"epoch": 1.19, "learning_rate": 3.018872103227664e-05, "loss": 0.6743, "step": 572500},
    {"epoch": 1.19, "learning_rate": 3.0171418605230594e-05, "loss": 0.6743, "step": 573000},
    {"epoch": 1.19, "learning_rate": 3.0154116178184548e-05, "loss": 0.6712, "step": 573500},
    {"epoch": 1.19, "learning_rate": 3.01368137511385e-05, "loss": 0.6744, "step": 574000},
    {"epoch": 1.19, "learning_rate": 3.0119511324092453e-05, "loss": 0.6753, "step": 574500},
    {"epoch": 1.19, "learning_rate": 3.0102208897046407e-05, "loss": 0.6773, "step": 575000},
    {"epoch": 1.19, "learning_rate": 3.008490647000036e-05, "loss": 0.6741, "step": 575500},
    {"epoch": 1.2, "learning_rate": 3.0067604042954316e-05, "loss": 0.6728, "step": 576000},
    {"epoch": 1.2, "learning_rate": 3.0050301615908266e-05, "loss": 0.6735, "step": 576500},
    {"epoch": 1.2, "learning_rate": 3.003299918886222e-05, "loss": 0.6704, "step": 577000},
    {"epoch": 1.2, "learning_rate": 3.0015696761816175e-05, "loss": 0.6722, "step": 577500},
    {"epoch": 1.2, "learning_rate": 2.999839433477013e-05, "loss": 0.6726, "step": 578000},
    {"epoch": 1.2, "learning_rate": 2.998109190772408e-05, "loss": 0.6737, "step": 578500},
    {"epoch": 1.2, "learning_rate": 2.9963789480678034e-05, "loss": 0.6704, "step": 579000},
    {"epoch": 1.2, "learning_rate": 2.9946487053631988e-05, "loss": 0.6691, "step": 579500},
    {"epoch": 1.2, "learning_rate": 2.9929184626585942e-05, "loss": 0.6715, "step": 580000},
    {"epoch": 1.21, "learning_rate": 2.9911882199539896e-05, "loss": 0.6752, "step": 580500},
    {"epoch": 1.21, "learning_rate": 2.9894579772493847e-05, "loss": 0.6714, "step": 581000},
    {"epoch": 1.21, "learning_rate": 2.98772773454478e-05, "loss": 0.6705, "step": 581500},
    {"epoch": 1.21, "learning_rate": 2.9859974918401756e-05, "loss": 0.6708, "step": 582000},
    {"epoch": 1.21, "learning_rate": 2.984267249135571e-05, "loss": 0.6747, "step": 582500},
    {"epoch": 1.21, "learning_rate": 2.982537006430966e-05, "loss": 0.6695, "step": 583000},
    {"epoch": 1.21, "learning_rate": 2.9808067637263615e-05, "loss": 0.6729, "step": 583500},
    {"epoch": 1.21, "learning_rate": 2.979076521021757e-05, "loss": 0.6741, "step": 584000},
    {"epoch": 1.21, "learning_rate": 2.9773462783171523e-05, "loss": 0.6695, "step": 584500},
    {"epoch": 1.21, "learning_rate": 2.9756160356125474e-05, "loss": 0.6723, "step": 585000},
    {"epoch": 1.22, "learning_rate": 2.9738857929079428e-05, "loss": 0.6745, "step": 585500},
    {"epoch": 1.22, "learning_rate": 2.9721555502033382e-05, "loss": 0.6723, "step": 586000},
    {"epoch": 1.22, "learning_rate": 2.9704253074987337e-05, "loss": 0.6718, "step": 586500},
    {"epoch": 1.22, "learning_rate": 2.968695064794129e-05, "loss": 0.6698, "step": 587000},
    {"epoch": 1.22, "learning_rate": 2.966964822089524e-05, "loss": 0.6734, "step": 587500},
    {"epoch": 1.22, "learning_rate": 2.9652345793849196e-05, "loss": 0.6706, "step": 588000},
    {"epoch": 1.22, "learning_rate": 2.963504336680315e-05, "loss": 0.6716, "step": 588500},
    {"epoch": 1.22, "learning_rate": 2.9617740939757104e-05, "loss": 0.6751, "step": 589000},
    {"epoch": 1.22, "learning_rate": 2.9600438512711055e-05, "loss": 0.6726, "step": 589500},
    {"epoch": 1.23, "learning_rate": 2.958313608566501e-05, "loss": 0.6703, "step": 590000},
    {"epoch": 1.23, "learning_rate": 2.9565833658618963e-05, "loss": 0.6726, "step": 590500},
    {"epoch": 1.23, "learning_rate": 2.9548531231572917e-05, "loss": 0.6756, "step": 591000},
    {"epoch": 1.23, "learning_rate": 2.953122880452687e-05, "loss": 0.671, "step": 591500},
    {"epoch": 1.23, "learning_rate": 2.9513926377480822e-05, "loss": 0.6724, "step": 592000},
    {"epoch": 1.23, "learning_rate": 2.9496623950434777e-05, "loss": 0.6687, "step": 592500},
    {"epoch": 1.23, "learning_rate": 2.947932152338873e-05, "loss": 0.6694, "step": 593000},
    {"epoch": 1.23, "learning_rate": 2.9462019096342685e-05, "loss": 0.6742, "step": 593500},
    {"epoch": 1.23, "learning_rate": 2.9444716669296636e-05, "loss": 0.6729, "step": 594000},
    {"epoch": 1.23, "learning_rate": 2.942741424225059e-05, "loss": 0.6678, "step": 594500},
    {"epoch": 1.24, "learning_rate": 2.9410111815204544e-05, "loss": 0.6731, "step": 595000},
    {"epoch": 1.24, "learning_rate": 2.9392809388158498e-05, "loss": 0.6756, "step": 595500},
    {"epoch": 1.24, "learning_rate": 2.937550696111245e-05, "loss": 0.671, "step": 596000},
    {"epoch": 1.24, "learning_rate": 2.9358204534066403e-05, "loss": 0.6738, "step": 596500},
    {"epoch": 1.24, "learning_rate": 2.9340902107020357e-05, "loss": 0.6728, "step": 597000},
    {"epoch": 1.24, "learning_rate": 2.932359967997431e-05, "loss": 0.6713, "step": 597500},
    {"epoch": 1.24, "learning_rate": 2.9306297252928266e-05, "loss": 0.6709, "step": 598000},
    {"epoch": 1.24, "learning_rate": 2.9288994825882217e-05, "loss": 0.6685, "step": 598500},
    {"epoch": 1.24, "learning_rate": 2.927169239883617e-05, "loss": 0.6708, "step": 599000},
    {"epoch": 1.24, "learning_rate": 2.9254389971790125e-05, "loss": 0.6712, "step": 599500},
    {"epoch": 1.25, "learning_rate": 2.923708754474408e-05, "loss": 0.6741, "step": 600000},
    {"epoch": 1.25, "learning_rate": 2.921978511769803e-05, "loss": 0.671, "step": 600500},
    {"epoch": 1.25, "learning_rate": 2.9202482690651984e-05, "loss": 0.669, "step": 601000},
    {"epoch": 1.25, "learning_rate": 2.918518026360594e-05, "loss": 0.6739, "step": 601500},
    {"epoch": 1.25, "learning_rate": 2.9167877836559893e-05, "loss": 0.6737, "step": 602000},
    {"epoch": 1.25, "learning_rate": 2.9150575409513847e-05, "loss": 0.6731, "step": 602500},
    {"epoch": 1.25, "learning_rate": 2.9133272982467798e-05, "loss": 0.6736, "step": 603000},
    {"epoch": 1.25, "learning_rate": 2.911597055542175e-05, "loss": 0.6722, "step": 603500},
    {"epoch": 1.25, "learning_rate": 2.9098668128375706e-05, "loss": 0.6697, "step": 604000},
    {"epoch": 1.26, "learning_rate": 2.908136570132966e-05, "loss": 0.6733, "step": 604500},
    {"epoch": 1.26, "learning_rate": 2.906406327428361e-05, "loss": 0.6717, "step": 605000},
    {"epoch": 1.26, "learning_rate": 2.9046760847237565e-05, "loss": 0.6723, "step": 605500},
    {"epoch": 1.26, "learning_rate": 2.902945842019152e-05, "loss": 0.6717, "step": 606000},
    {"epoch": 1.26, "learning_rate": 2.9012155993145473e-05, "loss": 0.6708, "step": 606500},
    {"epoch": 1.26, "learning_rate": 2.8994853566099428e-05, "loss": 0.6729, "step": 607000},
    {"epoch": 1.26, "learning_rate": 2.897755113905338e-05, "loss": 0.6695, "step": 607500},
    {"epoch": 1.26, "learning_rate": 2.8960248712007333e-05, "loss": 0.6697, "step": 608000},
    {"epoch": 1.26, "learning_rate": 2.8942946284961287e-05, "loss": 0.6692, "step": 608500},
    {"epoch": 1.26, "learning_rate": 2.892564385791524e-05, "loss": 0.6699, "step": 609000},
    {"epoch": 1.27, "learning_rate": 2.8908341430869192e-05, "loss": 0.6711, "step": 609500},
    {"epoch": 1.27, "learning_rate": 2.8891039003823146e-05, "loss": 0.6692, "step": 610000},
    {"epoch": 1.27, "learning_rate": 2.88737365767771e-05, "loss": 0.6701, "step": 610500},
    {"epoch": 1.27, "learning_rate": 2.8856434149731054e-05, "loss": 0.6695, "step": 611000},
    {"epoch": 1.27, "learning_rate": 2.8839131722685002e-05, "loss": 0.6667, "step": 611500},
    {"epoch": 1.27, "learning_rate": 2.882182929563896e-05, "loss": 0.671, "step": 612000},
    {"epoch": 1.27, "learning_rate": 2.8804526868592913e-05, "loss": 0.6711, "step": 612500},
    {"epoch": 1.27, "learning_rate": 2.8787224441546868e-05, "loss": 0.6733, "step": 613000},
    {"epoch": 1.27, "learning_rate": 2.8769922014500822e-05, "loss": 0.6711, "step": 613500},
    {"epoch": 1.27, "learning_rate": 2.8752619587454773e-05, "loss": 0.6698, "step": 614000},
    {"epoch": 1.28, "learning_rate": 2.8735317160408727e-05, "loss": 0.6689, "step": 614500},
    {"epoch": 1.28, "learning_rate": 2.871801473336268e-05, "loss": 0.6709, "step": 615000},
    {"epoch": 1.28, "learning_rate": 2.8700712306316635e-05, "loss": 0.6736, "step": 615500},
    {"epoch": 1.28, "learning_rate": 2.8683409879270583e-05, "loss": 0.6688, "step": 616000},
    {"epoch": 1.28, "learning_rate": 2.866610745222454e-05, "loss": 0.6725, "step": 616500},
    {"epoch": 1.28, "learning_rate": 2.8648805025178494e-05, "loss": 0.6678, "step": 617000},
    {"epoch": 1.28, "learning_rate": 2.863150259813245e-05, "loss": 0.6691, "step": 617500},
    {"epoch": 1.28, "learning_rate": 2.8614200171086403e-05, "loss": 0.6712, "step": 618000},
    {"epoch": 1.28, "learning_rate": 2.859689774404035e-05, "loss": 0.6697, "step": 618500},
    {"epoch": 1.29, "learning_rate": 2.8579595316994308e-05, "loss": 0.6696, "step": 619000},
    {"epoch": 1.29, "learning_rate": 2.8562292889948262e-05, "loss": 0.6706, "step": 619500},
    {"epoch": 1.29, "learning_rate": 2.8544990462902216e-05, "loss": 0.6697, "step": 620000},
    {"epoch": 1.29, "learning_rate": 2.8527688035856163e-05, "loss": 0.6713, "step": 620500},
    {"epoch": 1.29, "learning_rate": 2.851038560881012e-05, "loss": 0.669, "step": 621000},
    {"epoch": 1.29, "learning_rate": 2.8493083181764075e-05, "loss": 0.6664, "step": 621500},
    {"epoch": 1.29, "learning_rate": 2.847578075471803e-05, "loss": 0.6703, "step": 622000},
    {"epoch": 1.29, "learning_rate": 2.8458478327671977e-05, "loss": 0.6706, "step": 622500},
    {"epoch": 1.29, "learning_rate": 2.844117590062593e-05, "loss": 0.6714, "step": 623000},
    {"epoch": 1.29, "learning_rate": 2.842387347357989e-05, "loss": 0.6744, "step": 623500},
    {"epoch": 1.3, "learning_rate": 2.8406571046533843e-05, "loss": 0.6678, "step": 624000},
    {"epoch": 1.3, "learning_rate": 2.8389268619487797e-05, "loss": 0.6706, "step": 624500},
    {"epoch": 1.3, "learning_rate": 2.8371966192441744e-05, "loss": 0.669, "step": 625000},
    {"epoch": 1.3, "learning_rate": 2.8354663765395702e-05, "loss": 0.6683, "step": 625500},
    {"epoch": 1.3, "learning_rate": 2.8337361338349656e-05, "loss": 0.6704, "step": 626000},
    {"epoch": 1.3, "learning_rate": 2.832005891130361e-05, "loss": 0.6676, "step": 626500},
    {"epoch": 1.3, "learning_rate": 2.8302756484257558e-05, "loss": 0.6738, "step": 627000},
    {"epoch": 1.3, "learning_rate": 2.8285454057211512e-05, "loss": 0.6685, "step": 627500},
    {"epoch": 1.3, "learning_rate": 2.826815163016547e-05, "loss": 0.6716, "step": 628000},
    {"epoch": 1.3, "learning_rate": 2.8250849203119424e-05, "loss": 0.6706, "step": 628500},
    {"epoch": 1.31, "learning_rate": 2.8233546776073378e-05, "loss": 0.6694, "step": 629000},
    {"epoch": 1.31, "learning_rate": 2.8216244349027325e-05, "loss": 0.6694, "step": 629500},
    {"epoch": 1.31, "learning_rate": 2.8198941921981283e-05, "loss": 0.6707, "step": 630000},
    {"epoch": 1.31, "learning_rate": 2.8181639494935237e-05, "loss": 0.6693, "step": 630500},
    {"epoch": 1.31, "learning_rate": 2.816433706788919e-05, "loss": 0.6702, "step": 631000},
    {"epoch": 1.31, "learning_rate": 2.814703464084314e-05, "loss": 0.6695, "step": 631500},
    {"epoch": 1.31, "learning_rate": 2.8129732213797093e-05, "loss": 0.6705, "step": 632000},
    {"epoch": 1.31, "learning_rate": 2.811242978675105e-05, "loss": 0.6704, "step": 632500},
    {"epoch": 1.31, "learning_rate": 2.8095127359705005e-05, "loss": 0.6676, "step": 633000},
    {"epoch": 1.32, "learning_rate": 2.8077824932658952e-05, "loss": 0.6676, "step": 633500},
    {"epoch": 1.32, "learning_rate": 2.8060522505612906e-05, "loss": 0.6684, "step": 634000},
    {"epoch": 1.32, "learning_rate": 2.804322007856686e-05, "loss": 0.6723, "step": 634500},
    {"epoch": 1.32, "learning_rate": 2.8025917651520818e-05, "loss": 0.6689, "step": 635000},
    {"epoch": 1.32, "learning_rate": 2.8008615224474772e-05, "loss": 0.6693, "step": 635500},
    {"epoch": 1.32, "learning_rate": 2.799131279742872e-05, "loss": 0.6708, "step": 636000},
    {"epoch": 1.32, "learning_rate": 2.7974010370382674e-05, "loss": 0.6688, "step": 636500},
    {"epoch": 1.32, "learning_rate": 2.795670794333663e-05, "loss": 0.6645, "step": 637000},
    {"epoch": 1.32, "learning_rate": 2.7939405516290585e-05, "loss": 0.6724, "step": 637500},
    {"epoch": 1.32, "learning_rate": 2.7922103089244533e-05, "loss": 0.6693, "step": 638000},
    {"epoch": 1.33, "learning_rate": 2.7904800662198487e-05, "loss": 0.6676, "step": 638500},
    {"epoch": 1.33, "learning_rate": 2.788749823515244e-05, "loss": 0.6684, "step": 639000},
    {"epoch": 1.33, "learning_rate": 2.78701958081064e-05, "loss": 0.6688, "step": 639500},
    {"epoch": 1.33, "learning_rate": 2.7852893381060353e-05, "loss": 0.6695, "step": 640000},
    {"epoch": 1.33, "learning_rate": 2.78355909540143e-05, "loss": 0.6698, "step": 640500},
    {"epoch": 1.33, "learning_rate": 2.7818288526968255e-05, "loss": 0.6697, "step": 641000},
    {"epoch": 1.33, "learning_rate": 2.7800986099922212e-05, "loss": 0.67, "step": 641500},
    {"epoch": 1.33, "learning_rate": 2.7783683672876166e-05, "loss": 0.6692, "step": 642000},
    {"epoch": 1.33, "learning_rate": 2.7766381245830114e-05, "loss": 0.6666, "step": 642500},
    {"epoch": 1.34, "learning_rate": 2.7749078818784068e-05, "loss": 0.6676, "step": 643000},
    {"epoch": 1.34, "learning_rate": 2.7731776391738022e-05, "loss": 0.6663, "step": 643500},
    {"epoch": 1.34, "learning_rate": 2.771447396469198e-05, "loss": 0.6679, "step": 644000},
    {"epoch": 1.34, "learning_rate": 2.7697171537645927e-05, "loss": 0.6683, "step": 644500},
    {"epoch": 1.34, "learning_rate": 2.767986911059988e-05, "loss": 0.6692, "step": 645000},
    {"epoch": 1.34, "learning_rate": 2.7662566683553835e-05, "loss": 0.6701, "step": 645500},
    {"epoch": 1.34, "learning_rate": 2.764526425650779e-05, "loss": 0.6681, "step": 646000},
    {"epoch": 1.34, "learning_rate": 2.7627961829461747e-05, "loss": 0.6703, "step": 646500},
    {"epoch": 1.34, "learning_rate": 2.7610659402415695e-05, "loss": 0.6687, "step": 647000},
    {"epoch": 1.34, "learning_rate": 2.759335697536965e-05, "loss": 0.6677, "step": 647500},
    {"epoch": 1.35, "learning_rate": 2.7576054548323603e-05, "loss": 0.6666, "step": 648000},
    {"epoch": 1.35, "learning_rate": 2.755875212127756e-05, "loss": 0.6666, "step": 648500},
    {"epoch": 1.35, "learning_rate": 2.7541449694231508e-05, "loss": 0.6692, "step": 649000},
    {"epoch": 1.35, "learning_rate": 2.7524147267185462e-05, "loss": 0.6687, "step": 649500},
    {"epoch": 1.35, "learning_rate": 2.7506844840139416e-05, "loss": 0.6703, "step": 650000},
    {"epoch": 1.35, "learning_rate": 2.748954241309337e-05, "loss": 0.6716, "step": 650500},
    {"epoch": 1.35, "learning_rate": 2.7472239986047328e-05, "loss": 0.6694, "step": 651000},
    {"epoch": 1.35, "learning_rate": 2.7454937559001276e-05, "loss": 0.6703, "step": 651500},
    {"epoch": 1.35, "learning_rate": 2.743763513195523e-05, "loss": 0.6683, "step": 652000},
    {"epoch": 1.35, "learning_rate": 2.7420332704909184e-05, "loss": 0.6657, "step": 652500},
    {"epoch": 1.36, "learning_rate": 2.740303027786314e-05, "loss": 0.6673, "step": 653000},
    {"epoch": 1.36, "learning_rate": 2.738572785081709e-05, "loss": 0.666, "step": 653500},
    {"epoch": 1.36, "learning_rate": 2.7368425423771043e-05, "loss": 0.6688, "step": 654000},
    {"epoch": 1.36, "learning_rate": 2.7351122996724997e-05, "loss": 0.6688, "step": 654500},
    {"epoch": 1.36, "learning_rate": 2.733382056967895e-05, "loss": 0.6676, "step": 655000},
    {"epoch": 1.36, "learning_rate": 2.7316518142632902e-05, "loss": 0.67, "step": 655500},
    {"epoch": 1.36, "learning_rate": 2.7299215715586856e-05, "loss": 0.6707, "step": 656000},
    {"epoch": 1.36, "learning_rate": 2.728191328854081e-05, "loss": 0.6689, "step": 656500},
    {"epoch": 1.36, "learning_rate": 2.7264610861494765e-05, "loss": 0.6674, "step": 657000},
    {"epoch": 1.37, "learning_rate": 2.724730843444872e-05, "loss": 0.6671, "step": 657500},
    {"epoch": 1.37, "learning_rate": 2.723000600740267e-05, "loss": 0.6707, "step": 658000},
    {"epoch": 1.37, "learning_rate": 2.7212703580356624e-05, "loss": 0.6682, "step": 658500},
    {"epoch": 1.37, "learning_rate": 2.7195401153310578e-05, "loss": 0.6634, "step": 659000},
    {"epoch": 1.37, "learning_rate": 2.7178098726264532e-05, "loss": 0.668, "step": 659500},
    {"epoch": 1.37, "learning_rate": 2.7160796299218483e-05, "loss": 0.6663, "step": 660000},
    {"epoch": 1.37, "learning_rate": 2.7143493872172437e-05, "loss": 0.6673, "step": 660500},
    {"epoch": 1.37, "learning_rate": 2.712619144512639e-05, "loss": 0.668, "step": 661000},
    {"epoch": 1.37, "learning_rate": 2.7108889018080346e-05, "loss": 0.6675, "step": 661500},
    {"epoch": 1.37, "learning_rate": 2.70915865910343e-05, "loss": 0.669, "step": 662000},
    {"epoch": 1.38, "learning_rate": 2.707428416398825e-05, "loss": 0.665, "step": 662500},
    {"epoch": 1.38, "learning_rate": 2.7056981736942205e-05, "loss": 0.6649, "step": 663000},
    {"epoch": 1.38, "learning_rate": 2.703967930989616e-05, "loss": 0.6661, "step": 663500},
    {"epoch": 1.38, "learning_rate": 2.7022376882850113e-05, "loss": 0.668, "step": 664000},
    {"epoch": 1.38, "learning_rate": 2.7005074455804064e-05, "loss": 0.6703, "step": 664500},
    {"epoch": 1.38, "learning_rate": 2.6987772028758018e-05, "loss": 0.6681, "step": 665000},
    {"epoch": 1.38, "learning_rate": 2.6970469601711972e-05, "loss": 0.6698, "step": 665500},
    {"epoch": 1.38, "learning_rate": 2.6953167174665927e-05, "loss": 0.669, "step": 666000},
    {"epoch": 1.38, "learning_rate": 2.6935864747619877e-05, "loss": 0.6676, "step": 666500},
    {"epoch": 1.38, "learning_rate": 2.691856232057383e-05, "loss": 0.6683, "step": 667000},
    {"epoch": 1.39, "learning_rate": 2.6901259893527786e-05, "loss": 0.6678, "step": 667500},
    {"epoch": 1.39, "learning_rate": 2.688395746648174e-05, "loss": 0.666, "step": 668000},
    {"epoch": 1.39, "learning_rate": 2.6866655039435694e-05, "loss": 0.6662, "step": 668500},
    {"epoch": 1.39, "learning_rate": 2.6849352612389645e-05, "loss": 0.6681, "step": 669000},
    {"epoch": 1.39, "learning_rate": 2.68320501853436e-05, "loss": 0.6633, "step": 669500},
    {"epoch": 1.39, "learning_rate": 2.6814747758297553e-05, "loss": 0.6659, "step": 670000},
    {"epoch": 1.39, "learning_rate": 2.6797445331251507e-05, "loss": 0.6671, "step": 670500},
    {"epoch": 1.39, "learning_rate": 2.6780142904205458e-05, "loss": 0.6668, "step": 671000},
    {"epoch": 1.39, "learning_rate": 2.6762840477159412e-05, "loss": 0.6688, "step": 671500},
    {"epoch": 1.4, "learning_rate": 2.6745538050113367e-05, "loss": 0.6635, "step": 672000},
    {"epoch": 1.4, "learning_rate": 2.672823562306732e-05, "loss": 0.666, "step": 672500},
    {"epoch": 1.4, "learning_rate": 2.6710933196021275e-05, "loss": 0.6654, "step": 673000},
    {"epoch": 1.4, "learning_rate": 2.6693630768975226e-05, "loss": 0.6655, "step": 673500},
    {"epoch": 1.4, "learning_rate": 2.667632834192918e-05, "loss": 0.6675, "step": 674000},
    {"epoch": 1.4, "learning_rate": 2.6659025914883134e-05, "loss": 0.6703, "step": 674500},
    {"epoch": 1.4, "learning_rate": 2.664172348783709e-05, "loss": 0.6675, "step": 675000},
    {"epoch": 1.4, "learning_rate": 2.662442106079104e-05, "loss": 0.6666, "step": 675500},
    {"epoch": 1.4, "learning_rate": 2.6607118633744993e-05, "loss": 0.6653, "step": 676000},
    {"epoch": 1.4, "learning_rate": 2.6589816206698948e-05, "loss": 0.6669, "step": 676500},
    {"epoch": 1.41, "learning_rate": 2.6572513779652902e-05, "loss": 0.663, "step": 677000},
    {"epoch": 1.41, "learning_rate": 2.6555211352606856e-05, "loss": 0.6671, "step": 677500},
    {"epoch": 1.41, "learning_rate": 2.6537908925560807e-05, "loss": 0.6677, "step": 678000},
    {"epoch": 1.41, "learning_rate": 2.652060649851476e-05, "loss": 0.6676, "step": 678500},
    {"epoch": 1.41, "learning_rate": 2.6503304071468715e-05, "loss": 0.666, "step": 679000},
    {"epoch": 1.41, "learning_rate": 2.648600164442267e-05, "loss": 0.6649, "step": 679500},
    {"epoch": 1.41, "learning_rate": 2.646869921737662e-05, "loss": 0.6663, "step": 680000},
    {"epoch": 1.41, "learning_rate": 2.6451396790330574e-05, "loss": 0.6657, "step": 680500},
    {"epoch": 1.41, "learning_rate": 2.643409436328453e-05, "loss": 0.6678, "step": 681000},
    {"epoch": 1.41, "learning_rate": 2.6416791936238483e-05, "loss": 0.6685, "step": 681500},
    {"epoch": 1.42, "learning_rate": 2.6399489509192433e-05, "loss": 0.6645, "step": 682000},
    {"epoch": 1.42, "learning_rate": 2.6382187082146388e-05, "loss": 0.6694, "step": 682500},
    {"epoch": 1.42, "learning_rate": 2.6364884655100342e-05, "loss": 0.6635, "step": 683000},
    {"epoch": 1.42, "learning_rate": 2.6347582228054296e-05, "loss": 0.6664, "step": 683500},
    {"epoch": 1.42, "learning_rate": 2.633027980100825e-05, "loss": 0.6685, "step": 684000},
    {"epoch": 1.42, "learning_rate": 2.63129773739622e-05, "loss": 0.6666, "step": 684500},
    {"epoch": 1.42, "learning_rate": 2.6295674946916155e-05, "loss": 0.6664, "step": 685000},
    {"epoch": 1.42, "learning_rate": 2.627837251987011e-05, "loss": 0.6692, "step": 685500},
    {"epoch": 1.42, "learning_rate": 2.6261070092824063e-05, "loss": 0.6666, "step": 686000},
    {"epoch": 1.43, "learning_rate": 2.6243767665778014e-05, "loss": 0.6659, "step": 686500},
    {"epoch": 1.43, "learning_rate": 2.622646523873197e-05, "loss": 0.6655, "step": 687000},
    {"epoch": 1.43, "learning_rate": 2.6209162811685923e-05, "loss": 0.6686, "step": 687500},
    {"epoch": 1.43, "learning_rate": 2.6191860384639877e-05, "loss": 0.6666, "step": 688000},
    {"epoch": 1.43, "learning_rate": 2.617455795759383e-05, "loss": 0.6678, "step": 688500},
    {"epoch": 1.43, "learning_rate": 2.6157255530547782e-05, "loss": 0.664, "step": 689000},
    {"epoch": 1.43, "learning_rate": 2.6139953103501736e-05, "loss": 0.6666, "step": 689500},
    {"epoch": 1.43, "learning_rate": 2.612265067645569e-05, "loss": 0.6665, "step": 690000},
    {"epoch": 1.43, "learning_rate": 2.6105348249409644e-05, "loss": 0.6639, "step": 690500},
    {"epoch": 1.43, "learning_rate": 2.6088045822363595e-05, "loss": 0.6666, "step": 691000},
    {"epoch": 1.44, "learning_rate": 2.607074339531755e-05, "loss": 0.6668, "step": 691500},
    {"epoch": 1.44, "learning_rate": 2.6053440968271504e-05, "loss": 0.6672, "step": 692000},
    {"epoch": 1.44, "learning_rate": 2.6036138541225458e-05, "loss": 0.6676, "step": 692500},
    {"epoch": 1.44, "learning_rate": 2.6018836114179405e-05, "loss": 0.6652, "step": 693000},
    {"epoch": 1.44, "learning_rate": 2.6001533687133363e-05, "loss": 0.6656, "step": 693500},
    {"epoch": 1.44, "learning_rate": 2.5984231260087317e-05, "loss": 0.6672, "step": 694000},
    {"epoch": 1.44, "learning_rate": 2.596692883304127e-05, "loss": 0.6662, "step": 694500},
    {"epoch": 1.44, "learning_rate": 2.5949626405995225e-05, "loss": 0.6678, "step": 695000},
    {"epoch": 1.44, "learning_rate": 2.5932323978949176e-05, "loss": 0.6641, "step": 695500},
    {"epoch": 1.45, "learning_rate": 2.591502155190313e-05, "loss": 0.6655, "step": 696000},
    {"epoch": 1.45, "learning_rate": 2.5897719124857084e-05, "loss": 0.6616, "step": 696500},
    {"epoch": 1.45, "learning_rate": 2.588041669781104e-05, "loss": 0.669, "step": 697000},
    {"epoch": 1.45, "learning_rate": 2.5863114270764986e-05, "loss": 0.6655, "step": 697500},
    {"epoch": 1.45, "learning_rate": 2.5845811843718944e-05, "loss": 0.6651, "step": 698000},
    {"epoch": 1.45, "learning_rate": 2.5828509416672898e-05, "loss": 0.6644, "step": 698500},
    {"epoch": 1.45, "learning_rate": 2.5811206989626852e-05, "loss": 0.6666, "step": 699000},
    {"epoch": 1.45, "learning_rate": 2.5793904562580806e-05, "loss": 0.6675, "step": 699500},
    {"epoch": 1.45, "learning_rate": 2.5776602135534757e-05, "loss": 0.6653, "step": 700000},
    {"epoch": 1.45, "learning_rate": 2.575929970848871e-05, "loss": 0.6676, "step": 700500},
    {"epoch": 1.46, "learning_rate": 2.5741997281442665e-05, "loss": 0.6657, "step": 701000},
    {"epoch": 1.46, "learning_rate": 2.572469485439662e-05, "loss": 0.6643, "step": 701500},
    {"epoch": 1.46, "learning_rate": 2.5707392427350567e-05, "loss": 0.6646, "step": 702000},
    {"epoch": 1.46, "learning_rate": 2.5690090000304524e-05, "loss": 0.6668, "step": 702500},
    {"epoch": 1.46, "learning_rate": 2.567278757325848e-05, "loss": 0.6649, "step": 703000},
    {"epoch": 1.46, "learning_rate": 2.5655485146212433e-05, "loss": 0.6683, "step": 703500},
    {"epoch": 1.46, "learning_rate": 2.563818271916638e-05, "loss": 0.6649, "step": 704000},
    {"epoch": 1.46, "learning_rate": 2.5620880292120334e-05, "loss": 0.6634, "step": 704500},
    {"epoch": 1.46, "learning_rate": 2.5603577865074292e-05, "loss": 0.6651, "step": 705000},
    {"epoch": 1.46, "learning_rate": 2.5586275438028246e-05, "loss": 0.6652, "step": 705500},
    {"epoch": 1.47, "learning_rate": 2.55689730109822e-05, "loss": 0.6681, "step": 706000},
    {"epoch": 1.47, "learning_rate": 2.5551670583936148e-05, "loss": 0.6656, "step": 706500},
    {"epoch": 1.47, "learning_rate": 2.5534368156890105e-05, "loss": 0.6674, "step": 707000},
    {"epoch": 1.47, "learning_rate": 2.551706572984406e-05, "loss": 0.666, "step": 707500},
    {"epoch": 1.47, "learning_rate": 2.5499763302798014e-05, "loss": 0.6658, "step": 708000},
    {"epoch": 1.47, "learning_rate": 2.548246087575196e-05, "loss": 0.6636, "step": 708500},
    {"epoch": 1.47, "learning_rate": 2.5465158448705915e-05, "loss": 0.6637, "step": 709000},
    {"epoch": 1.47, "learning_rate": 2.5447856021659873e-05, "loss": 0.6679, "step": 709500},
    {"epoch": 1.47, "learning_rate": 2.5430553594613827e-05, "loss": 0.6648, "step": 710000},
    {"epoch": 1.48, "learning_rate": 2.541325116756778e-05, "loss": 0.6635, "step": 710500},
    {"epoch": 1.48, "learning_rate": 2.539594874052173e-05, "loss": 0.6647, "step": 711000},
    {"epoch": 1.48, "learning_rate": 2.5378646313475686e-05, "loss": 0.6641, "step": 711500},
    {"epoch": 1.48, "learning_rate": 2.536134388642964e-05, "loss": 0.6659, "step": 712000},
    {"epoch": 1.48, "learning_rate": 2.5344041459383595e-05, "loss": 0.666, "step": 712500},
    {"epoch": 1.48, "learning_rate": 2.5326739032337542e-05, "loss": 0.6667, "step": 713000},
    {"epoch": 1.48, "learning_rate": 2.5309436605291496e-05, "loss": 0.6675, "step": 713500},
    {"epoch": 1.48, "learning_rate": 2.5292134178245454e-05, "loss": 0.6668, "step": 714000},
    {"epoch": 1.48, "learning_rate": 2.5274831751199408e-05, "loss": 0.6658, "step": 714500},
    {"epoch": 1.48, "learning_rate": 2.5257529324153355e-05, "loss": 0.6658, "step": 715000},
    {"epoch": 1.49, "learning_rate": 2.524022689710731e-05, "loss": 0.6637, "step": 715500},
    {"epoch": 1.49, "learning_rate": 2.5222924470061267e-05, "loss": 0.6621, "step": 716000},
    {"epoch": 1.49, "learning_rate": 2.520562204301522e-05, "loss": 0.6656, "step": 716500},
    {"epoch": 1.49, "learning_rate": 2.5188319615969176e-05, "loss": 0.6604, "step": 717000},
    {"epoch": 1.49, "learning_rate": 2.5171017188923123e-05, "loss": 0.6641, "step": 717500},
    {"epoch": 1.49, "learning_rate": 2.5153714761877077e-05, "loss": 0.6653, "step": 718000},
    {"epoch": 1.49, "learning_rate": 2.5136412334831035e-05, "loss": 0.6614, "step": 718500},
    {"epoch": 1.49, "learning_rate": 2.511910990778499e-05, "loss": 0.6643, "step": 719000},
    {"epoch": 1.49, "learning_rate": 2.5101807480738936e-05, "loss": 0.6644, "step": 719500},
    {"epoch": 1.49, "learning_rate": 2.508450505369289e-05, "loss": 0.6678, "step": 720000},
    {"epoch": 1.5, "learning_rate": 2.5067202626646845e-05, "loss": 0.6655, "step": 720500},
    {"epoch": 1.5, "learning_rate": 2.5049900199600802e-05, "loss": 0.6613, "step": 721000},
    {"epoch": 1.5, "learning_rate": 2.5032597772554756e-05, "loss": 0.664, "step": 721500},
    {"epoch": 1.5, "learning_rate": 2.5015295345508704e-05, "loss": 0.6645, "step": 722000},
    {"epoch": 1.5, "learning_rate": 2.4997992918462658e-05, "loss": 0.6618, "step": 722500},
    {"epoch": 1.5, "learning_rate": 2.4980690491416616e-05, "loss": 0.6672, "step": 723000},
    {"epoch": 1.5, "learning_rate": 2.4963388064370566e-05, "loss": 0.6673, "step": 723500},
    {"epoch": 1.5, "learning_rate": 2.494608563732452e-05, "loss": 0.667, "step": 724000},
    {"epoch": 1.5, "learning_rate": 2.492878321027847e-05, "loss": 0.6616, "step": 724500},
    {"epoch": 1.51, "learning_rate": 2.4911480783232426e-05, "loss": 0.6639, "step": 725000},
    {"epoch": 1.51, "learning_rate": 2.489417835618638e-05, "loss": 0.6665, "step": 725500},
    {"epoch": 1.51, "learning_rate": 2.4876875929140334e-05, "loss": 0.6634, "step": 726000},
    {"epoch": 1.51, "learning_rate": 2.4859573502094288e-05, "loss": 0.6666, "step": 726500},
    {"epoch": 1.51, "learning_rate": 2.484227107504824e-05, "loss": 0.6633, "step": 727000},
    {"epoch": 1.51, "learning_rate": 2.4824968648002196e-05, "loss": 0.6636, "step": 727500},
    {"epoch": 1.51, "learning_rate": 2.4807666220956147e-05, "loss": 0.6653, "step": 728000},
    {"epoch": 1.51, "learning_rate": 2.47903637939101e-05, "loss": 0.6638, "step": 728500},
    {"epoch": 1.51, "learning_rate": 2.4773061366864052e-05, "loss": 0.6662, "step": 729000},
    {"epoch": 1.51, "learning_rate": 2.4755758939818006e-05, "loss": 0.6648, "step": 729500},
    {"epoch": 1.52, "learning_rate": 2.473845651277196e-05, "loss": 0.6651, "step": 730000},
    {"epoch": 1.52, "learning_rate": 2.4721154085725915e-05, "loss": 0.6649, "step": 730500},
    {"epoch": 1.52, "learning_rate": 2.4703851658679866e-05, "loss": 0.6626, "step": 731000},
    {"epoch": 1.52, "learning_rate": 2.468654923163382e-05, "loss": 0.6608, "step": 731500},
    {"epoch": 1.52, "learning_rate": 2.4669246804587774e-05, "loss": 0.6616, "step": 732000},
    {"epoch": 1.52, "learning_rate": 2.4651944377541728e-05, "loss": 0.6607, "step": 732500},
    {"epoch": 1.52, "learning_rate": 2.4634641950495682e-05, "loss": 0.6615, "step": 733000},
    {"epoch": 1.52, "learning_rate": 2.4617339523449633e-05, "loss": 0.6642, "step": 733500},
    {"epoch": 1.52, "learning_rate": 2.4600037096403587e-05, "loss": 0.6612, "step": 734000},
    {"epoch": 1.53, "learning_rate": 2.458273466935754e-05, "loss": 0.6625, "step": 734500},
    {"epoch": 1.53, "learning_rate": 2.4565432242311496e-05, "loss": 0.6619, "step": 735000},
    {"epoch": 1.53, "learning_rate": 2.4548129815265446e-05, "loss": 0.6632, "step": 735500},
    {"epoch": 1.53, "learning_rate": 2.45308273882194e-05, "loss": 0.6647, "step": 736000},
    {"epoch": 1.53, "learning_rate": 2.4513524961173355e-05, "loss": 0.6672, "step": 736500},
    {"epoch": 1.53, "learning_rate": 2.449622253412731e-05, "loss": 0.663, "step": 737000},
    {"epoch": 1.53, "learning_rate": 2.4478920107081263e-05, "loss": 0.6659, "step": 737500},
    {"epoch": 1.53, "learning_rate": 2.4461617680035214e-05, "loss": 0.663, "step": 738000},
    {"epoch": 1.53, "learning_rate": 2.4444315252989168e-05, "loss": 0.6674, "step": 738500},
    {"epoch": 1.53, "learning_rate": 2.4427012825943122e-05, "loss": 0.6651, "step": 739000},
    {"epoch": 1.54, "learning_rate": 2.4409710398897077e-05, "loss": 0.662, "step": 739500},
    {"epoch": 1.54, "learning_rate": 2.4392407971851027e-05, "loss": 0.6635, "step": 740000},
    {"epoch": 1.54, "learning_rate": 2.437510554480498e-05, "loss": 0.6647, "step": 740500},
    {"epoch": 1.54, "learning_rate": 2.4357803117758936e-05, "loss": 0.6635, "step": 741000},
    {"epoch": 1.54, "learning_rate": 2.434050069071289e-05, "loss": 0.6634, "step": 741500},
    {"epoch": 1.54, "learning_rate": 2.432319826366684e-05, "loss": 0.6627, "step": 742000},
    {"epoch": 1.54, "learning_rate": 2.4305895836620795e-05, "loss": 0.6632, "step": 742500},
    {"epoch": 1.54, "learning_rate": 2.428859340957475e-05, "loss": 0.6622, "step": 743000},
    {"epoch": 1.54, "learning_rate": 2.4271290982528703e-05, "loss": 0.665, "step": 743500},
    {"epoch": 1.54, "learning_rate": 2.4253988555482657e-05, "loss": 0.6626, "step": 744000},
    {"epoch": 1.55, "learning_rate": 2.4236686128436608e-05, "loss": 0.6632, "step": 744500},
    {"epoch": 1.55, "learning_rate": 2.4219383701390562e-05, "loss": 0.6652, "step": 745000},
    {"epoch": 1.55, "learning_rate": 2.4202081274344517e-05, "loss": 0.6609, "step": 745500},
    {"epoch": 1.55, "learning_rate": 2.418477884729847e-05, "loss": 0.6665, "step": 746000},
    {"epoch": 1.55, "learning_rate": 2.416747642025242e-05, "loss": 0.6638, "step": 746500},
    {"epoch": 1.55, "learning_rate": 2.4150173993206376e-05, "loss": 0.6657, "step": 747000},
    {"epoch": 1.55, "learning_rate": 2.413287156616033e-05, "loss": 0.6614, "step": 747500},
    {"epoch": 1.55, "learning_rate": 2.4115569139114284e-05, "loss": 0.6635, "step": 748000},
    {"epoch": 1.55, "learning_rate": 2.409826671206824e-05, "loss": 0.6637, "step": 748500},
    {"epoch": 1.56, "learning_rate": 2.408096428502219e-05, "loss": 0.6648, "step": 749000},
    {"epoch": 1.56, "learning_rate": 2.4063661857976143e-05, "loss": 0.6601, "step": 749500},
    {"epoch": 1.56, "learning_rate": 2.4046359430930098e-05, "loss": 0.6636, "step": 750000},
    {"epoch": 1.56, "learning_rate": 2.4029057003884052e-05, "loss": 0.6637, "step": 750500},
    {"epoch": 1.56, "learning_rate": 2.4011754576838003e-05, "loss": 0.6631, "step": 751000},
    {"epoch": 1.56, "learning_rate": 2.3994452149791957e-05, "loss": 0.6647, "step": 751500},
    {"epoch": 1.56, "learning_rate": 2.3977149722745907e-05, "loss": 0.6634, "step": 752000},
    {"epoch": 1.56, "learning_rate": 2.3959847295699865e-05, "loss": 0.6644, "step": 752500},
    {"epoch": 1.56, "learning_rate": 2.3942544868653816e-05, "loss": 0.6613, "step": 753000},
    {"epoch": 1.56, "learning_rate": 2.392524244160777e-05, "loss": 0.6618, "step": 753500},
    {"epoch": 1.57, "learning_rate": 2.3907940014561724e-05, "loss": 0.6634, "step": 754000},
    {"epoch": 1.57, "learning_rate": 2.389063758751568e-05, "loss": 0.6586, "step": 754500},
    {"epoch": 1.57, "learning_rate": 2.3873335160469633e-05, "loss": 0.6655, "step": 755000},
    {"epoch": 1.57, "learning_rate": 2.3856032733423583e-05, "loss": 0.6656, "step": 755500},
    {"epoch": 1.57, "learning_rate": 2.3838730306377538e-05, "loss": 0.6636, "step": 756000}
  ],
  "max_steps": 1444884,
  "num_train_epochs": 3,
  "total_flos": 6.321173702639616e+18,
  "trial_name": null,
  "trial_params": null
}