|
{ |
|
"best_metric": 2.1569273471832275, |
|
"best_model_checkpoint": "/netscratch/gutsche/data/LOLTwitchBertTraining_da66d5ea-8560-4a50-b7a3-fb9e339ee1af/TwitchLeagueBert/checkpoint-500000", |
|
"epoch": 7.430303750817333, |
|
"global_step": 500000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.998142424062296e-05, |
|
"loss": 8.231, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9962848481245916e-05, |
|
"loss": 7.4239, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.994427272186887e-05, |
|
"loss": 7.1438, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.992569696249183e-05, |
|
"loss": 6.9853, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.990712120311479e-05, |
|
"loss": 6.8714, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.9888545443737744e-05, |
|
"loss": 6.7786, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.98699696843607e-05, |
|
"loss": 6.7338, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.985139392498366e-05, |
|
"loss": 6.6649, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.9832818165606614e-05, |
|
"loss": 6.6232, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.9814242406229564e-05, |
|
"loss": 6.5875, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.979566664685252e-05, |
|
"loss": 6.5195, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.977709088747548e-05, |
|
"loss": 6.4647, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.9758515128098435e-05, |
|
"loss": 6.2629, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.973993936872139e-05, |
|
"loss": 6.0753, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.972136360934435e-05, |
|
"loss": 5.9132, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.9702787849967306e-05, |
|
"loss": 5.7549, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.968421209059026e-05, |
|
"loss": 5.5722, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.966563633121322e-05, |
|
"loss": 5.3825, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.9647060571836176e-05, |
|
"loss": 5.2093, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.962848481245913e-05, |
|
"loss": 5.065, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.960990905308209e-05, |
|
"loss": 4.9503, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.959133329370505e-05, |
|
"loss": 4.8124, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.957275753432801e-05, |
|
"loss": 4.6793, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.955418177495097e-05, |
|
"loss": 4.5688, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.9535606015573924e-05, |
|
"loss": 4.441, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.951703025619688e-05, |
|
"loss": 4.2956, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.949845449681984e-05, |
|
"loss": 4.221, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.947987873744279e-05, |
|
"loss": 4.129, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.9461302978065745e-05, |
|
"loss": 4.0381, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.94427272186887e-05, |
|
"loss": 3.9632, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.942415145931166e-05, |
|
"loss": 3.9009, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.9405575699934615e-05, |
|
"loss": 3.8547, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.938699994055757e-05, |
|
"loss": 3.787, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.936842418118053e-05, |
|
"loss": 3.7538, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.9349848421803486e-05, |
|
"loss": 3.71, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.933127266242644e-05, |
|
"loss": 3.6738, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.93126969030494e-05, |
|
"loss": 3.6138, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.929412114367236e-05, |
|
"loss": 3.5978, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.9275545384295314e-05, |
|
"loss": 3.5557, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.925696962491827e-05, |
|
"loss": 3.5383, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"eval_loss": 3.430026054382324, |
|
"eval_runtime": 1172.1185, |
|
"eval_samples_per_second": 408.251, |
|
"eval_steps_per_second": 51.032, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.923839386554123e-05, |
|
"loss": 3.5163, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.9219818106164184e-05, |
|
"loss": 3.4927, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.920124234678714e-05, |
|
"loss": 3.4945, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.91826665874101e-05, |
|
"loss": 3.4371, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.9164090828033055e-05, |
|
"loss": 3.4193, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.914551506865601e-05, |
|
"loss": 3.4072, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.912693930927896e-05, |
|
"loss": 3.3675, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.910836354990192e-05, |
|
"loss": 3.3547, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.9089787790524875e-05, |
|
"loss": 3.3442, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.907121203114783e-05, |
|
"loss": 3.3343, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.905263627177079e-05, |
|
"loss": 3.3113, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.9034060512393746e-05, |
|
"loss": 3.2771, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.90154847530167e-05, |
|
"loss": 3.273, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.899690899363966e-05, |
|
"loss": 3.2727, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.897833323426262e-05, |
|
"loss": 3.2305, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.8959757474885574e-05, |
|
"loss": 3.2263, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.894118171550853e-05, |
|
"loss": 3.2133, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.892260595613149e-05, |
|
"loss": 3.2353, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.8904030196754444e-05, |
|
"loss": 3.1918, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.88854544373774e-05, |
|
"loss": 3.1688, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.886687867800036e-05, |
|
"loss": 3.1584, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.8848302918623315e-05, |
|
"loss": 3.1597, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.882972715924627e-05, |
|
"loss": 3.1416, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.881115139986923e-05, |
|
"loss": 3.1551, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.8792575640492185e-05, |
|
"loss": 3.1232, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.877399988111514e-05, |
|
"loss": 3.1159, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.87554241217381e-05, |
|
"loss": 3.0914, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.8736848362361056e-05, |
|
"loss": 3.0866, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.871827260298401e-05, |
|
"loss": 3.0784, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.869969684360697e-05, |
|
"loss": 3.0815, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.8681121084229927e-05, |
|
"loss": 3.0667, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.8662545324852883e-05, |
|
"loss": 3.0564, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.864396956547584e-05, |
|
"loss": 3.0549, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.86253938060988e-05, |
|
"loss": 3.0333, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.8606818046721754e-05, |
|
"loss": 3.0262, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.858824228734471e-05, |
|
"loss": 3.037, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.856966652796767e-05, |
|
"loss": 3.0241, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.8551090768590625e-05, |
|
"loss": 3.0134, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.853251500921358e-05, |
|
"loss": 3.0102, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.851393924983654e-05, |
|
"loss": 2.9842, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"eval_loss": 2.925679922103882, |
|
"eval_runtime": 1173.0278, |
|
"eval_samples_per_second": 407.935, |
|
"eval_steps_per_second": 50.992, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.8495363490459495e-05, |
|
"loss": 2.9908, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 4.847678773108245e-05, |
|
"loss": 2.9797, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.845821197170541e-05, |
|
"loss": 2.981, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.843963621232836e-05, |
|
"loss": 2.9777, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 4.8421060452951316e-05, |
|
"loss": 2.9611, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.840248469357427e-05, |
|
"loss": 2.9599, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.838390893419723e-05, |
|
"loss": 2.9528, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.8365333174820187e-05, |
|
"loss": 2.948, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.8346757415443143e-05, |
|
"loss": 2.946, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.83281816560661e-05, |
|
"loss": 2.949, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.830960589668906e-05, |
|
"loss": 2.9323, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.8291030137312014e-05, |
|
"loss": 2.9262, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.827245437793497e-05, |
|
"loss": 2.9044, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.825387861855793e-05, |
|
"loss": 2.9031, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.8235302859180885e-05, |
|
"loss": 2.9183, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.821672709980384e-05, |
|
"loss": 2.8872, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.81981513404268e-05, |
|
"loss": 2.8891, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 4.8179575581049755e-05, |
|
"loss": 2.8871, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 4.816099982167271e-05, |
|
"loss": 2.8634, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 4.814242406229567e-05, |
|
"loss": 2.9043, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 4.8123848302918626e-05, |
|
"loss": 2.8807, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 4.810527254354158e-05, |
|
"loss": 2.8617, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.808669678416454e-05, |
|
"loss": 2.8533, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 4.8068121024787496e-05, |
|
"loss": 2.8709, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 4.804954526541045e-05, |
|
"loss": 2.8524, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 4.803096950603341e-05, |
|
"loss": 2.8494, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.801239374665637e-05, |
|
"loss": 2.847, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.7993817987279324e-05, |
|
"loss": 2.8241, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.797524222790228e-05, |
|
"loss": 2.8502, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.795666646852524e-05, |
|
"loss": 2.8225, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.7938090709148195e-05, |
|
"loss": 2.8416, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4.791951494977115e-05, |
|
"loss": 2.7997, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 4.790093919039411e-05, |
|
"loss": 2.8175, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 4.7882363431017065e-05, |
|
"loss": 2.8017, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 4.786378767164002e-05, |
|
"loss": 2.8122, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 4.784521191226298e-05, |
|
"loss": 2.7994, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 4.7826636152885936e-05, |
|
"loss": 2.7929, |
|
"step": 58500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.780806039350889e-05, |
|
"loss": 2.7802, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 4.778948463413185e-05, |
|
"loss": 2.7921, |
|
"step": 59500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.7770908874754806e-05, |
|
"loss": 2.7801, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"eval_loss": 2.7286553382873535, |
|
"eval_runtime": 1174.8336, |
|
"eval_samples_per_second": 407.308, |
|
"eval_steps_per_second": 50.914, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.7752333115377756e-05, |
|
"loss": 2.7903, |
|
"step": 60500 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.773375735600071e-05, |
|
"loss": 2.783, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.771518159662367e-05, |
|
"loss": 2.7792, |
|
"step": 61500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.769660583724663e-05, |
|
"loss": 2.7564, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.7678030077869584e-05, |
|
"loss": 2.7652, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.765945431849254e-05, |
|
"loss": 2.777, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 4.76408785591155e-05, |
|
"loss": 2.7674, |
|
"step": 63500 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 4.7622302799738455e-05, |
|
"loss": 2.7754, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 4.760372704036141e-05, |
|
"loss": 2.7536, |
|
"step": 64500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.758515128098437e-05, |
|
"loss": 2.7451, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.7566575521607325e-05, |
|
"loss": 2.7382, |
|
"step": 65500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 4.754799976223028e-05, |
|
"loss": 2.7419, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.752942400285324e-05, |
|
"loss": 2.7564, |
|
"step": 66500 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.7510848243476196e-05, |
|
"loss": 2.7373, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.749227248409915e-05, |
|
"loss": 2.7353, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.747369672472211e-05, |
|
"loss": 2.7355, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.7455120965345066e-05, |
|
"loss": 2.725, |
|
"step": 68500 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 4.743654520596802e-05, |
|
"loss": 2.7095, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 4.741796944659097e-05, |
|
"loss": 2.7259, |
|
"step": 69500 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 4.739939368721393e-05, |
|
"loss": 2.7142, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 4.738081792783689e-05, |
|
"loss": 2.7066, |
|
"step": 70500 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 4.7362242168459844e-05, |
|
"loss": 2.7199, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 4.73436664090828e-05, |
|
"loss": 2.7113, |
|
"step": 71500 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 4.7325090649705764e-05, |
|
"loss": 2.7058, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 4.730651489032872e-05, |
|
"loss": 2.709, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 4.728793913095168e-05, |
|
"loss": 2.6836, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 4.7269363371574635e-05, |
|
"loss": 2.6948, |
|
"step": 73500 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 4.725078761219759e-05, |
|
"loss": 2.694, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 4.723221185282055e-05, |
|
"loss": 2.6896, |
|
"step": 74500 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 4.7213636093443506e-05, |
|
"loss": 2.7087, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 4.719506033406646e-05, |
|
"loss": 2.6669, |
|
"step": 75500 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 4.717648457468942e-05, |
|
"loss": 2.6623, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 4.7157908815312376e-05, |
|
"loss": 2.6854, |
|
"step": 76500 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 4.713933305593533e-05, |
|
"loss": 2.6837, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 4.712075729655829e-05, |
|
"loss": 2.6628, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 4.710218153718125e-05, |
|
"loss": 2.6616, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 4.70836057778042e-05, |
|
"loss": 2.6765, |
|
"step": 78500 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 4.7065030018427154e-05, |
|
"loss": 2.6744, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 4.704645425905011e-05, |
|
"loss": 2.6777, |
|
"step": 79500 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 4.702787849967307e-05, |
|
"loss": 2.6607, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"eval_loss": 2.6154634952545166, |
|
"eval_runtime": 1176.1132, |
|
"eval_samples_per_second": 406.865, |
|
"eval_steps_per_second": 50.858, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 4.7009302740296025e-05, |
|
"loss": 2.6604, |
|
"step": 80500 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 4.699072698091898e-05, |
|
"loss": 2.6638, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 4.697215122154194e-05, |
|
"loss": 2.6532, |
|
"step": 81500 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 4.6953575462164895e-05, |
|
"loss": 2.6609, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 4.693499970278785e-05, |
|
"loss": 2.6634, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 4.691642394341081e-05, |
|
"loss": 2.6548, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 4.6897848184033766e-05, |
|
"loss": 2.6581, |
|
"step": 83500 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 4.687927242465672e-05, |
|
"loss": 2.6587, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 4.686069666527968e-05, |
|
"loss": 2.6481, |
|
"step": 84500 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 4.6842120905902636e-05, |
|
"loss": 2.6438, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 4.682354514652559e-05, |
|
"loss": 2.6372, |
|
"step": 85500 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 4.680496938714855e-05, |
|
"loss": 2.6396, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 4.678639362777151e-05, |
|
"loss": 2.6389, |
|
"step": 86500 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 4.6767817868394464e-05, |
|
"loss": 2.6416, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 4.674924210901742e-05, |
|
"loss": 2.6479, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 4.673066634964037e-05, |
|
"loss": 2.6172, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 4.671209059026333e-05, |
|
"loss": 2.612, |
|
"step": 88500 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 4.6693514830886285e-05, |
|
"loss": 2.6318, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 4.667493907150924e-05, |
|
"loss": 2.6373, |
|
"step": 89500 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 4.66563633121322e-05, |
|
"loss": 2.6444, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 4.6637787552755155e-05, |
|
"loss": 2.6158, |
|
"step": 90500 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 4.661921179337811e-05, |
|
"loss": 2.6245, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.660063603400107e-05, |
|
"loss": 2.6326, |
|
"step": 91500 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.6582060274624026e-05, |
|
"loss": 2.6318, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.656348451524698e-05, |
|
"loss": 2.6283, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.654490875586994e-05, |
|
"loss": 2.6029, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.6526332996492896e-05, |
|
"loss": 2.6183, |
|
"step": 93500 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.650775723711585e-05, |
|
"loss": 2.6165, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.648918147773882e-05, |
|
"loss": 2.6081, |
|
"step": 94500 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 4.6470605718361774e-05, |
|
"loss": 2.6337, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 4.645202995898473e-05, |
|
"loss": 2.5937, |
|
"step": 95500 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 4.643345419960769e-05, |
|
"loss": 2.6143, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 4.6414878440230644e-05, |
|
"loss": 2.6044, |
|
"step": 96500 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 4.6396302680853594e-05, |
|
"loss": 2.608, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 4.637772692147655e-05, |
|
"loss": 2.6047, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 4.635915116209951e-05, |
|
"loss": 2.5922, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 4.6340575402722465e-05, |
|
"loss": 2.6051, |
|
"step": 98500 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 4.632199964334542e-05, |
|
"loss": 2.5986, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 4.630342388396838e-05, |
|
"loss": 2.5852, |
|
"step": 99500 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 4.6284848124591336e-05, |
|
"loss": 2.5837, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"eval_loss": 2.5408413410186768, |
|
"eval_runtime": 1176.2652, |
|
"eval_samples_per_second": 406.812, |
|
"eval_steps_per_second": 50.852, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 4.626627236521429e-05, |
|
"loss": 2.583, |
|
"step": 100500 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 4.624769660583725e-05, |
|
"loss": 2.586, |
|
"step": 101000 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 4.6229120846460206e-05, |
|
"loss": 2.5929, |
|
"step": 101500 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.621054508708316e-05, |
|
"loss": 2.5938, |
|
"step": 102000 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.619196932770612e-05, |
|
"loss": 2.5759, |
|
"step": 102500 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.617339356832908e-05, |
|
"loss": 2.5853, |
|
"step": 103000 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.6154817808952034e-05, |
|
"loss": 2.5807, |
|
"step": 103500 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.613624204957499e-05, |
|
"loss": 2.5606, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.611766629019795e-05, |
|
"loss": 2.5749, |
|
"step": 104500 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.6099090530820904e-05, |
|
"loss": 2.5578, |
|
"step": 105000 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.608051477144386e-05, |
|
"loss": 2.5818, |
|
"step": 105500 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.606193901206682e-05, |
|
"loss": 2.5786, |
|
"step": 106000 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.604336325268977e-05, |
|
"loss": 2.5885, |
|
"step": 106500 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.6024787493312725e-05, |
|
"loss": 2.567, |
|
"step": 107000 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 4.600621173393568e-05, |
|
"loss": 2.5774, |
|
"step": 107500 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 4.598763597455864e-05, |
|
"loss": 2.5623, |
|
"step": 108000 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.5969060215181596e-05, |
|
"loss": 2.5703, |
|
"step": 108500 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 4.595048445580455e-05, |
|
"loss": 2.5592, |
|
"step": 109000 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 4.593190869642751e-05, |
|
"loss": 2.5846, |
|
"step": 109500 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 4.5913332937050466e-05, |
|
"loss": 2.5889, |
|
"step": 110000 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 4.589475717767342e-05, |
|
"loss": 2.5598, |
|
"step": 110500 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 4.587618141829638e-05, |
|
"loss": 2.5651, |
|
"step": 111000 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 4.585760565891934e-05, |
|
"loss": 2.5668, |
|
"step": 111500 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 4.5839029899542294e-05, |
|
"loss": 2.5509, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 4.582045414016525e-05, |
|
"loss": 2.5583, |
|
"step": 112500 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 4.580187838078821e-05, |
|
"loss": 2.5532, |
|
"step": 113000 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 4.5783302621411164e-05, |
|
"loss": 2.5517, |
|
"step": 113500 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 4.576472686203412e-05, |
|
"loss": 2.5641, |
|
"step": 114000 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 4.574615110265708e-05, |
|
"loss": 2.5515, |
|
"step": 114500 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 4.5727575343280035e-05, |
|
"loss": 2.5724, |
|
"step": 115000 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 4.570899958390299e-05, |
|
"loss": 2.5452, |
|
"step": 115500 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 4.569042382452595e-05, |
|
"loss": 2.5574, |
|
"step": 116000 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 4.5671848065148906e-05, |
|
"loss": 2.5637, |
|
"step": 116500 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 4.565327230577186e-05, |
|
"loss": 2.5539, |
|
"step": 117000 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 4.563469654639482e-05, |
|
"loss": 2.5437, |
|
"step": 117500 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 4.5616120787017776e-05, |
|
"loss": 2.5361, |
|
"step": 118000 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 4.559754502764073e-05, |
|
"loss": 2.5522, |
|
"step": 118500 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 4.557896926826369e-05, |
|
"loss": 2.5208, |
|
"step": 119000 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 4.556039350888665e-05, |
|
"loss": 2.5325, |
|
"step": 119500 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 4.5541817749509604e-05, |
|
"loss": 2.548, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"eval_loss": 2.4832136631011963, |
|
"eval_runtime": 1173.7957, |
|
"eval_samples_per_second": 407.668, |
|
"eval_steps_per_second": 50.959, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 4.552324199013256e-05, |
|
"loss": 2.5339, |
|
"step": 120500 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 4.550466623075552e-05, |
|
"loss": 2.5376, |
|
"step": 121000 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 4.5486090471378474e-05, |
|
"loss": 2.5418, |
|
"step": 121500 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 4.546751471200143e-05, |
|
"loss": 2.5264, |
|
"step": 122000 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 4.544893895262439e-05, |
|
"loss": 2.5246, |
|
"step": 122500 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 4.5430363193247345e-05, |
|
"loss": 2.5272, |
|
"step": 123000 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 4.54117874338703e-05, |
|
"loss": 2.5343, |
|
"step": 123500 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 4.539321167449326e-05, |
|
"loss": 2.5353, |
|
"step": 124000 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 4.5374635915116215e-05, |
|
"loss": 2.5169, |
|
"step": 124500 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 4.5356060155739166e-05, |
|
"loss": 2.5234, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 4.533748439636212e-05, |
|
"loss": 2.5327, |
|
"step": 125500 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 4.531890863698508e-05, |
|
"loss": 2.5133, |
|
"step": 126000 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 4.5300332877608036e-05, |
|
"loss": 2.511, |
|
"step": 126500 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 4.528175711823099e-05, |
|
"loss": 2.5247, |
|
"step": 127000 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 4.526318135885395e-05, |
|
"loss": 2.5223, |
|
"step": 127500 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 4.524460559947691e-05, |
|
"loss": 2.519, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 4.5226029840099864e-05, |
|
"loss": 2.5239, |
|
"step": 128500 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 4.520745408072282e-05, |
|
"loss": 2.5309, |
|
"step": 129000 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 4.518887832134578e-05, |
|
"loss": 2.5285, |
|
"step": 129500 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 4.5170302561968734e-05, |
|
"loss": 2.5085, |
|
"step": 130000 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 4.515172680259169e-05, |
|
"loss": 2.5184, |
|
"step": 130500 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 4.513315104321465e-05, |
|
"loss": 2.5042, |
|
"step": 131000 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 4.5114575283837605e-05, |
|
"loss": 2.5227, |
|
"step": 131500 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 4.509599952446056e-05, |
|
"loss": 2.5163, |
|
"step": 132000 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 4.507742376508352e-05, |
|
"loss": 2.516, |
|
"step": 132500 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 4.5058848005706475e-05, |
|
"loss": 2.509, |
|
"step": 133000 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 4.504027224632943e-05, |
|
"loss": 2.4914, |
|
"step": 133500 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 4.502169648695239e-05, |
|
"loss": 2.4957, |
|
"step": 134000 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 4.5003120727575346e-05, |
|
"loss": 2.5123, |
|
"step": 134500 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.49845449681983e-05, |
|
"loss": 2.4971, |
|
"step": 135000 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 4.496596920882126e-05, |
|
"loss": 2.5011, |
|
"step": 135500 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 4.494739344944422e-05, |
|
"loss": 2.4925, |
|
"step": 136000 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 4.4928817690067174e-05, |
|
"loss": 2.4903, |
|
"step": 136500 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.491024193069013e-05, |
|
"loss": 2.4814, |
|
"step": 137000 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.489166617131309e-05, |
|
"loss": 2.4838, |
|
"step": 137500 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.4873090411936044e-05, |
|
"loss": 2.4835, |
|
"step": 138000 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.4854514652559e-05, |
|
"loss": 2.4766, |
|
"step": 138500 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.483593889318196e-05, |
|
"loss": 2.5004, |
|
"step": 139000 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.4817363133804915e-05, |
|
"loss": 2.4843, |
|
"step": 139500 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.479878737442787e-05, |
|
"loss": 2.484, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"eval_loss": 2.4386236667633057, |
|
"eval_runtime": 1167.064, |
|
"eval_samples_per_second": 410.02, |
|
"eval_steps_per_second": 51.253, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.478021161505083e-05, |
|
"loss": 2.4955, |
|
"step": 140500 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.4761635855673785e-05, |
|
"loss": 2.4862, |
|
"step": 141000 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.474306009629674e-05, |
|
"loss": 2.4922, |
|
"step": 141500 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.47244843369197e-05, |
|
"loss": 2.4734, |
|
"step": 142000 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.4705908577542656e-05, |
|
"loss": 2.4911, |
|
"step": 142500 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.468733281816561e-05, |
|
"loss": 2.4899, |
|
"step": 143000 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.466875705878856e-05, |
|
"loss": 2.4889, |
|
"step": 143500 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.465018129941152e-05, |
|
"loss": 2.4845, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.463160554003448e-05, |
|
"loss": 2.4755, |
|
"step": 144500 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.4613029780657434e-05, |
|
"loss": 2.4836, |
|
"step": 145000 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 4.459445402128039e-05, |
|
"loss": 2.4606, |
|
"step": 145500 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 4.457587826190335e-05, |
|
"loss": 2.4963, |
|
"step": 146000 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 4.4557302502526304e-05, |
|
"loss": 2.4754, |
|
"step": 146500 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 4.453872674314926e-05, |
|
"loss": 2.4714, |
|
"step": 147000 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 4.452015098377222e-05, |
|
"loss": 2.466, |
|
"step": 147500 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 4.4501575224395175e-05, |
|
"loss": 2.4702, |
|
"step": 148000 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 4.448299946501813e-05, |
|
"loss": 2.4735, |
|
"step": 148500 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 4.446442370564109e-05, |
|
"loss": 2.4641, |
|
"step": 149000 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 4.4445847946264045e-05, |
|
"loss": 2.4772, |
|
"step": 149500 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 4.4427272186887e-05, |
|
"loss": 2.4615, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 4.440869642750996e-05, |
|
"loss": 2.4603, |
|
"step": 150500 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 4.4390120668132916e-05, |
|
"loss": 2.4679, |
|
"step": 151000 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 4.437154490875587e-05, |
|
"loss": 2.4712, |
|
"step": 151500 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 4.435296914937883e-05, |
|
"loss": 2.4766, |
|
"step": 152000 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 4.433439339000178e-05, |
|
"loss": 2.4638, |
|
"step": 152500 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 4.431581763062474e-05, |
|
"loss": 2.4565, |
|
"step": 153000 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 4.4297241871247694e-05, |
|
"loss": 2.4677, |
|
"step": 153500 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 4.427866611187065e-05, |
|
"loss": 2.4417, |
|
"step": 154000 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 4.426009035249361e-05, |
|
"loss": 2.4655, |
|
"step": 154500 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 4.4241514593116564e-05, |
|
"loss": 2.4513, |
|
"step": 155000 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 4.422293883373953e-05, |
|
"loss": 2.442, |
|
"step": 155500 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 4.4204363074362485e-05, |
|
"loss": 2.4576, |
|
"step": 156000 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 4.418578731498544e-05, |
|
"loss": 2.4634, |
|
"step": 156500 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 4.41672115556084e-05, |
|
"loss": 2.4598, |
|
"step": 157000 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 4.4148635796231355e-05, |
|
"loss": 2.4593, |
|
"step": 157500 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 4.413006003685431e-05, |
|
"loss": 2.458, |
|
"step": 158000 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 4.411148427747727e-05, |
|
"loss": 2.4646, |
|
"step": 158500 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 4.4092908518100226e-05, |
|
"loss": 2.4422, |
|
"step": 159000 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 4.407433275872318e-05, |
|
"loss": 2.457, |
|
"step": 159500 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 4.405575699934614e-05, |
|
"loss": 2.4523, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"eval_loss": 2.4072976112365723, |
|
"eval_runtime": 1169.4217, |
|
"eval_samples_per_second": 409.193, |
|
"eval_steps_per_second": 51.149, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 4.4037181239969097e-05, |
|
"loss": 2.4538, |
|
"step": 160500 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 4.401860548059205e-05, |
|
"loss": 2.4586, |
|
"step": 161000 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 4.400002972121501e-05, |
|
"loss": 2.4356, |
|
"step": 161500 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 4.398145396183796e-05, |
|
"loss": 2.4499, |
|
"step": 162000 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 4.396287820246092e-05, |
|
"loss": 2.4595, |
|
"step": 162500 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 4.3944302443083874e-05, |
|
"loss": 2.4467, |
|
"step": 163000 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 4.392572668370683e-05, |
|
"loss": 2.4433, |
|
"step": 163500 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 4.390715092432979e-05, |
|
"loss": 2.4355, |
|
"step": 164000 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 4.3888575164952745e-05, |
|
"loss": 2.4455, |
|
"step": 164500 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 4.38699994055757e-05, |
|
"loss": 2.4448, |
|
"step": 165000 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 4.385142364619866e-05, |
|
"loss": 2.4265, |
|
"step": 165500 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 4.3832847886821615e-05, |
|
"loss": 2.4565, |
|
"step": 166000 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 4.381427212744457e-05, |
|
"loss": 2.4497, |
|
"step": 166500 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 4.379569636806753e-05, |
|
"loss": 2.4439, |
|
"step": 167000 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 4.3777120608690486e-05, |
|
"loss": 2.4371, |
|
"step": 167500 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 4.375854484931344e-05, |
|
"loss": 2.4391, |
|
"step": 168000 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 4.37399690899364e-05, |
|
"loss": 2.4305, |
|
"step": 168500 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 4.3721393330559357e-05, |
|
"loss": 2.4374, |
|
"step": 169000 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 4.3702817571182313e-05, |
|
"loss": 2.4512, |
|
"step": 169500 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 4.368424181180527e-05, |
|
"loss": 2.4332, |
|
"step": 170000 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 4.366566605242823e-05, |
|
"loss": 2.4393, |
|
"step": 170500 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 4.364709029305118e-05, |
|
"loss": 2.441, |
|
"step": 171000 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 4.3628514533674134e-05, |
|
"loss": 2.4285, |
|
"step": 171500 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 4.360993877429709e-05, |
|
"loss": 2.4414, |
|
"step": 172000 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 4.359136301492005e-05, |
|
"loss": 2.4428, |
|
"step": 172500 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 4.3572787255543005e-05, |
|
"loss": 2.4217, |
|
"step": 173000 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 4.355421149616596e-05, |
|
"loss": 2.4316, |
|
"step": 173500 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 4.353563573678892e-05, |
|
"loss": 2.436, |
|
"step": 174000 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 4.3517059977411875e-05, |
|
"loss": 2.434, |
|
"step": 174500 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 4.349848421803483e-05, |
|
"loss": 2.446, |
|
"step": 175000 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 4.347990845865779e-05, |
|
"loss": 2.4214, |
|
"step": 175500 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 4.3461332699280746e-05, |
|
"loss": 2.4192, |
|
"step": 176000 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 4.34427569399037e-05, |
|
"loss": 2.4253, |
|
"step": 176500 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 4.342418118052666e-05, |
|
"loss": 2.4183, |
|
"step": 177000 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 4.340560542114962e-05, |
|
"loss": 2.4335, |
|
"step": 177500 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 4.338702966177258e-05, |
|
"loss": 2.4218, |
|
"step": 178000 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 4.336845390239554e-05, |
|
"loss": 2.4254, |
|
"step": 178500 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 4.3349878143018494e-05, |
|
"loss": 2.4325, |
|
"step": 179000 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 4.333130238364145e-05, |
|
"loss": 2.4248, |
|
"step": 179500 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 4.331272662426441e-05, |
|
"loss": 2.4108, |
|
"step": 180000 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"eval_loss": 2.3746867179870605, |
|
"eval_runtime": 1167.3479, |
|
"eval_samples_per_second": 409.92, |
|
"eval_steps_per_second": 51.24, |
|
"step": 180000 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 4.329415086488736e-05, |
|
"loss": 2.4213, |
|
"step": 180500 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 4.3275575105510315e-05, |
|
"loss": 2.4314, |
|
"step": 181000 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.325699934613327e-05, |
|
"loss": 2.4087, |
|
"step": 181500 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.323842358675623e-05, |
|
"loss": 2.4095, |
|
"step": 182000 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.3219847827379185e-05, |
|
"loss": 2.4348, |
|
"step": 182500 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.320127206800214e-05, |
|
"loss": 2.4169, |
|
"step": 183000 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.31826963086251e-05, |
|
"loss": 2.4208, |
|
"step": 183500 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.3164120549248056e-05, |
|
"loss": 2.4124, |
|
"step": 184000 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.314554478987101e-05, |
|
"loss": 2.4248, |
|
"step": 184500 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.312696903049397e-05, |
|
"loss": 2.4132, |
|
"step": 185000 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.3108393271116926e-05, |
|
"loss": 2.4087, |
|
"step": 185500 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.308981751173988e-05, |
|
"loss": 2.4107, |
|
"step": 186000 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 4.307124175236284e-05, |
|
"loss": 2.4198, |
|
"step": 186500 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 4.30526659929858e-05, |
|
"loss": 2.4287, |
|
"step": 187000 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 4.3034090233608754e-05, |
|
"loss": 2.4077, |
|
"step": 187500 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 4.301551447423171e-05, |
|
"loss": 2.4149, |
|
"step": 188000 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 4.299693871485467e-05, |
|
"loss": 2.4122, |
|
"step": 188500 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 4.2978362955477625e-05, |
|
"loss": 2.4147, |
|
"step": 189000 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 4.2959787196100575e-05, |
|
"loss": 2.4117, |
|
"step": 189500 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 4.294121143672353e-05, |
|
"loss": 2.3958, |
|
"step": 190000 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 4.292263567734649e-05, |
|
"loss": 2.4063, |
|
"step": 190500 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 4.2904059917969445e-05, |
|
"loss": 2.4023, |
|
"step": 191000 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 4.28854841585924e-05, |
|
"loss": 2.4059, |
|
"step": 191500 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 4.286690839921536e-05, |
|
"loss": 2.4122, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 4.2848332639838316e-05, |
|
"loss": 2.3919, |
|
"step": 192500 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 4.282975688046127e-05, |
|
"loss": 2.4151, |
|
"step": 193000 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 4.281118112108423e-05, |
|
"loss": 2.4087, |
|
"step": 193500 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 4.2792605361707186e-05, |
|
"loss": 2.4097, |
|
"step": 194000 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 4.277402960233014e-05, |
|
"loss": 2.4022, |
|
"step": 194500 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 4.27554538429531e-05, |
|
"loss": 2.4093, |
|
"step": 195000 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 4.273687808357606e-05, |
|
"loss": 2.3997, |
|
"step": 195500 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 4.2718302324199014e-05, |
|
"loss": 2.3854, |
|
"step": 196000 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 4.269972656482197e-05, |
|
"loss": 2.4188, |
|
"step": 196500 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 4.268115080544493e-05, |
|
"loss": 2.4187, |
|
"step": 197000 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 4.2662575046067885e-05, |
|
"loss": 2.4008, |
|
"step": 197500 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 4.264399928669084e-05, |
|
"loss": 2.4086, |
|
"step": 198000 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 4.26254235273138e-05, |
|
"loss": 2.4052, |
|
"step": 198500 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 4.2606847767936755e-05, |
|
"loss": 2.3966, |
|
"step": 199000 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 4.258827200855971e-05, |
|
"loss": 2.3967, |
|
"step": 199500 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.256969624918267e-05, |
|
"loss": 2.4206, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"eval_loss": 2.3452727794647217, |
|
"eval_runtime": 1168.756, |
|
"eval_samples_per_second": 409.426, |
|
"eval_steps_per_second": 51.178, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 4.2551120489805626e-05, |
|
"loss": 2.3994, |
|
"step": 200500 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 4.253254473042858e-05, |
|
"loss": 2.4004, |
|
"step": 201000 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 4.251396897105154e-05, |
|
"loss": 2.3882, |
|
"step": 201500 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 4.2495393211674496e-05, |
|
"loss": 2.3894, |
|
"step": 202000 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 4.247681745229745e-05, |
|
"loss": 2.3908, |
|
"step": 202500 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 4.245824169292041e-05, |
|
"loss": 2.3832, |
|
"step": 203000 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 4.243966593354337e-05, |
|
"loss": 2.3835, |
|
"step": 203500 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 4.2421090174166324e-05, |
|
"loss": 2.374, |
|
"step": 204000 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 4.240251441478928e-05, |
|
"loss": 2.3812, |
|
"step": 204500 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 4.238393865541224e-05, |
|
"loss": 2.3771, |
|
"step": 205000 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 4.2365362896035194e-05, |
|
"loss": 2.3816, |
|
"step": 205500 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 4.234678713665815e-05, |
|
"loss": 2.3849, |
|
"step": 206000 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 4.232821137728111e-05, |
|
"loss": 2.3945, |
|
"step": 206500 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 4.2309635617904065e-05, |
|
"loss": 2.3823, |
|
"step": 207000 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 4.229105985852702e-05, |
|
"loss": 2.3742, |
|
"step": 207500 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 4.227248409914997e-05, |
|
"loss": 2.3822, |
|
"step": 208000 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 4.225390833977293e-05, |
|
"loss": 2.3919, |
|
"step": 208500 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 4.2235332580395886e-05, |
|
"loss": 2.3726, |
|
"step": 209000 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 4.221675682101884e-05, |
|
"loss": 2.3727, |
|
"step": 209500 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 4.21981810616418e-05, |
|
"loss": 2.3765, |
|
"step": 210000 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 4.2179605302264756e-05, |
|
"loss": 2.3831, |
|
"step": 210500 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 4.216102954288771e-05, |
|
"loss": 2.3838, |
|
"step": 211000 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 4.214245378351067e-05, |
|
"loss": 2.3788, |
|
"step": 211500 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 4.212387802413363e-05, |
|
"loss": 2.3722, |
|
"step": 212000 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 4.2105302264756584e-05, |
|
"loss": 2.3849, |
|
"step": 212500 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 4.208672650537954e-05, |
|
"loss": 2.3757, |
|
"step": 213000 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 4.20681507460025e-05, |
|
"loss": 2.3825, |
|
"step": 213500 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 4.2049574986625454e-05, |
|
"loss": 2.3783, |
|
"step": 214000 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 4.203099922724841e-05, |
|
"loss": 2.3588, |
|
"step": 214500 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 4.201242346787137e-05, |
|
"loss": 2.3763, |
|
"step": 215000 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 4.1993847708494325e-05, |
|
"loss": 2.3748, |
|
"step": 215500 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 4.197527194911728e-05, |
|
"loss": 2.3613, |
|
"step": 216000 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 4.195669618974024e-05, |
|
"loss": 2.385, |
|
"step": 216500 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 4.1938120430363196e-05, |
|
"loss": 2.3727, |
|
"step": 217000 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 4.191954467098615e-05, |
|
"loss": 2.3911, |
|
"step": 217500 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 4.190096891160911e-05, |
|
"loss": 2.358, |
|
"step": 218000 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 4.1882393152232066e-05, |
|
"loss": 2.3718, |
|
"step": 218500 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 4.186381739285502e-05, |
|
"loss": 2.3734, |
|
"step": 219000 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 4.184524163347798e-05, |
|
"loss": 2.3699, |
|
"step": 219500 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 4.182666587410094e-05, |
|
"loss": 2.362, |
|
"step": 220000 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"eval_loss": 2.3277173042297363, |
|
"eval_runtime": 1172.0653, |
|
"eval_samples_per_second": 408.27, |
|
"eval_steps_per_second": 51.034, |
|
"step": 220000 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 4.1808090114723894e-05, |
|
"loss": 2.378, |
|
"step": 220500 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 4.178951435534685e-05, |
|
"loss": 2.3702, |
|
"step": 221000 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 4.177093859596981e-05, |
|
"loss": 2.3787, |
|
"step": 221500 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 4.1752362836592764e-05, |
|
"loss": 2.3755, |
|
"step": 222000 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 4.173378707721572e-05, |
|
"loss": 2.3809, |
|
"step": 222500 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 4.171521131783868e-05, |
|
"loss": 2.3747, |
|
"step": 223000 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 4.1696635558461635e-05, |
|
"loss": 2.3726, |
|
"step": 223500 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 4.167805979908459e-05, |
|
"loss": 2.3743, |
|
"step": 224000 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 4.165948403970755e-05, |
|
"loss": 2.3715, |
|
"step": 224500 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 4.1640908280330506e-05, |
|
"loss": 2.3734, |
|
"step": 225000 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 4.162233252095346e-05, |
|
"loss": 2.3728, |
|
"step": 225500 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 4.160375676157642e-05, |
|
"loss": 2.3459, |
|
"step": 226000 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 4.158518100219937e-05, |
|
"loss": 2.3441, |
|
"step": 226500 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 4.1566605242822326e-05, |
|
"loss": 2.3704, |
|
"step": 227000 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 4.154802948344528e-05, |
|
"loss": 2.3557, |
|
"step": 227500 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 4.152945372406824e-05, |
|
"loss": 2.3613, |
|
"step": 228000 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 4.15108779646912e-05, |
|
"loss": 2.3701, |
|
"step": 228500 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 4.1492302205314154e-05, |
|
"loss": 2.3622, |
|
"step": 229000 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 4.147372644593711e-05, |
|
"loss": 2.3577, |
|
"step": 229500 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 4.145515068656007e-05, |
|
"loss": 2.3664, |
|
"step": 230000 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 4.1436574927183024e-05, |
|
"loss": 2.3697, |
|
"step": 230500 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 4.141799916780598e-05, |
|
"loss": 2.3642, |
|
"step": 231000 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 4.139942340842894e-05, |
|
"loss": 2.3529, |
|
"step": 231500 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 4.1380847649051895e-05, |
|
"loss": 2.358, |
|
"step": 232000 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 4.136227188967485e-05, |
|
"loss": 2.3583, |
|
"step": 232500 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 4.134369613029781e-05, |
|
"loss": 2.3387, |
|
"step": 233000 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 4.1325120370920766e-05, |
|
"loss": 2.3523, |
|
"step": 233500 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 4.130654461154372e-05, |
|
"loss": 2.357, |
|
"step": 234000 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 4.128796885216668e-05, |
|
"loss": 2.3451, |
|
"step": 234500 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 4.1269393092789636e-05, |
|
"loss": 2.3608, |
|
"step": 235000 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 4.1250817333412586e-05, |
|
"loss": 2.3598, |
|
"step": 235500 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 4.123224157403554e-05, |
|
"loss": 2.3555, |
|
"step": 236000 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 4.12136658146585e-05, |
|
"loss": 2.3693, |
|
"step": 236500 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 4.119509005528146e-05, |
|
"loss": 2.3603, |
|
"step": 237000 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 4.1176514295904414e-05, |
|
"loss": 2.3474, |
|
"step": 237500 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 4.115793853652737e-05, |
|
"loss": 2.3459, |
|
"step": 238000 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 4.1139362777150334e-05, |
|
"loss": 2.3358, |
|
"step": 238500 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 4.112078701777329e-05, |
|
"loss": 2.3561, |
|
"step": 239000 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 4.110221125839625e-05, |
|
"loss": 2.3444, |
|
"step": 239500 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 4.1083635499019205e-05, |
|
"loss": 2.3487, |
|
"step": 240000 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"eval_loss": 2.3072495460510254, |
|
"eval_runtime": 1169.8184, |
|
"eval_samples_per_second": 409.054, |
|
"eval_steps_per_second": 51.132, |
|
"step": 240000 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 4.106505973964216e-05, |
|
"loss": 2.3609, |
|
"step": 240500 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 4.104648398026512e-05, |
|
"loss": 2.3558, |
|
"step": 241000 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 4.1027908220888075e-05, |
|
"loss": 2.3436, |
|
"step": 241500 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 4.100933246151103e-05, |
|
"loss": 2.3591, |
|
"step": 242000 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 4.099075670213399e-05, |
|
"loss": 2.351, |
|
"step": 242500 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 4.0972180942756946e-05, |
|
"loss": 2.35, |
|
"step": 243000 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 4.09536051833799e-05, |
|
"loss": 2.3409, |
|
"step": 243500 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 4.093502942400286e-05, |
|
"loss": 2.3378, |
|
"step": 244000 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 4.091645366462582e-05, |
|
"loss": 2.3332, |
|
"step": 244500 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 4.089787790524877e-05, |
|
"loss": 2.3397, |
|
"step": 245000 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 4.0879302145871724e-05, |
|
"loss": 2.3522, |
|
"step": 245500 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 4.086072638649468e-05, |
|
"loss": 2.337, |
|
"step": 246000 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 4.084215062711764e-05, |
|
"loss": 2.338, |
|
"step": 246500 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 4.0823574867740594e-05, |
|
"loss": 2.3575, |
|
"step": 247000 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 4.080499910836355e-05, |
|
"loss": 2.3475, |
|
"step": 247500 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 4.078642334898651e-05, |
|
"loss": 2.342, |
|
"step": 248000 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 4.0767847589609465e-05, |
|
"loss": 2.3267, |
|
"step": 248500 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 4.074927183023242e-05, |
|
"loss": 2.3542, |
|
"step": 249000 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 4.073069607085538e-05, |
|
"loss": 2.3408, |
|
"step": 249500 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 4.0712120311478335e-05, |
|
"loss": 2.3391, |
|
"step": 250000 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 4.069354455210129e-05, |
|
"loss": 2.3637, |
|
"step": 250500 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 4.067496879272425e-05, |
|
"loss": 2.3387, |
|
"step": 251000 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 4.0656393033347206e-05, |
|
"loss": 2.343, |
|
"step": 251500 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 4.063781727397016e-05, |
|
"loss": 2.3342, |
|
"step": 252000 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 4.061924151459312e-05, |
|
"loss": 2.3491, |
|
"step": 252500 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 4.060066575521608e-05, |
|
"loss": 2.3389, |
|
"step": 253000 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 4.0582089995839034e-05, |
|
"loss": 2.3436, |
|
"step": 253500 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 4.0563514236461984e-05, |
|
"loss": 2.3422, |
|
"step": 254000 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 4.054493847708494e-05, |
|
"loss": 2.3417, |
|
"step": 254500 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 4.05263627177079e-05, |
|
"loss": 2.3427, |
|
"step": 255000 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 4.0507786958330854e-05, |
|
"loss": 2.3573, |
|
"step": 255500 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 4.048921119895381e-05, |
|
"loss": 2.3288, |
|
"step": 256000 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 4.047063543957677e-05, |
|
"loss": 2.3442, |
|
"step": 256500 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 4.0452059680199725e-05, |
|
"loss": 2.3388, |
|
"step": 257000 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 4.043348392082268e-05, |
|
"loss": 2.3437, |
|
"step": 257500 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 4.041490816144564e-05, |
|
"loss": 2.3443, |
|
"step": 258000 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 4.0396332402068596e-05, |
|
"loss": 2.3411, |
|
"step": 258500 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 4.037775664269155e-05, |
|
"loss": 2.3433, |
|
"step": 259000 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 4.035918088331451e-05, |
|
"loss": 2.3394, |
|
"step": 259500 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 4.0340605123937466e-05, |
|
"loss": 2.3362, |
|
"step": 260000 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"eval_loss": 2.28617525100708, |
|
"eval_runtime": 1173.3291, |
|
"eval_samples_per_second": 407.83, |
|
"eval_steps_per_second": 50.979, |
|
"step": 260000 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 4.032202936456042e-05, |
|
"loss": 2.3356, |
|
"step": 260500 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 4.030345360518339e-05, |
|
"loss": 2.3292, |
|
"step": 261000 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 4.0284877845806343e-05, |
|
"loss": 2.3394, |
|
"step": 261500 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 4.02663020864293e-05, |
|
"loss": 2.3251, |
|
"step": 262000 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 4.024772632705226e-05, |
|
"loss": 2.3336, |
|
"step": 262500 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 4.0229150567675214e-05, |
|
"loss": 2.3347, |
|
"step": 263000 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 4.0210574808298164e-05, |
|
"loss": 2.322, |
|
"step": 263500 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 4.019199904892112e-05, |
|
"loss": 2.3369, |
|
"step": 264000 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 4.017342328954408e-05, |
|
"loss": 2.3512, |
|
"step": 264500 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 4.0154847530167035e-05, |
|
"loss": 2.3244, |
|
"step": 265000 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 4.013627177078999e-05, |
|
"loss": 2.3438, |
|
"step": 265500 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 4.011769601141295e-05, |
|
"loss": 2.3308, |
|
"step": 266000 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 4.0099120252035905e-05, |
|
"loss": 2.3381, |
|
"step": 266500 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 4.008054449265886e-05, |
|
"loss": 2.3419, |
|
"step": 267000 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 4.006196873328182e-05, |
|
"loss": 2.32, |
|
"step": 267500 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 4.0043392973904776e-05, |
|
"loss": 2.3292, |
|
"step": 268000 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 4.002481721452773e-05, |
|
"loss": 2.3299, |
|
"step": 268500 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 4.000624145515069e-05, |
|
"loss": 2.3196, |
|
"step": 269000 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 3.998766569577365e-05, |
|
"loss": 2.3343, |
|
"step": 269500 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 3.9969089936396604e-05, |
|
"loss": 2.3204, |
|
"step": 270000 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 3.995051417701956e-05, |
|
"loss": 2.3187, |
|
"step": 270500 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 3.993193841764252e-05, |
|
"loss": 2.3179, |
|
"step": 271000 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 3.9913362658265474e-05, |
|
"loss": 2.3186, |
|
"step": 271500 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 3.989478689888843e-05, |
|
"loss": 2.3148, |
|
"step": 272000 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 3.987621113951138e-05, |
|
"loss": 2.3124, |
|
"step": 272500 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 3.985763538013434e-05, |
|
"loss": 2.3076, |
|
"step": 273000 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 3.9839059620757295e-05, |
|
"loss": 2.3126, |
|
"step": 273500 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 3.982048386138025e-05, |
|
"loss": 2.3095, |
|
"step": 274000 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 3.980190810200321e-05, |
|
"loss": 2.3168, |
|
"step": 274500 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 3.9783332342626165e-05, |
|
"loss": 2.3255, |
|
"step": 275000 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 3.976475658324912e-05, |
|
"loss": 2.3307, |
|
"step": 275500 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 3.974618082387208e-05, |
|
"loss": 2.3287, |
|
"step": 276000 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 3.9727605064495036e-05, |
|
"loss": 2.3289, |
|
"step": 276500 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 3.970902930511799e-05, |
|
"loss": 2.3022, |
|
"step": 277000 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 3.969045354574095e-05, |
|
"loss": 2.3183, |
|
"step": 277500 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 3.967187778636391e-05, |
|
"loss": 2.3131, |
|
"step": 278000 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 3.9653302026986864e-05, |
|
"loss": 2.3048, |
|
"step": 278500 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 3.963472626760982e-05, |
|
"loss": 2.3163, |
|
"step": 279000 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 3.961615050823278e-05, |
|
"loss": 2.2998, |
|
"step": 279500 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 3.9597574748855734e-05, |
|
"loss": 2.3124, |
|
"step": 280000 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"eval_loss": 2.2729697227478027, |
|
"eval_runtime": 1170.2787, |
|
"eval_samples_per_second": 408.893, |
|
"eval_steps_per_second": 51.112, |
|
"step": 280000 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 3.957899898947869e-05, |
|
"loss": 2.3128, |
|
"step": 280500 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 3.956042323010165e-05, |
|
"loss": 2.3044, |
|
"step": 281000 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 3.9541847470724605e-05, |
|
"loss": 2.2981, |
|
"step": 281500 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 3.952327171134756e-05, |
|
"loss": 2.3192, |
|
"step": 282000 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 3.950469595197052e-05, |
|
"loss": 2.3131, |
|
"step": 282500 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 3.9486120192593475e-05, |
|
"loss": 2.3026, |
|
"step": 283000 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 3.946754443321643e-05, |
|
"loss": 2.3117, |
|
"step": 283500 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 3.944896867383939e-05, |
|
"loss": 2.3112, |
|
"step": 284000 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 3.9430392914462346e-05, |
|
"loss": 2.2928, |
|
"step": 284500 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 3.94118171550853e-05, |
|
"loss": 2.3177, |
|
"step": 285000 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 3.939324139570826e-05, |
|
"loss": 2.2941, |
|
"step": 285500 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 3.9374665636331217e-05, |
|
"loss": 2.3002, |
|
"step": 286000 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 3.9356089876954173e-05, |
|
"loss": 2.313, |
|
"step": 286500 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 3.933751411757713e-05, |
|
"loss": 2.3148, |
|
"step": 287000 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 3.931893835820009e-05, |
|
"loss": 2.2997, |
|
"step": 287500 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 3.9300362598823044e-05, |
|
"loss": 2.3193, |
|
"step": 288000 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 3.9281786839446e-05, |
|
"loss": 2.2981, |
|
"step": 288500 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 3.926321108006896e-05, |
|
"loss": 2.3109, |
|
"step": 289000 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 3.9244635320691915e-05, |
|
"loss": 2.3225, |
|
"step": 289500 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 3.922605956131487e-05, |
|
"loss": 2.3158, |
|
"step": 290000 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 3.920748380193783e-05, |
|
"loss": 2.3111, |
|
"step": 290500 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 3.918890804256078e-05, |
|
"loss": 2.3073, |
|
"step": 291000 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 3.9170332283183735e-05, |
|
"loss": 2.31, |
|
"step": 291500 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 3.915175652380669e-05, |
|
"loss": 2.3014, |
|
"step": 292000 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 3.913318076442965e-05, |
|
"loss": 2.3115, |
|
"step": 292500 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 3.9114605005052606e-05, |
|
"loss": 2.3114, |
|
"step": 293000 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 3.909602924567556e-05, |
|
"loss": 2.2953, |
|
"step": 293500 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 3.907745348629852e-05, |
|
"loss": 2.3095, |
|
"step": 294000 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 3.9058877726921477e-05, |
|
"loss": 2.306, |
|
"step": 294500 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 3.9040301967544433e-05, |
|
"loss": 2.2984, |
|
"step": 295000 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 3.902172620816739e-05, |
|
"loss": 2.3158, |
|
"step": 295500 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 3.900315044879035e-05, |
|
"loss": 2.3099, |
|
"step": 296000 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 3.8984574689413304e-05, |
|
"loss": 2.3073, |
|
"step": 296500 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 3.896599893003626e-05, |
|
"loss": 2.2892, |
|
"step": 297000 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 3.894742317065922e-05, |
|
"loss": 2.2994, |
|
"step": 297500 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 3.8928847411282175e-05, |
|
"loss": 2.3005, |
|
"step": 298000 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 3.891027165190513e-05, |
|
"loss": 2.2941, |
|
"step": 298500 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 3.889169589252809e-05, |
|
"loss": 2.2916, |
|
"step": 299000 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 3.8873120133151045e-05, |
|
"loss": 2.2974, |
|
"step": 299500 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 3.8854544373774e-05, |
|
"loss": 2.3023, |
|
"step": 300000 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"eval_loss": 2.2589497566223145, |
|
"eval_runtime": 1170.5911, |
|
"eval_samples_per_second": 408.784, |
|
"eval_steps_per_second": 51.098, |
|
"step": 300000 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 3.883596861439696e-05, |
|
"loss": 2.3098, |
|
"step": 300500 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 3.8817392855019916e-05, |
|
"loss": 2.3006, |
|
"step": 301000 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 3.879881709564287e-05, |
|
"loss": 2.3057, |
|
"step": 301500 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 3.878024133626583e-05, |
|
"loss": 2.2969, |
|
"step": 302000 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 3.8761665576888786e-05, |
|
"loss": 2.2979, |
|
"step": 302500 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 3.874308981751174e-05, |
|
"loss": 2.3112, |
|
"step": 303000 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 3.87245140581347e-05, |
|
"loss": 2.301, |
|
"step": 303500 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 3.870593829875766e-05, |
|
"loss": 2.3021, |
|
"step": 304000 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 3.8687362539380614e-05, |
|
"loss": 2.2979, |
|
"step": 304500 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 3.866878678000357e-05, |
|
"loss": 2.2935, |
|
"step": 305000 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 3.865021102062653e-05, |
|
"loss": 2.3147, |
|
"step": 305500 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 3.8631635261249485e-05, |
|
"loss": 2.2913, |
|
"step": 306000 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 3.861305950187244e-05, |
|
"loss": 2.2993, |
|
"step": 306500 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 3.85944837424954e-05, |
|
"loss": 2.2963, |
|
"step": 307000 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 3.8575907983118355e-05, |
|
"loss": 2.3011, |
|
"step": 307500 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 3.855733222374131e-05, |
|
"loss": 2.299, |
|
"step": 308000 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 3.853875646436427e-05, |
|
"loss": 2.2991, |
|
"step": 308500 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 3.8520180704987226e-05, |
|
"loss": 2.2924, |
|
"step": 309000 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 3.8501604945610176e-05, |
|
"loss": 2.3082, |
|
"step": 309500 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 3.848302918623313e-05, |
|
"loss": 2.2975, |
|
"step": 310000 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 3.846445342685609e-05, |
|
"loss": 2.2793, |
|
"step": 310500 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 3.8445877667479046e-05, |
|
"loss": 2.2974, |
|
"step": 311000 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 3.8427301908102e-05, |
|
"loss": 2.2725, |
|
"step": 311500 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 3.840872614872496e-05, |
|
"loss": 2.2981, |
|
"step": 312000 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 3.839015038934792e-05, |
|
"loss": 2.2918, |
|
"step": 312500 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 3.8371574629970874e-05, |
|
"loss": 2.3004, |
|
"step": 313000 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 3.835299887059383e-05, |
|
"loss": 2.2885, |
|
"step": 313500 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 3.833442311121679e-05, |
|
"loss": 2.2865, |
|
"step": 314000 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 3.8315847351839745e-05, |
|
"loss": 2.2962, |
|
"step": 314500 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 3.82972715924627e-05, |
|
"loss": 2.2958, |
|
"step": 315000 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 3.827869583308566e-05, |
|
"loss": 2.2916, |
|
"step": 315500 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 3.8260120073708615e-05, |
|
"loss": 2.286, |
|
"step": 316000 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 3.824154431433157e-05, |
|
"loss": 2.2902, |
|
"step": 316500 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 3.822296855495453e-05, |
|
"loss": 2.285, |
|
"step": 317000 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 3.8204392795577486e-05, |
|
"loss": 2.2856, |
|
"step": 317500 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 3.818581703620044e-05, |
|
"loss": 2.2944, |
|
"step": 318000 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 3.81672412768234e-05, |
|
"loss": 2.2838, |
|
"step": 318500 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 3.814866551744635e-05, |
|
"loss": 2.2844, |
|
"step": 319000 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 3.8130089758069306e-05, |
|
"loss": 2.2976, |
|
"step": 319500 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 3.811151399869226e-05, |
|
"loss": 2.2939, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"eval_loss": 2.23964524269104, |
|
"eval_runtime": 1167.8582, |
|
"eval_samples_per_second": 409.741, |
|
"eval_steps_per_second": 51.218, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 3.809293823931522e-05, |
|
"loss": 2.2839, |
|
"step": 320500 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 3.807436247993818e-05, |
|
"loss": 2.2969, |
|
"step": 321000 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 3.805578672056114e-05, |
|
"loss": 2.2928, |
|
"step": 321500 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 3.80372109611841e-05, |
|
"loss": 2.2835, |
|
"step": 322000 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 3.8018635201807054e-05, |
|
"loss": 2.2863, |
|
"step": 322500 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 3.800005944243001e-05, |
|
"loss": 2.2792, |
|
"step": 323000 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 3.798148368305297e-05, |
|
"loss": 2.2975, |
|
"step": 323500 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 3.7962907923675925e-05, |
|
"loss": 2.2984, |
|
"step": 324000 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 3.794433216429888e-05, |
|
"loss": 2.2908, |
|
"step": 324500 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 3.792575640492184e-05, |
|
"loss": 2.2916, |
|
"step": 325000 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 3.7907180645544796e-05, |
|
"loss": 2.2923, |
|
"step": 325500 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 3.788860488616775e-05, |
|
"loss": 2.2853, |
|
"step": 326000 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 3.787002912679071e-05, |
|
"loss": 2.273, |
|
"step": 326500 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 3.7851453367413666e-05, |
|
"loss": 2.29, |
|
"step": 327000 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 3.783287760803662e-05, |
|
"loss": 2.2899, |
|
"step": 327500 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 3.781430184865957e-05, |
|
"loss": 2.2808, |
|
"step": 328000 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 3.779572608928253e-05, |
|
"loss": 2.2711, |
|
"step": 328500 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 3.777715032990549e-05, |
|
"loss": 2.2725, |
|
"step": 329000 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 3.7758574570528444e-05, |
|
"loss": 2.2751, |
|
"step": 329500 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 3.77399988111514e-05, |
|
"loss": 2.2781, |
|
"step": 330000 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 3.772142305177436e-05, |
|
"loss": 2.272, |
|
"step": 330500 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 3.7702847292397314e-05, |
|
"loss": 2.2925, |
|
"step": 331000 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 3.768427153302027e-05, |
|
"loss": 2.2816, |
|
"step": 331500 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 3.766569577364323e-05, |
|
"loss": 2.2754, |
|
"step": 332000 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 3.7647120014266185e-05, |
|
"loss": 2.2754, |
|
"step": 332500 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 3.762854425488914e-05, |
|
"loss": 2.2822, |
|
"step": 333000 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 3.76099684955121e-05, |
|
"loss": 2.296, |
|
"step": 333500 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 3.7591392736135056e-05, |
|
"loss": 2.2843, |
|
"step": 334000 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 3.757281697675801e-05, |
|
"loss": 2.286, |
|
"step": 334500 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 3.755424121738097e-05, |
|
"loss": 2.2677, |
|
"step": 335000 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 3.7535665458003926e-05, |
|
"loss": 2.2799, |
|
"step": 335500 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 3.751708969862688e-05, |
|
"loss": 2.2951, |
|
"step": 336000 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 3.749851393924984e-05, |
|
"loss": 2.275, |
|
"step": 336500 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"learning_rate": 3.74799381798728e-05, |
|
"loss": 2.2563, |
|
"step": 337000 |
|
}, |
|
{ |
|
"epoch": 5.02, |
|
"learning_rate": 3.746136242049575e-05, |
|
"loss": 2.2742, |
|
"step": 337500 |
|
}, |
|
{ |
|
"epoch": 5.02, |
|
"learning_rate": 3.7442786661118704e-05, |
|
"loss": 2.2554, |
|
"step": 338000 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"learning_rate": 3.742421090174166e-05, |
|
"loss": 2.2593, |
|
"step": 338500 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"learning_rate": 3.740563514236462e-05, |
|
"loss": 2.2618, |
|
"step": 339000 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"learning_rate": 3.7387059382987574e-05, |
|
"loss": 2.2789, |
|
"step": 339500 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"learning_rate": 3.736848362361053e-05, |
|
"loss": 2.259, |
|
"step": 340000 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"eval_loss": 2.2331488132476807, |
|
"eval_runtime": 1167.7161, |
|
"eval_samples_per_second": 409.791, |
|
"eval_steps_per_second": 51.224, |
|
"step": 340000 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 3.734990786423349e-05, |
|
"loss": 2.2633, |
|
"step": 340500 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"learning_rate": 3.7331332104856445e-05, |
|
"loss": 2.2652, |
|
"step": 341000 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"learning_rate": 3.73127563454794e-05, |
|
"loss": 2.265, |
|
"step": 341500 |
|
}, |
|
{ |
|
"epoch": 5.08, |
|
"learning_rate": 3.729418058610236e-05, |
|
"loss": 2.257, |
|
"step": 342000 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 3.7275604826725316e-05, |
|
"loss": 2.2673, |
|
"step": 342500 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 3.725702906734827e-05, |
|
"loss": 2.2638, |
|
"step": 343000 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 3.723845330797123e-05, |
|
"loss": 2.2633, |
|
"step": 343500 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"learning_rate": 3.721987754859419e-05, |
|
"loss": 2.2703, |
|
"step": 344000 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"learning_rate": 3.720130178921715e-05, |
|
"loss": 2.2733, |
|
"step": 344500 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 3.718272602984011e-05, |
|
"loss": 2.2554, |
|
"step": 345000 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 3.7164150270463064e-05, |
|
"loss": 2.2698, |
|
"step": 345500 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"learning_rate": 3.714557451108602e-05, |
|
"loss": 2.2485, |
|
"step": 346000 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"learning_rate": 3.712699875170897e-05, |
|
"loss": 2.2506, |
|
"step": 346500 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"learning_rate": 3.710842299233193e-05, |
|
"loss": 2.2747, |
|
"step": 347000 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"learning_rate": 3.7089847232954884e-05, |
|
"loss": 2.2738, |
|
"step": 347500 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"learning_rate": 3.707127147357784e-05, |
|
"loss": 2.2735, |
|
"step": 348000 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 3.70526957142008e-05, |
|
"loss": 2.2552, |
|
"step": 348500 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 3.7034119954823755e-05, |
|
"loss": 2.2401, |
|
"step": 349000 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 3.701554419544671e-05, |
|
"loss": 2.2791, |
|
"step": 349500 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"learning_rate": 3.699696843606967e-05, |
|
"loss": 2.2582, |
|
"step": 350000 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 3.6978392676692626e-05, |
|
"loss": 2.2702, |
|
"step": 350500 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"learning_rate": 3.695981691731558e-05, |
|
"loss": 2.2713, |
|
"step": 351000 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"learning_rate": 3.694124115793854e-05, |
|
"loss": 2.2739, |
|
"step": 351500 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"learning_rate": 3.6922665398561496e-05, |
|
"loss": 2.2761, |
|
"step": 352000 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"learning_rate": 3.690408963918445e-05, |
|
"loss": 2.2729, |
|
"step": 352500 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 3.688551387980741e-05, |
|
"loss": 2.2559, |
|
"step": 353000 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 3.686693812043037e-05, |
|
"loss": 2.2592, |
|
"step": 353500 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"learning_rate": 3.6848362361053324e-05, |
|
"loss": 2.2654, |
|
"step": 354000 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"learning_rate": 3.682978660167628e-05, |
|
"loss": 2.2688, |
|
"step": 354500 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 3.681121084229924e-05, |
|
"loss": 2.2707, |
|
"step": 355000 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 3.679263508292219e-05, |
|
"loss": 2.2546, |
|
"step": 355500 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"learning_rate": 3.6774059323545144e-05, |
|
"loss": 2.252, |
|
"step": 356000 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"learning_rate": 3.67554835641681e-05, |
|
"loss": 2.2726, |
|
"step": 356500 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 3.673690780479106e-05, |
|
"loss": 2.2653, |
|
"step": 357000 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 3.6718332045414015e-05, |
|
"loss": 2.2718, |
|
"step": 357500 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"learning_rate": 3.669975628603697e-05, |
|
"loss": 2.2759, |
|
"step": 358000 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"learning_rate": 3.668118052665993e-05, |
|
"loss": 2.2726, |
|
"step": 358500 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"learning_rate": 3.6662604767282886e-05, |
|
"loss": 2.2477, |
|
"step": 359000 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"learning_rate": 3.664402900790584e-05, |
|
"loss": 2.2541, |
|
"step": 359500 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"learning_rate": 3.66254532485288e-05, |
|
"loss": 2.2599, |
|
"step": 360000 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"eval_loss": 2.2197089195251465, |
|
"eval_runtime": 1171.2972, |
|
"eval_samples_per_second": 408.538, |
|
"eval_steps_per_second": 51.067, |
|
"step": 360000 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 3.6606877489151756e-05, |
|
"loss": 2.2723, |
|
"step": 360500 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 3.658830172977471e-05, |
|
"loss": 2.2763, |
|
"step": 361000 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"learning_rate": 3.656972597039767e-05, |
|
"loss": 2.2848, |
|
"step": 361500 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"learning_rate": 3.655115021102063e-05, |
|
"loss": 2.2703, |
|
"step": 362000 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 3.6532574451643584e-05, |
|
"loss": 2.2587, |
|
"step": 362500 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 3.651399869226654e-05, |
|
"loss": 2.2558, |
|
"step": 363000 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"learning_rate": 3.64954229328895e-05, |
|
"loss": 2.2648, |
|
"step": 363500 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"learning_rate": 3.6476847173512454e-05, |
|
"loss": 2.2641, |
|
"step": 364000 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"learning_rate": 3.645827141413541e-05, |
|
"loss": 2.27, |
|
"step": 364500 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"learning_rate": 3.643969565475837e-05, |
|
"loss": 2.2674, |
|
"step": 365000 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"learning_rate": 3.6421119895381325e-05, |
|
"loss": 2.2521, |
|
"step": 365500 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"learning_rate": 3.640254413600428e-05, |
|
"loss": 2.2581, |
|
"step": 366000 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 3.638396837662724e-05, |
|
"loss": 2.2676, |
|
"step": 366500 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 3.6365392617250196e-05, |
|
"loss": 2.2647, |
|
"step": 367000 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"learning_rate": 3.634681685787315e-05, |
|
"loss": 2.2673, |
|
"step": 367500 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"learning_rate": 3.632824109849611e-05, |
|
"loss": 2.2454, |
|
"step": 368000 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"learning_rate": 3.6309665339119066e-05, |
|
"loss": 2.2705, |
|
"step": 368500 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"learning_rate": 3.629108957974202e-05, |
|
"loss": 2.261, |
|
"step": 369000 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"learning_rate": 3.627251382036498e-05, |
|
"loss": 2.2623, |
|
"step": 369500 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 3.625393806098794e-05, |
|
"loss": 2.2501, |
|
"step": 370000 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"learning_rate": 3.6235362301610894e-05, |
|
"loss": 2.2412, |
|
"step": 370500 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"learning_rate": 3.621678654223385e-05, |
|
"loss": 2.2633, |
|
"step": 371000 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"learning_rate": 3.619821078285681e-05, |
|
"loss": 2.2673, |
|
"step": 371500 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 3.6179635023479764e-05, |
|
"loss": 2.254, |
|
"step": 372000 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"learning_rate": 3.616105926410272e-05, |
|
"loss": 2.2498, |
|
"step": 372500 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"learning_rate": 3.614248350472568e-05, |
|
"loss": 2.2674, |
|
"step": 373000 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 3.6123907745348635e-05, |
|
"loss": 2.2512, |
|
"step": 373500 |
|
}, |
|
{ |
|
"epoch": 5.56, |
|
"learning_rate": 3.6105331985971585e-05, |
|
"loss": 2.2631, |
|
"step": 374000 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"learning_rate": 3.608675622659454e-05, |
|
"loss": 2.2676, |
|
"step": 374500 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"learning_rate": 3.60681804672175e-05, |
|
"loss": 2.2579, |
|
"step": 375000 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 3.6049604707840456e-05, |
|
"loss": 2.2494, |
|
"step": 375500 |
|
}, |
|
{ |
|
"epoch": 5.59, |
|
"learning_rate": 3.603102894846341e-05, |
|
"loss": 2.2611, |
|
"step": 376000 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 3.601245318908637e-05, |
|
"loss": 2.2579, |
|
"step": 376500 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 3.5993877429709326e-05, |
|
"loss": 2.2426, |
|
"step": 377000 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 3.597530167033228e-05, |
|
"loss": 2.2619, |
|
"step": 377500 |
|
}, |
|
{ |
|
"epoch": 5.62, |
|
"learning_rate": 3.595672591095524e-05, |
|
"loss": 2.2522, |
|
"step": 378000 |
|
}, |
|
{ |
|
"epoch": 5.62, |
|
"learning_rate": 3.59381501515782e-05, |
|
"loss": 2.2407, |
|
"step": 378500 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"learning_rate": 3.5919574392201154e-05, |
|
"loss": 2.246, |
|
"step": 379000 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 3.590099863282411e-05, |
|
"loss": 2.2502, |
|
"step": 379500 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"learning_rate": 3.588242287344707e-05, |
|
"loss": 2.2592, |
|
"step": 380000 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"eval_loss": 2.2073557376861572, |
|
"eval_runtime": 1169.5113, |
|
"eval_samples_per_second": 409.161, |
|
"eval_steps_per_second": 51.145, |
|
"step": 380000 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"learning_rate": 3.5863847114070024e-05, |
|
"loss": 2.2533, |
|
"step": 380500 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 3.584527135469298e-05, |
|
"loss": 2.2621, |
|
"step": 381000 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"learning_rate": 3.582669559531594e-05, |
|
"loss": 2.271, |
|
"step": 381500 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 3.5808119835938895e-05, |
|
"loss": 2.2483, |
|
"step": 382000 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 3.578954407656185e-05, |
|
"loss": 2.2463, |
|
"step": 382500 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"learning_rate": 3.577096831718481e-05, |
|
"loss": 2.2378, |
|
"step": 383000 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"learning_rate": 3.5752392557807765e-05, |
|
"loss": 2.2455, |
|
"step": 383500 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"learning_rate": 3.573381679843072e-05, |
|
"loss": 2.2523, |
|
"step": 384000 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"learning_rate": 3.571524103905368e-05, |
|
"loss": 2.2671, |
|
"step": 384500 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"learning_rate": 3.5696665279676636e-05, |
|
"loss": 2.2539, |
|
"step": 385000 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 3.567808952029959e-05, |
|
"loss": 2.2363, |
|
"step": 385500 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"learning_rate": 3.565951376092255e-05, |
|
"loss": 2.2596, |
|
"step": 386000 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"learning_rate": 3.564093800154551e-05, |
|
"loss": 2.2579, |
|
"step": 386500 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"learning_rate": 3.5622362242168464e-05, |
|
"loss": 2.2377, |
|
"step": 387000 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"learning_rate": 3.560378648279142e-05, |
|
"loss": 2.2446, |
|
"step": 387500 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"learning_rate": 3.558521072341438e-05, |
|
"loss": 2.2591, |
|
"step": 388000 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"learning_rate": 3.5566634964037334e-05, |
|
"loss": 2.2553, |
|
"step": 388500 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"learning_rate": 3.554805920466029e-05, |
|
"loss": 2.2583, |
|
"step": 389000 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"learning_rate": 3.552948344528325e-05, |
|
"loss": 2.2662, |
|
"step": 389500 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"learning_rate": 3.5510907685906205e-05, |
|
"loss": 2.2561, |
|
"step": 390000 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"learning_rate": 3.549233192652916e-05, |
|
"loss": 2.2549, |
|
"step": 390500 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"learning_rate": 3.547375616715212e-05, |
|
"loss": 2.2479, |
|
"step": 391000 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 3.5455180407775075e-05, |
|
"loss": 2.252, |
|
"step": 391500 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 3.543660464839803e-05, |
|
"loss": 2.2437, |
|
"step": 392000 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 3.541802888902098e-05, |
|
"loss": 2.2543, |
|
"step": 392500 |
|
}, |
|
{ |
|
"epoch": 5.84, |
|
"learning_rate": 3.539945312964394e-05, |
|
"loss": 2.2443, |
|
"step": 393000 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 3.5380877370266896e-05, |
|
"loss": 2.2419, |
|
"step": 393500 |
|
}, |
|
{ |
|
"epoch": 5.86, |
|
"learning_rate": 3.536230161088985e-05, |
|
"loss": 2.246, |
|
"step": 394000 |
|
}, |
|
{ |
|
"epoch": 5.86, |
|
"learning_rate": 3.534372585151281e-05, |
|
"loss": 2.2477, |
|
"step": 394500 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"learning_rate": 3.532515009213577e-05, |
|
"loss": 2.2246, |
|
"step": 395000 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"learning_rate": 3.5306574332758724e-05, |
|
"loss": 2.239, |
|
"step": 395500 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"learning_rate": 3.528799857338168e-05, |
|
"loss": 2.2489, |
|
"step": 396000 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 3.526942281400464e-05, |
|
"loss": 2.258, |
|
"step": 396500 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"learning_rate": 3.5250847054627594e-05, |
|
"loss": 2.2508, |
|
"step": 397000 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"learning_rate": 3.523227129525055e-05, |
|
"loss": 2.2514, |
|
"step": 397500 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"learning_rate": 3.521369553587351e-05, |
|
"loss": 2.2521, |
|
"step": 398000 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"learning_rate": 3.5195119776496465e-05, |
|
"loss": 2.2354, |
|
"step": 398500 |
|
}, |
|
{ |
|
"epoch": 5.93, |
|
"learning_rate": 3.517654401711942e-05, |
|
"loss": 2.2356, |
|
"step": 399000 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 3.515796825774238e-05, |
|
"loss": 2.2398, |
|
"step": 399500 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 3.5139392498365335e-05, |
|
"loss": 2.2509, |
|
"step": 400000 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"eval_loss": 2.1972875595092773, |
|
"eval_runtime": 1180.0595, |
|
"eval_samples_per_second": 405.504, |
|
"eval_steps_per_second": 50.688, |
|
"step": 400000 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 3.512081673898829e-05, |
|
"loss": 2.2406, |
|
"step": 400500 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"learning_rate": 3.510224097961125e-05, |
|
"loss": 2.248, |
|
"step": 401000 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"learning_rate": 3.5083665220234206e-05, |
|
"loss": 2.2454, |
|
"step": 401500 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"learning_rate": 3.5065089460857156e-05, |
|
"loss": 2.2378, |
|
"step": 402000 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"learning_rate": 3.504651370148011e-05, |
|
"loss": 2.2527, |
|
"step": 402500 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"learning_rate": 3.502793794210307e-05, |
|
"loss": 2.2499, |
|
"step": 403000 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"learning_rate": 3.500936218272603e-05, |
|
"loss": 2.2522, |
|
"step": 403500 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"learning_rate": 3.4990786423348984e-05, |
|
"loss": 2.2534, |
|
"step": 404000 |
|
}, |
|
{ |
|
"epoch": 6.01, |
|
"learning_rate": 3.497221066397194e-05, |
|
"loss": 2.2333, |
|
"step": 404500 |
|
}, |
|
{ |
|
"epoch": 6.02, |
|
"learning_rate": 3.4953634904594904e-05, |
|
"loss": 2.2299, |
|
"step": 405000 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 3.493505914521786e-05, |
|
"loss": 2.2253, |
|
"step": 405500 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 3.491648338584082e-05, |
|
"loss": 2.2195, |
|
"step": 406000 |
|
}, |
|
{ |
|
"epoch": 6.04, |
|
"learning_rate": 3.4897907626463775e-05, |
|
"loss": 2.2268, |
|
"step": 406500 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"learning_rate": 3.487933186708673e-05, |
|
"loss": 2.2463, |
|
"step": 407000 |
|
}, |
|
{ |
|
"epoch": 6.06, |
|
"learning_rate": 3.486075610770969e-05, |
|
"loss": 2.2348, |
|
"step": 407500 |
|
}, |
|
{ |
|
"epoch": 6.06, |
|
"learning_rate": 3.4842180348332645e-05, |
|
"loss": 2.2322, |
|
"step": 408000 |
|
}, |
|
{ |
|
"epoch": 6.07, |
|
"learning_rate": 3.48236045889556e-05, |
|
"loss": 2.2329, |
|
"step": 408500 |
|
}, |
|
{ |
|
"epoch": 6.08, |
|
"learning_rate": 3.480502882957856e-05, |
|
"loss": 2.2468, |
|
"step": 409000 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"learning_rate": 3.4786453070201516e-05, |
|
"loss": 2.2346, |
|
"step": 409500 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"learning_rate": 3.476787731082447e-05, |
|
"loss": 2.2336, |
|
"step": 410000 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"learning_rate": 3.474930155144743e-05, |
|
"loss": 2.2469, |
|
"step": 410500 |
|
}, |
|
{ |
|
"epoch": 6.11, |
|
"learning_rate": 3.473072579207038e-05, |
|
"loss": 2.2323, |
|
"step": 411000 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"learning_rate": 3.4712150032693337e-05, |
|
"loss": 2.2362, |
|
"step": 411500 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"learning_rate": 3.4693574273316293e-05, |
|
"loss": 2.2301, |
|
"step": 412000 |
|
}, |
|
{ |
|
"epoch": 6.13, |
|
"learning_rate": 3.467499851393925e-05, |
|
"loss": 2.2322, |
|
"step": 412500 |
|
}, |
|
{ |
|
"epoch": 6.14, |
|
"learning_rate": 3.465642275456221e-05, |
|
"loss": 2.2366, |
|
"step": 413000 |
|
}, |
|
{ |
|
"epoch": 6.14, |
|
"learning_rate": 3.4637846995185164e-05, |
|
"loss": 2.2288, |
|
"step": 413500 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"learning_rate": 3.461927123580812e-05, |
|
"loss": 2.2249, |
|
"step": 414000 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"learning_rate": 3.460069547643108e-05, |
|
"loss": 2.223, |
|
"step": 414500 |
|
}, |
|
{ |
|
"epoch": 6.17, |
|
"learning_rate": 3.4582119717054035e-05, |
|
"loss": 2.2451, |
|
"step": 415000 |
|
}, |
|
{ |
|
"epoch": 6.17, |
|
"learning_rate": 3.456354395767699e-05, |
|
"loss": 2.2193, |
|
"step": 415500 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"learning_rate": 3.454496819829995e-05, |
|
"loss": 2.2199, |
|
"step": 416000 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"learning_rate": 3.4526392438922905e-05, |
|
"loss": 2.2209, |
|
"step": 416500 |
|
}, |
|
{ |
|
"epoch": 6.2, |
|
"learning_rate": 3.450781667954586e-05, |
|
"loss": 2.2412, |
|
"step": 417000 |
|
}, |
|
{ |
|
"epoch": 6.2, |
|
"learning_rate": 3.448924092016882e-05, |
|
"loss": 2.2233, |
|
"step": 417500 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"learning_rate": 3.4470665160791776e-05, |
|
"loss": 2.2238, |
|
"step": 418000 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"learning_rate": 3.445208940141473e-05, |
|
"loss": 2.2278, |
|
"step": 418500 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"learning_rate": 3.443351364203769e-05, |
|
"loss": 2.2463, |
|
"step": 419000 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"learning_rate": 3.4414937882660646e-05, |
|
"loss": 2.2274, |
|
"step": 419500 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"learning_rate": 3.43963621232836e-05, |
|
"loss": 2.2398, |
|
"step": 420000 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"eval_loss": 2.1873366832733154, |
|
"eval_runtime": 1175.9686, |
|
"eval_samples_per_second": 406.915, |
|
"eval_steps_per_second": 50.864, |
|
"step": 420000 |
|
}, |
|
{ |
|
"epoch": 6.25, |
|
"learning_rate": 3.4377786363906553e-05, |
|
"loss": 2.2302, |
|
"step": 420500 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"learning_rate": 3.435921060452951e-05, |
|
"loss": 2.2177, |
|
"step": 421000 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"learning_rate": 3.434063484515247e-05, |
|
"loss": 2.2148, |
|
"step": 421500 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"learning_rate": 3.4322059085775424e-05, |
|
"loss": 2.2423, |
|
"step": 422000 |
|
}, |
|
{ |
|
"epoch": 6.28, |
|
"learning_rate": 3.430348332639838e-05, |
|
"loss": 2.2232, |
|
"step": 422500 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"learning_rate": 3.428490756702134e-05, |
|
"loss": 2.2277, |
|
"step": 423000 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"learning_rate": 3.4266331807644295e-05, |
|
"loss": 2.2159, |
|
"step": 423500 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"learning_rate": 3.424775604826725e-05, |
|
"loss": 2.2154, |
|
"step": 424000 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"learning_rate": 3.422918028889021e-05, |
|
"loss": 2.2239, |
|
"step": 424500 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"learning_rate": 3.4210604529513165e-05, |
|
"loss": 2.2272, |
|
"step": 425000 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"learning_rate": 3.419202877013612e-05, |
|
"loss": 2.2287, |
|
"step": 425500 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"learning_rate": 3.417345301075908e-05, |
|
"loss": 2.2073, |
|
"step": 426000 |
|
}, |
|
{ |
|
"epoch": 6.34, |
|
"learning_rate": 3.4154877251382036e-05, |
|
"loss": 2.2283, |
|
"step": 426500 |
|
}, |
|
{ |
|
"epoch": 6.35, |
|
"learning_rate": 3.4136301492005e-05, |
|
"loss": 2.2245, |
|
"step": 427000 |
|
}, |
|
{ |
|
"epoch": 6.35, |
|
"learning_rate": 3.4117725732627956e-05, |
|
"loss": 2.2176, |
|
"step": 427500 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"learning_rate": 3.409914997325091e-05, |
|
"loss": 2.2198, |
|
"step": 428000 |
|
}, |
|
{ |
|
"epoch": 6.37, |
|
"learning_rate": 3.408057421387387e-05, |
|
"loss": 2.2204, |
|
"step": 428500 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"learning_rate": 3.406199845449683e-05, |
|
"loss": 2.228, |
|
"step": 429000 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"learning_rate": 3.404342269511978e-05, |
|
"loss": 2.2358, |
|
"step": 429500 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 3.4024846935742734e-05, |
|
"loss": 2.2181, |
|
"step": 430000 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"learning_rate": 3.400627117636569e-05, |
|
"loss": 2.2349, |
|
"step": 430500 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"learning_rate": 3.398769541698865e-05, |
|
"loss": 2.2303, |
|
"step": 431000 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"learning_rate": 3.3969119657611605e-05, |
|
"loss": 2.2166, |
|
"step": 431500 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"learning_rate": 3.395054389823456e-05, |
|
"loss": 2.2314, |
|
"step": 432000 |
|
}, |
|
{ |
|
"epoch": 6.43, |
|
"learning_rate": 3.393196813885752e-05, |
|
"loss": 2.223, |
|
"step": 432500 |
|
}, |
|
{ |
|
"epoch": 6.43, |
|
"learning_rate": 3.3913392379480475e-05, |
|
"loss": 2.2243, |
|
"step": 433000 |
|
}, |
|
{ |
|
"epoch": 6.44, |
|
"learning_rate": 3.389481662010343e-05, |
|
"loss": 2.2128, |
|
"step": 433500 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"learning_rate": 3.387624086072639e-05, |
|
"loss": 2.2173, |
|
"step": 434000 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"learning_rate": 3.3857665101349346e-05, |
|
"loss": 2.2156, |
|
"step": 434500 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"learning_rate": 3.38390893419723e-05, |
|
"loss": 2.2206, |
|
"step": 435000 |
|
}, |
|
{ |
|
"epoch": 6.47, |
|
"learning_rate": 3.382051358259526e-05, |
|
"loss": 2.2302, |
|
"step": 435500 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"learning_rate": 3.3801937823218216e-05, |
|
"loss": 2.2321, |
|
"step": 436000 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"learning_rate": 3.378336206384117e-05, |
|
"loss": 2.2234, |
|
"step": 436500 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"learning_rate": 3.376478630446413e-05, |
|
"loss": 2.2253, |
|
"step": 437000 |
|
}, |
|
{ |
|
"epoch": 6.5, |
|
"learning_rate": 3.374621054508709e-05, |
|
"loss": 2.2317, |
|
"step": 437500 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"learning_rate": 3.3727634785710044e-05, |
|
"loss": 2.2242, |
|
"step": 438000 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"learning_rate": 3.3709059026333e-05, |
|
"loss": 2.2215, |
|
"step": 438500 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"learning_rate": 3.369048326695595e-05, |
|
"loss": 2.2171, |
|
"step": 439000 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"learning_rate": 3.367190750757891e-05, |
|
"loss": 2.2373, |
|
"step": 439500 |
|
}, |
|
{ |
|
"epoch": 6.54, |
|
"learning_rate": 3.3653331748201865e-05, |
|
"loss": 2.2186, |
|
"step": 440000 |
|
}, |
|
{ |
|
"epoch": 6.54, |
|
"eval_loss": 2.1790640354156494, |
|
"eval_runtime": 1174.122, |
|
"eval_samples_per_second": 407.555, |
|
"eval_steps_per_second": 50.944, |
|
"step": 440000 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"learning_rate": 3.363475598882482e-05, |
|
"loss": 2.2281, |
|
"step": 440500 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"learning_rate": 3.361618022944778e-05, |
|
"loss": 2.22, |
|
"step": 441000 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"learning_rate": 3.3597604470070735e-05, |
|
"loss": 2.2231, |
|
"step": 441500 |
|
}, |
|
{ |
|
"epoch": 6.57, |
|
"learning_rate": 3.357902871069369e-05, |
|
"loss": 2.2214, |
|
"step": 442000 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 3.356045295131665e-05, |
|
"loss": 2.2324, |
|
"step": 442500 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 3.3541877191939606e-05, |
|
"loss": 2.2181, |
|
"step": 443000 |
|
}, |
|
{ |
|
"epoch": 6.59, |
|
"learning_rate": 3.352330143256256e-05, |
|
"loss": 2.2236, |
|
"step": 443500 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 3.350472567318552e-05, |
|
"loss": 2.2106, |
|
"step": 444000 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 3.3486149913808476e-05, |
|
"loss": 2.2127, |
|
"step": 444500 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 3.346757415443143e-05, |
|
"loss": 2.2115, |
|
"step": 445000 |
|
}, |
|
{ |
|
"epoch": 6.62, |
|
"learning_rate": 3.344899839505439e-05, |
|
"loss": 2.2187, |
|
"step": 445500 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"learning_rate": 3.343042263567735e-05, |
|
"loss": 2.2218, |
|
"step": 446000 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"learning_rate": 3.3411846876300304e-05, |
|
"loss": 2.2248, |
|
"step": 446500 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"learning_rate": 3.339327111692326e-05, |
|
"loss": 2.232, |
|
"step": 447000 |
|
}, |
|
{ |
|
"epoch": 6.65, |
|
"learning_rate": 3.337469535754622e-05, |
|
"loss": 2.212, |
|
"step": 447500 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 3.3356119598169175e-05, |
|
"loss": 2.2106, |
|
"step": 448000 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 3.333754383879213e-05, |
|
"loss": 2.2225, |
|
"step": 448500 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 3.331896807941509e-05, |
|
"loss": 2.2321, |
|
"step": 449000 |
|
}, |
|
{ |
|
"epoch": 6.68, |
|
"learning_rate": 3.3300392320038045e-05, |
|
"loss": 2.2221, |
|
"step": 449500 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"learning_rate": 3.3281816560661e-05, |
|
"loss": 2.2185, |
|
"step": 450000 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"learning_rate": 3.326324080128396e-05, |
|
"loss": 2.2134, |
|
"step": 450500 |
|
}, |
|
{ |
|
"epoch": 6.7, |
|
"learning_rate": 3.3244665041906916e-05, |
|
"loss": 2.2039, |
|
"step": 451000 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 3.322608928252987e-05, |
|
"loss": 2.2098, |
|
"step": 451500 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"learning_rate": 3.320751352315283e-05, |
|
"loss": 2.2225, |
|
"step": 452000 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"learning_rate": 3.3188937763775786e-05, |
|
"loss": 2.2206, |
|
"step": 452500 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"learning_rate": 3.317036200439874e-05, |
|
"loss": 2.2199, |
|
"step": 453000 |
|
}, |
|
{ |
|
"epoch": 6.74, |
|
"learning_rate": 3.31517862450217e-05, |
|
"loss": 2.2166, |
|
"step": 453500 |
|
}, |
|
{ |
|
"epoch": 6.75, |
|
"learning_rate": 3.313321048564466e-05, |
|
"loss": 2.2239, |
|
"step": 454000 |
|
}, |
|
{ |
|
"epoch": 6.75, |
|
"learning_rate": 3.3114634726267614e-05, |
|
"loss": 2.2111, |
|
"step": 454500 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 3.309605896689057e-05, |
|
"loss": 2.2194, |
|
"step": 455000 |
|
}, |
|
{ |
|
"epoch": 6.77, |
|
"learning_rate": 3.307748320751353e-05, |
|
"loss": 2.2126, |
|
"step": 455500 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"learning_rate": 3.3058907448136484e-05, |
|
"loss": 2.217, |
|
"step": 456000 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"learning_rate": 3.304033168875944e-05, |
|
"loss": 2.2248, |
|
"step": 456500 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"learning_rate": 3.30217559293824e-05, |
|
"loss": 2.2098, |
|
"step": 457000 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"learning_rate": 3.300318017000535e-05, |
|
"loss": 2.2119, |
|
"step": 457500 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"learning_rate": 3.2984604410628305e-05, |
|
"loss": 2.219, |
|
"step": 458000 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"learning_rate": 3.296602865125126e-05, |
|
"loss": 2.2106, |
|
"step": 458500 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"learning_rate": 3.294745289187422e-05, |
|
"loss": 2.2204, |
|
"step": 459000 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"learning_rate": 3.2928877132497176e-05, |
|
"loss": 2.2149, |
|
"step": 459500 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"learning_rate": 3.291030137312013e-05, |
|
"loss": 2.2104, |
|
"step": 460000 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"eval_loss": 2.169421672821045, |
|
"eval_runtime": 1168.4614, |
|
"eval_samples_per_second": 409.529, |
|
"eval_steps_per_second": 51.191, |
|
"step": 460000 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"learning_rate": 3.289172561374309e-05, |
|
"loss": 2.2138, |
|
"step": 460500 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"learning_rate": 3.2873149854366046e-05, |
|
"loss": 2.2321, |
|
"step": 461000 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"learning_rate": 3.2854574094989e-05, |
|
"loss": 2.2211, |
|
"step": 461500 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"learning_rate": 3.283599833561196e-05, |
|
"loss": 2.2242, |
|
"step": 462000 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"learning_rate": 3.281742257623492e-05, |
|
"loss": 2.2124, |
|
"step": 462500 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 3.2798846816857874e-05, |
|
"loss": 2.1997, |
|
"step": 463000 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"learning_rate": 3.278027105748083e-05, |
|
"loss": 2.1998, |
|
"step": 463500 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"learning_rate": 3.276169529810379e-05, |
|
"loss": 2.2351, |
|
"step": 464000 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"learning_rate": 3.2743119538726744e-05, |
|
"loss": 2.212, |
|
"step": 464500 |
|
}, |
|
{ |
|
"epoch": 6.91, |
|
"learning_rate": 3.27245437793497e-05, |
|
"loss": 2.2183, |
|
"step": 465000 |
|
}, |
|
{ |
|
"epoch": 6.92, |
|
"learning_rate": 3.270596801997266e-05, |
|
"loss": 2.2212, |
|
"step": 465500 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 3.2687392260595615e-05, |
|
"loss": 2.213, |
|
"step": 466000 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 3.266881650121857e-05, |
|
"loss": 2.2161, |
|
"step": 466500 |
|
}, |
|
{ |
|
"epoch": 6.94, |
|
"learning_rate": 3.265024074184153e-05, |
|
"loss": 2.2158, |
|
"step": 467000 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"learning_rate": 3.2631664982464486e-05, |
|
"loss": 2.2063, |
|
"step": 467500 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"learning_rate": 3.261308922308744e-05, |
|
"loss": 2.2006, |
|
"step": 468000 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"learning_rate": 3.25945134637104e-05, |
|
"loss": 2.2047, |
|
"step": 468500 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"learning_rate": 3.2575937704333356e-05, |
|
"loss": 2.21, |
|
"step": 469000 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"learning_rate": 3.255736194495631e-05, |
|
"loss": 2.2152, |
|
"step": 469500 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"learning_rate": 3.253878618557927e-05, |
|
"loss": 2.2111, |
|
"step": 470000 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"learning_rate": 3.252021042620223e-05, |
|
"loss": 2.212, |
|
"step": 470500 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"learning_rate": 3.2501634666825184e-05, |
|
"loss": 2.2223, |
|
"step": 471000 |
|
}, |
|
{ |
|
"epoch": 7.01, |
|
"learning_rate": 3.248305890744814e-05, |
|
"loss": 2.2091, |
|
"step": 471500 |
|
}, |
|
{ |
|
"epoch": 7.01, |
|
"learning_rate": 3.24644831480711e-05, |
|
"loss": 2.1951, |
|
"step": 472000 |
|
}, |
|
{ |
|
"epoch": 7.02, |
|
"learning_rate": 3.2445907388694054e-05, |
|
"loss": 2.2172, |
|
"step": 472500 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"learning_rate": 3.242733162931701e-05, |
|
"loss": 2.2028, |
|
"step": 473000 |
|
}, |
|
{ |
|
"epoch": 7.04, |
|
"learning_rate": 3.240875586993997e-05, |
|
"loss": 2.2088, |
|
"step": 473500 |
|
}, |
|
{ |
|
"epoch": 7.04, |
|
"learning_rate": 3.2390180110562925e-05, |
|
"loss": 2.1987, |
|
"step": 474000 |
|
}, |
|
{ |
|
"epoch": 7.05, |
|
"learning_rate": 3.237160435118588e-05, |
|
"loss": 2.2137, |
|
"step": 474500 |
|
}, |
|
{ |
|
"epoch": 7.06, |
|
"learning_rate": 3.235302859180884e-05, |
|
"loss": 2.192, |
|
"step": 475000 |
|
}, |
|
{ |
|
"epoch": 7.07, |
|
"learning_rate": 3.2334452832431796e-05, |
|
"loss": 2.2041, |
|
"step": 475500 |
|
}, |
|
{ |
|
"epoch": 7.07, |
|
"learning_rate": 3.2315877073054746e-05, |
|
"loss": 2.2021, |
|
"step": 476000 |
|
}, |
|
{ |
|
"epoch": 7.08, |
|
"learning_rate": 3.22973013136777e-05, |
|
"loss": 2.2162, |
|
"step": 476500 |
|
}, |
|
{ |
|
"epoch": 7.09, |
|
"learning_rate": 3.227872555430066e-05, |
|
"loss": 2.2022, |
|
"step": 477000 |
|
}, |
|
{ |
|
"epoch": 7.1, |
|
"learning_rate": 3.2260149794923616e-05, |
|
"loss": 2.2008, |
|
"step": 477500 |
|
}, |
|
{ |
|
"epoch": 7.1, |
|
"learning_rate": 3.224157403554657e-05, |
|
"loss": 2.19, |
|
"step": 478000 |
|
}, |
|
{ |
|
"epoch": 7.11, |
|
"learning_rate": 3.222299827616953e-05, |
|
"loss": 2.191, |
|
"step": 478500 |
|
}, |
|
{ |
|
"epoch": 7.12, |
|
"learning_rate": 3.220442251679249e-05, |
|
"loss": 2.2063, |
|
"step": 479000 |
|
}, |
|
{ |
|
"epoch": 7.13, |
|
"learning_rate": 3.2185846757415444e-05, |
|
"loss": 2.2085, |
|
"step": 479500 |
|
}, |
|
{ |
|
"epoch": 7.13, |
|
"learning_rate": 3.21672709980384e-05, |
|
"loss": 2.2138, |
|
"step": 480000 |
|
}, |
|
{ |
|
"epoch": 7.13, |
|
"eval_loss": 2.1657702922821045, |
|
"eval_runtime": 1171.0463, |
|
"eval_samples_per_second": 408.625, |
|
"eval_steps_per_second": 51.078, |
|
"step": 480000 |
|
}, |
|
{ |
|
"epoch": 7.14, |
|
"learning_rate": 3.214869523866136e-05, |
|
"loss": 2.1954, |
|
"step": 480500 |
|
}, |
|
{ |
|
"epoch": 7.15, |
|
"learning_rate": 3.2130119479284314e-05, |
|
"loss": 2.2031, |
|
"step": 481000 |
|
}, |
|
{ |
|
"epoch": 7.16, |
|
"learning_rate": 3.211154371990727e-05, |
|
"loss": 2.2035, |
|
"step": 481500 |
|
}, |
|
{ |
|
"epoch": 7.16, |
|
"learning_rate": 3.209296796053023e-05, |
|
"loss": 2.1875, |
|
"step": 482000 |
|
}, |
|
{ |
|
"epoch": 7.17, |
|
"learning_rate": 3.2074392201153185e-05, |
|
"loss": 2.1987, |
|
"step": 482500 |
|
}, |
|
{ |
|
"epoch": 7.18, |
|
"learning_rate": 3.205581644177614e-05, |
|
"loss": 2.2026, |
|
"step": 483000 |
|
}, |
|
{ |
|
"epoch": 7.19, |
|
"learning_rate": 3.20372406823991e-05, |
|
"loss": 2.2065, |
|
"step": 483500 |
|
}, |
|
{ |
|
"epoch": 7.19, |
|
"learning_rate": 3.2018664923022056e-05, |
|
"loss": 2.2025, |
|
"step": 484000 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"learning_rate": 3.200008916364501e-05, |
|
"loss": 2.1838, |
|
"step": 484500 |
|
}, |
|
{ |
|
"epoch": 7.21, |
|
"learning_rate": 3.198151340426796e-05, |
|
"loss": 2.208, |
|
"step": 485000 |
|
}, |
|
{ |
|
"epoch": 7.21, |
|
"learning_rate": 3.196293764489092e-05, |
|
"loss": 2.196, |
|
"step": 485500 |
|
}, |
|
{ |
|
"epoch": 7.22, |
|
"learning_rate": 3.1944361885513876e-05, |
|
"loss": 2.2111, |
|
"step": 486000 |
|
}, |
|
{ |
|
"epoch": 7.23, |
|
"learning_rate": 3.192578612613683e-05, |
|
"loss": 2.1992, |
|
"step": 486500 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"learning_rate": 3.190721036675979e-05, |
|
"loss": 2.1927, |
|
"step": 487000 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"learning_rate": 3.188863460738275e-05, |
|
"loss": 2.2105, |
|
"step": 487500 |
|
}, |
|
{ |
|
"epoch": 7.25, |
|
"learning_rate": 3.187005884800571e-05, |
|
"loss": 2.1957, |
|
"step": 488000 |
|
}, |
|
{ |
|
"epoch": 7.26, |
|
"learning_rate": 3.185148308862867e-05, |
|
"loss": 2.2054, |
|
"step": 488500 |
|
}, |
|
{ |
|
"epoch": 7.27, |
|
"learning_rate": 3.1832907329251624e-05, |
|
"loss": 2.1903, |
|
"step": 489000 |
|
}, |
|
{ |
|
"epoch": 7.27, |
|
"learning_rate": 3.181433156987458e-05, |
|
"loss": 2.2005, |
|
"step": 489500 |
|
}, |
|
{ |
|
"epoch": 7.28, |
|
"learning_rate": 3.179575581049754e-05, |
|
"loss": 2.2015, |
|
"step": 490000 |
|
}, |
|
{ |
|
"epoch": 7.29, |
|
"learning_rate": 3.1777180051120495e-05, |
|
"loss": 2.1928, |
|
"step": 490500 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"learning_rate": 3.175860429174345e-05, |
|
"loss": 2.2067, |
|
"step": 491000 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"learning_rate": 3.174002853236641e-05, |
|
"loss": 2.1882, |
|
"step": 491500 |
|
}, |
|
{ |
|
"epoch": 7.31, |
|
"learning_rate": 3.1721452772989365e-05, |
|
"loss": 2.1994, |
|
"step": 492000 |
|
}, |
|
{ |
|
"epoch": 7.32, |
|
"learning_rate": 3.170287701361232e-05, |
|
"loss": 2.2026, |
|
"step": 492500 |
|
}, |
|
{ |
|
"epoch": 7.33, |
|
"learning_rate": 3.168430125423528e-05, |
|
"loss": 2.1945, |
|
"step": 493000 |
|
}, |
|
{ |
|
"epoch": 7.33, |
|
"learning_rate": 3.1665725494858236e-05, |
|
"loss": 2.2011, |
|
"step": 493500 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"learning_rate": 3.1647149735481186e-05, |
|
"loss": 2.1971, |
|
"step": 494000 |
|
}, |
|
{ |
|
"epoch": 7.35, |
|
"learning_rate": 3.162857397610414e-05, |
|
"loss": 2.1957, |
|
"step": 494500 |
|
}, |
|
{ |
|
"epoch": 7.36, |
|
"learning_rate": 3.16099982167271e-05, |
|
"loss": 2.1941, |
|
"step": 495000 |
|
}, |
|
{ |
|
"epoch": 7.36, |
|
"learning_rate": 3.159142245735006e-05, |
|
"loss": 2.1832, |
|
"step": 495500 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"learning_rate": 3.1572846697973014e-05, |
|
"loss": 2.2005, |
|
"step": 496000 |
|
}, |
|
{ |
|
"epoch": 7.38, |
|
"learning_rate": 3.155427093859597e-05, |
|
"loss": 2.1913, |
|
"step": 496500 |
|
}, |
|
{ |
|
"epoch": 7.39, |
|
"learning_rate": 3.153569517921893e-05, |
|
"loss": 2.2032, |
|
"step": 497000 |
|
}, |
|
{ |
|
"epoch": 7.39, |
|
"learning_rate": 3.1517119419841884e-05, |
|
"loss": 2.2084, |
|
"step": 497500 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"learning_rate": 3.149854366046484e-05, |
|
"loss": 2.2094, |
|
"step": 498000 |
|
}, |
|
{ |
|
"epoch": 7.41, |
|
"learning_rate": 3.14799679010878e-05, |
|
"loss": 2.177, |
|
"step": 498500 |
|
}, |
|
{ |
|
"epoch": 7.42, |
|
"learning_rate": 3.1461392141710755e-05, |
|
"loss": 2.1894, |
|
"step": 499000 |
|
}, |
|
{ |
|
"epoch": 7.42, |
|
"learning_rate": 3.144281638233371e-05, |
|
"loss": 2.1928, |
|
"step": 499500 |
|
}, |
|
{ |
|
"epoch": 7.43, |
|
"learning_rate": 3.142424062295667e-05, |
|
"loss": 2.1995, |
|
"step": 500000 |
|
}, |
|
{ |
|
"epoch": 7.43, |
|
"eval_loss": 2.1569273471832275, |
|
"eval_runtime": 1174.6025, |
|
"eval_samples_per_second": 407.388, |
|
"eval_steps_per_second": 50.924, |
|
"step": 500000 |
|
} |
|
], |
|
"max_steps": 1345840, |
|
"num_train_epochs": 20, |
|
"total_flos": 1.0609639946214728e+18, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|