{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "global_step": 250000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 1.9980000000000002e-05, "loss": 1.5856, "step": 500 },
    { "epoch": 0.02, "learning_rate": 1.9960000000000002e-05, "loss": 1.1851, "step": 1000 },
    { "epoch": 0.03, "learning_rate": 1.9940000000000002e-05, "loss": 1.1964, "step": 1500 },
    { "epoch": 0.04, "learning_rate": 1.9920000000000002e-05, "loss": 1.1492, "step": 2000 },
    { "epoch": 0.05, "learning_rate": 1.9900000000000003e-05, "loss": 1.1113, "step": 2500 },
    { "epoch": 0.06, "learning_rate": 1.9880000000000003e-05, "loss": 1.1244, "step": 3000 },
    { "epoch": 0.07, "learning_rate": 1.9860000000000003e-05, "loss": 1.1124, "step": 3500 },
    { "epoch": 0.08, "learning_rate": 1.9840000000000003e-05, "loss": 1.0842, "step": 4000 },
    { "epoch": 0.09, "learning_rate": 1.982e-05, "loss": 1.0997, "step": 4500 },
    { "epoch": 0.1, "learning_rate": 1.98e-05, "loss": 1.0618, "step": 5000 },
    { "epoch": 0.11, "learning_rate": 1.978e-05, "loss": 1.1082, "step": 5500 },
    { "epoch": 0.12, "learning_rate": 1.976e-05, "loss": 1.0901, "step": 6000 },
    { "epoch": 0.13, "learning_rate": 1.974e-05, "loss": 1.1198, "step": 6500 },
    { "epoch": 0.14, "learning_rate": 1.972e-05, "loss": 1.1091, "step": 7000 },
    { "epoch": 0.15, "learning_rate": 1.97e-05, "loss": 1.0496, "step": 7500 },
    { "epoch": 0.16, "learning_rate": 1.968e-05, "loss": 1.0579, "step": 8000 },
    { "epoch": 0.17, "learning_rate": 1.966e-05, "loss": 1.0638, "step": 8500 },
    { "epoch": 0.18, "learning_rate": 1.9640000000000002e-05, "loss": 1.0912, "step": 9000 },
    { "epoch": 0.19, "learning_rate": 1.9620000000000002e-05, "loss": 1.0564, "step": 9500 },
    { "epoch": 0.2, "learning_rate": 1.9600000000000002e-05, "loss": 1.0759, "step": 10000 },
    { "epoch": 0.21, "learning_rate": 1.9580000000000002e-05, "loss": 1.0336, "step": 10500 },
    { "epoch": 0.22, "learning_rate": 1.9560000000000002e-05, "loss": 1.0601, "step": 11000 },
    { "epoch": 0.23, "learning_rate": 1.9540000000000003e-05, "loss": 1.0388, "step": 11500 },
    { "epoch": 0.24, "learning_rate": 1.9520000000000003e-05, "loss": 1.0621, "step": 12000 },
    { "epoch": 0.25, "learning_rate": 1.95e-05, "loss": 1.0859, "step": 12500 },
    { "epoch": 0.26, "learning_rate": 1.948e-05, "loss": 1.0492, "step": 13000 },
    { "epoch": 0.27, "learning_rate": 1.946e-05, "loss": 1.0551, "step": 13500 },
    { "epoch": 0.28, "learning_rate": 1.944e-05, "loss": 1.0248, "step": 14000 },
    { "epoch": 0.29, "learning_rate": 1.942e-05, "loss": 1.0416, "step": 14500 },
    { "epoch": 0.3, "learning_rate": 1.94e-05, "loss": 1.0339, "step": 15000 },
    { "epoch": 0.31, "learning_rate": 1.938e-05, "loss": 1.0452, "step": 15500 },
    { "epoch": 0.32, "learning_rate": 1.936e-05, "loss": 1.0178, "step": 16000 },
    { "epoch": 0.33, "learning_rate": 1.934e-05, "loss": 1.0517, "step": 16500 },
    { "epoch": 0.34, "learning_rate": 1.932e-05, "loss": 1.0101, "step": 17000 },
    { "epoch": 0.35, "learning_rate": 1.93e-05, "loss": 1.0358, "step": 17500 },
    { "epoch": 0.36, "learning_rate": 1.9280000000000002e-05, "loss": 1.0145, "step": 18000 },
    { "epoch": 0.37, "learning_rate": 1.9260000000000002e-05, "loss": 1.0449, "step": 18500 },
    { "epoch": 0.38, "learning_rate": 1.9240000000000002e-05, "loss": 1.0334, "step": 19000 },
    { "epoch": 0.39, "learning_rate": 1.9220000000000002e-05, "loss": 0.988, "step": 19500 },
    { "epoch": 0.4, "learning_rate": 1.9200000000000003e-05, "loss": 1.0197, "step": 20000 },
    { "epoch": 0.41, "learning_rate": 1.918e-05, "loss": 1.0102, "step": 20500 },
    { "epoch": 0.42, "learning_rate": 1.916e-05, "loss": 1.0621, "step": 21000 },
    { "epoch": 0.43, "learning_rate": 1.914e-05, "loss": 1.0236, "step": 21500 },
    { "epoch": 0.44, "learning_rate": 1.912e-05, "loss": 1.0556, "step": 22000 },
    { "epoch": 0.45, "learning_rate": 1.91e-05, "loss": 1.0212, "step": 22500 },
    { "epoch": 0.46, "learning_rate": 1.908e-05, "loss": 0.9991, "step": 23000 },
    { "epoch": 0.47, "learning_rate": 1.906e-05, "loss": 1.0431, "step": 23500 },
    { "epoch": 0.48, "learning_rate": 1.904e-05, "loss": 1.0099, "step": 24000 },
    { "epoch": 0.49, "learning_rate": 1.902e-05, "loss": 1.0235, "step": 24500 },
    { "epoch": 0.5, "learning_rate": 1.9e-05, "loss": 1.0044, "step": 25000 },
    { "epoch": 0.51, "learning_rate": 1.898e-05, "loss": 1.0476, "step": 25500 },
    { "epoch": 0.52, "learning_rate": 1.896e-05, "loss": 1.0168, "step": 26000 },
    { "epoch": 0.53, "learning_rate": 1.894e-05, "loss": 1.03, "step": 26500 },
    { "epoch": 0.54, "learning_rate": 1.8920000000000002e-05, "loss": 1.035, "step": 27000 },
    { "epoch": 0.55, "learning_rate": 1.8900000000000002e-05, "loss": 1.0196, "step": 27500 },
    { "epoch": 0.56, "learning_rate": 1.8880000000000002e-05, "loss": 1.0446, "step": 28000 },
    { "epoch": 0.57, "learning_rate": 1.886e-05, "loss": 1.0376, "step": 28500 },
    { "epoch": 0.58, "learning_rate": 1.884e-05, "loss": 0.9865, "step": 29000 },
    { "epoch": 0.59, "learning_rate": 1.882e-05, "loss": 1.0091, "step": 29500 },
    { "epoch": 0.6, "learning_rate": 1.88e-05, "loss": 1.0268, "step": 30000 },
    { "epoch": 0.61, "learning_rate": 1.878e-05, "loss": 0.999, "step": 30500 },
    { "epoch": 0.62, "learning_rate": 1.876e-05, "loss": 0.9908, "step": 31000 },
    { "epoch": 0.63, "learning_rate": 1.8740000000000004e-05, "loss": 1.0225, "step": 31500 },
    { "epoch": 0.64, "learning_rate": 1.8720000000000004e-05, "loss": 1.0211, "step": 32000 },
    { "epoch": 0.65, "learning_rate": 1.8700000000000004e-05, "loss": 1.0369, "step": 32500 },
    { "epoch": 0.66, "learning_rate": 1.8680000000000004e-05, "loss": 0.9936, "step": 33000 },
    { "epoch": 0.67, "learning_rate": 1.866e-05, "loss": 0.9871, "step": 33500 },
    { "epoch": 0.68, "learning_rate": 1.864e-05, "loss": 1.0057, "step": 34000 },
    { "epoch": 0.69, "learning_rate": 1.862e-05, "loss": 0.9983, "step": 34500 },
    { "epoch": 0.7, "learning_rate": 1.86e-05, "loss": 1.0156, "step": 35000 },
    { "epoch": 0.71, "learning_rate": 1.858e-05, "loss": 1.0123, "step": 35500 },
    { "epoch": 0.72, "learning_rate": 1.8560000000000002e-05, "loss": 1.0401, "step": 36000 },
    { "epoch": 0.73, "learning_rate": 1.8540000000000002e-05, "loss": 0.9898, "step": 36500 },
    { "epoch": 0.74, "learning_rate": 1.8520000000000002e-05, "loss": 1.0112, "step": 37000 },
    { "epoch": 0.75, "learning_rate": 1.8500000000000002e-05, "loss": 1.0024, "step": 37500 },
    { "epoch": 0.76, "learning_rate": 1.8480000000000003e-05, "loss": 0.9794, "step": 38000 },
    { "epoch": 0.77, "learning_rate": 1.8460000000000003e-05, "loss": 0.9986, "step": 38500 },
    { "epoch": 0.78, "learning_rate": 1.8440000000000003e-05, "loss": 1.0181, "step": 39000 },
    { "epoch": 0.79, "learning_rate": 1.8420000000000003e-05, "loss": 0.9949, "step": 39500 },
    { "epoch": 0.8, "learning_rate": 1.8400000000000003e-05, "loss": 0.9803, "step": 40000 },
    { "epoch": 0.81, "learning_rate": 1.8380000000000004e-05, "loss": 1.0184, "step": 40500 },
    { "epoch": 0.82, "learning_rate": 1.8360000000000004e-05, "loss": 0.9792, "step": 41000 },
    { "epoch": 0.83, "learning_rate": 1.834e-05, "loss": 0.9938, "step": 41500 },
    { "epoch": 0.84, "learning_rate": 1.832e-05, "loss": 1.0015, "step": 42000 },
    { "epoch": 0.85, "learning_rate": 1.83e-05, "loss": 0.9809, "step": 42500 },
    { "epoch": 0.86, "learning_rate": 1.828e-05, "loss": 0.9914, "step": 43000 },
    { "epoch": 0.87, "learning_rate": 1.826e-05, "loss": 0.9457, "step": 43500 },
    { "epoch": 0.88, "learning_rate": 1.824e-05, "loss": 1.0056, "step": 44000 },
    { "epoch": 0.89, "learning_rate": 1.8220000000000002e-05, "loss": 1.0131, "step": 44500 },
    { "epoch": 0.9, "learning_rate": 1.8200000000000002e-05, "loss": 1.0017, "step": 45000 },
    { "epoch": 0.91, "learning_rate": 1.8180000000000002e-05, "loss": 0.9875, "step": 45500 },
    { "epoch": 0.92, "learning_rate": 1.8160000000000002e-05, "loss": 1.004, "step": 46000 },
    { "epoch": 0.93, "learning_rate": 1.8140000000000003e-05, "loss": 1.0072, "step": 46500 },
    { "epoch": 0.94, "learning_rate": 1.8120000000000003e-05, "loss": 1.003, "step": 47000 },
    { "epoch": 0.95, "learning_rate": 1.8100000000000003e-05, "loss": 0.9947, "step": 47500 },
    { "epoch": 0.96, "learning_rate": 1.8080000000000003e-05, "loss": 0.9802, "step": 48000 },
    { "epoch": 0.97, "learning_rate": 1.8060000000000003e-05, "loss": 0.9708, "step": 48500 },
    { "epoch": 0.98, "learning_rate": 1.8040000000000003e-05, "loss": 1.0065, "step": 49000 },
    { "epoch": 0.99, "learning_rate": 1.802e-05, "loss": 1.0028, "step": 49500 },
    { "epoch": 1.0, "learning_rate": 1.8e-05, "loss": 0.9957, "step": 50000 },
    { "epoch": 1.0, "eval_loss": 1.0293926000595093, "eval_runtime": 36.7211, "eval_samples_per_second": 229.813, "step": 50000 },
    { "epoch": 1.01, "learning_rate": 1.798e-05, "loss": 1.0013, "step": 50500 },
    { "epoch": 1.02, "learning_rate": 1.796e-05, "loss": 0.9563, "step": 51000 },
    { "epoch": 1.03, "learning_rate": 1.794e-05, "loss": 0.9488, "step": 51500 },
    { "epoch": 1.04, "learning_rate": 1.792e-05, "loss": 0.9799, "step": 52000 },
    { "epoch": 1.05, "learning_rate": 1.79e-05, "loss": 0.975, "step": 52500 },
    { "epoch": 1.06, "learning_rate": 1.788e-05, "loss": 0.9706, "step": 53000 },
    { "epoch": 1.07, "learning_rate": 1.7860000000000002e-05, "loss": 0.9602, "step": 53500 },
    { "epoch": 1.08, "learning_rate": 1.7840000000000002e-05, "loss": 0.9734, "step": 54000 },
    { "epoch": 1.09, "learning_rate": 1.7820000000000002e-05, "loss": 0.9597, "step": 54500 },
    { "epoch": 1.1, "learning_rate": 1.7800000000000002e-05, "loss": 0.9637, "step": 55000 },
    { "epoch": 1.11, "learning_rate": 1.7780000000000003e-05, "loss": 0.9735, "step": 55500 },
    { "epoch": 1.12, "learning_rate": 1.7760000000000003e-05, "loss": 0.9829, "step": 56000 },
    { "epoch": 1.13, "learning_rate": 1.7740000000000003e-05, "loss": 0.9762, "step": 56500 },
    { "epoch": 1.14, "learning_rate": 1.7720000000000003e-05, "loss": 0.9713, "step": 57000 },
    { "epoch": 1.15, "learning_rate": 1.77e-05, "loss": 0.9629, "step": 57500 },
    { "epoch": 1.16, "learning_rate": 1.768e-05, "loss": 0.9717, "step": 58000 },
    { "epoch": 1.17, "learning_rate": 1.766e-05, "loss": 0.9928, "step": 58500 },
    { "epoch": 1.18, "learning_rate": 1.764e-05, "loss": 0.925, "step": 59000 },
    { "epoch": 1.19, "learning_rate": 1.762e-05, "loss": 0.9672, "step": 59500 },
    { "epoch": 1.2, "learning_rate": 1.76e-05, "loss": 0.9915, "step": 60000 },
    { "epoch": 1.21, "learning_rate": 1.758e-05, "loss": 0.9888, "step": 60500 },
    { "epoch": 1.22, "learning_rate": 1.756e-05, "loss": 0.9473, "step": 61000 },
    { "epoch": 1.23, "learning_rate": 1.754e-05, "loss": 0.9552, "step": 61500 },
    { "epoch": 1.24, "learning_rate": 1.752e-05, "loss": 0.9878, "step": 62000 },
    { "epoch": 1.25, "learning_rate": 1.7500000000000002e-05, "loss": 0.972, "step": 62500 },
    { "epoch": 1.26, "learning_rate": 1.7480000000000002e-05, "loss": 1.0064, "step": 63000 },
    { "epoch": 1.27, "learning_rate": 1.7460000000000002e-05, "loss": 0.9613, "step": 63500 },
    { "epoch": 1.28, "learning_rate": 1.7440000000000002e-05, "loss": 0.9594, "step": 64000 },
    { "epoch": 1.29, "learning_rate": 1.7420000000000003e-05, "loss": 0.978, "step": 64500 },
    { "epoch": 1.3, "learning_rate": 1.7400000000000003e-05, "loss": 0.9826, "step": 65000 },
    { "epoch": 1.31, "learning_rate": 1.7380000000000003e-05, "loss": 0.958, "step": 65500 },
    { "epoch": 1.32, "learning_rate": 1.736e-05, "loss": 0.9604, "step": 66000 },
    { "epoch": 1.33, "learning_rate": 1.734e-05, "loss": 0.9622, "step": 66500 },
    { "epoch": 1.34, "learning_rate": 1.732e-05, "loss": 0.938, "step": 67000 },
    { "epoch": 1.35, "learning_rate": 1.73e-05, "loss": 0.9648, "step": 67500 },
    { "epoch": 1.36, "learning_rate": 1.728e-05, "loss": 0.9453, "step": 68000 },
    { "epoch": 1.37, "learning_rate": 1.726e-05, "loss": 0.9513, "step": 68500 },
    { "epoch": 1.38, "learning_rate": 1.724e-05, "loss": 0.9811, "step": 69000 },
    { "epoch": 1.39, "learning_rate": 1.722e-05, "loss": 0.9797, "step": 69500 },
    { "epoch": 1.4, "learning_rate": 1.72e-05, "loss": 0.9241, "step": 70000 },
    { "epoch": 1.41, "learning_rate": 1.718e-05, "loss": 0.948, "step": 70500 },
    { "epoch": 1.42, "learning_rate": 1.7160000000000002e-05, "loss": 0.9596, "step": 71000 },
    { "epoch": 1.43, "learning_rate": 1.7140000000000002e-05, "loss": 0.9635, "step": 71500 },
    { "epoch": 1.44, "learning_rate": 1.7120000000000002e-05, "loss": 0.983, "step": 72000 },
    { "epoch": 1.45, "learning_rate": 1.7100000000000002e-05, "loss": 0.9788, "step": 72500 },
    { "epoch": 1.46, "learning_rate": 1.7080000000000002e-05, "loss": 0.9827, "step": 73000 },
    { "epoch": 1.47, "learning_rate": 1.7060000000000003e-05, "loss": 0.9305, "step": 73500 },
    { "epoch": 1.48, "learning_rate": 1.704e-05, "loss": 0.9367, "step": 74000 },
    { "epoch": 1.49, "learning_rate": 1.702e-05, "loss": 0.9379, "step": 74500 },
    { "epoch": 1.5, "learning_rate": 1.7e-05, "loss": 0.9826, "step": 75000 },
    { "epoch": 1.51, "learning_rate": 1.698e-05, "loss": 0.9659, "step": 75500 },
    { "epoch": 1.52, "learning_rate": 1.696e-05, "loss": 0.9776, "step": 76000 },
    { "epoch": 1.53, "learning_rate": 1.694e-05, "loss": 0.9575, "step": 76500 },
    { "epoch": 1.54, "learning_rate": 1.692e-05, "loss": 0.9614, "step": 77000 },
    { "epoch": 1.55, "learning_rate": 1.69e-05, "loss": 0.9275, "step": 77500 },
    { "epoch": 1.56, "learning_rate": 1.688e-05, "loss": 0.9523, "step": 78000 },
    { "epoch": 1.57, "learning_rate": 1.686e-05, "loss": 0.9806, "step": 78500 },
    { "epoch": 1.58, "learning_rate": 1.684e-05, "loss": 0.9169, "step": 79000 },
    { "epoch": 1.59, "learning_rate": 1.682e-05, "loss": 0.9539, "step": 79500 },
    { "epoch": 1.6, "learning_rate": 1.6800000000000002e-05, "loss": 0.9378, "step": 80000 },
    { "epoch": 1.61, "learning_rate": 1.6780000000000002e-05, "loss": 0.9668, "step": 80500 },
    { "epoch": 1.62, "learning_rate": 1.6760000000000002e-05, "loss": 0.9496, "step": 81000 },
    { "epoch": 1.63, "learning_rate": 1.6740000000000002e-05, "loss": 0.9712, "step": 81500 },
    { "epoch": 1.64, "learning_rate": 1.672e-05, "loss": 0.9617, "step": 82000 },
    { "epoch": 1.65, "learning_rate": 1.67e-05, "loss": 0.9189, "step": 82500 },
    { "epoch": 1.66, "learning_rate": 1.668e-05, "loss": 0.9294, "step": 83000 },
    { "epoch": 1.67, "learning_rate": 1.666e-05, "loss": 0.9742, "step": 83500 },
    { "epoch": 1.68, "learning_rate": 1.664e-05, "loss": 0.9528, "step": 84000 },
    { "epoch": 1.69, "learning_rate": 1.662e-05, "loss": 0.9349, "step": 84500 },
    { "epoch": 1.7, "learning_rate": 1.66e-05, "loss": 0.981, "step": 85000 },
    { "epoch": 1.71, "learning_rate": 1.658e-05, "loss": 0.9718, "step": 85500 },
    { "epoch": 1.72, "learning_rate": 1.656e-05, "loss": 0.9832, "step": 86000 },
    { "epoch": 1.73, "learning_rate": 1.654e-05, "loss": 0.9193, "step": 86500 },
    { "epoch": 1.74, "learning_rate": 1.652e-05, "loss": 0.9687, "step": 87000 },
    { "epoch": 1.75, "learning_rate": 1.65e-05, "loss": 0.9637, "step": 87500 },
    { "epoch": 1.76, "learning_rate": 1.648e-05, "loss": 0.978, "step": 88000 },
    { "epoch": 1.77, "learning_rate": 1.646e-05, "loss": 0.9461, "step": 88500 },
    { "epoch": 1.78, "learning_rate": 1.6440000000000002e-05, "loss": 0.9461, "step": 89000 },
    { "epoch": 1.79, "learning_rate": 1.6420000000000002e-05, "loss": 0.9343, "step": 89500 },
    { "epoch": 1.8, "learning_rate": 1.64e-05, "loss": 0.9584, "step": 90000 },
    { "epoch": 1.81, "learning_rate": 1.638e-05, "loss": 0.9405, "step": 90500 },
    { "epoch": 1.82, "learning_rate": 1.636e-05, "loss": 0.9369, "step": 91000 },
    { "epoch": 1.83, "learning_rate": 1.634e-05, "loss": 0.9672, "step": 91500 },
    { "epoch": 1.84, "learning_rate": 1.632e-05, "loss": 0.9436, "step": 92000 },
    { "epoch": 1.85, "learning_rate": 1.63e-05, "loss": 0.9468, "step": 92500 },
    { "epoch": 1.86, "learning_rate": 1.628e-05, "loss": 0.928, "step": 93000 },
    { "epoch": 1.87, "learning_rate": 1.626e-05, "loss": 0.9393, "step": 93500 },
    { "epoch": 1.88, "learning_rate": 1.6240000000000004e-05, "loss": 0.9574, "step": 94000 },
    { "epoch": 1.89, "learning_rate": 1.6220000000000004e-05, "loss": 0.9465, "step": 94500 },
    { "epoch": 1.9, "learning_rate": 1.62e-05, "loss": 0.958, "step": 95000 },
    { "epoch": 1.91, "learning_rate": 1.618e-05, "loss": 0.9365, "step": 95500 },
    { "epoch": 1.92, "learning_rate": 1.616e-05, "loss": 0.9451, "step": 96000 },
    { "epoch": 1.93, "learning_rate": 1.614e-05, "loss": 0.9642, "step": 96500 },
    { "epoch": 1.94, "learning_rate": 1.612e-05, "loss": 0.9629, "step": 97000 },
    { "epoch": 1.95, "learning_rate": 1.6100000000000002e-05, "loss": 0.9441, "step": 97500 },
    { "epoch": 1.96, "learning_rate": 1.6080000000000002e-05, "loss": 0.9367, "step": 98000 },
    { "epoch": 1.97, "learning_rate": 1.6060000000000002e-05, "loss": 0.9527, "step": 98500 },
    { "epoch": 1.98, "learning_rate": 1.6040000000000002e-05, "loss": 0.9322, "step": 99000 },
    { "epoch": 1.99, "learning_rate": 1.6020000000000002e-05, "loss": 0.9424, "step": 99500 },
    { "epoch": 2.0, "learning_rate": 1.6000000000000003e-05, "loss": 0.9206, "step": 100000 },
    { "epoch": 2.0, "eval_loss": 1.0401068925857544, "eval_runtime": 38.2298, "eval_samples_per_second": 220.744, "step": 100000 },
    { "epoch": 2.01, "learning_rate": 1.5980000000000003e-05, "loss": 0.9206, "step": 100500 },
    { "epoch": 2.02, "learning_rate": 1.5960000000000003e-05, "loss": 0.9442, "step": 101000 },
    { "epoch": 2.03, "learning_rate": 1.5940000000000003e-05, "loss": 0.9081, "step": 101500 },
    { "epoch": 2.04, "learning_rate": 1.5920000000000003e-05, "loss": 0.9236, "step": 102000 },
    { "epoch": 2.05, "learning_rate": 1.5900000000000004e-05, "loss": 0.9012, "step": 102500 },
    { "epoch": 2.06, "learning_rate": 1.588e-05, "loss": 0.9594, "step": 103000 },
    { "epoch": 2.07, "learning_rate": 1.586e-05, "loss": 0.9216, "step": 103500 },
    { "epoch": 2.08, "learning_rate": 1.584e-05, "loss": 0.9125, "step": 104000 },
    { "epoch": 2.09, "learning_rate": 1.582e-05, "loss": 0.9117, "step": 104500 },
    { "epoch": 2.1, "learning_rate": 1.58e-05, "loss": 0.9444, "step": 105000 },
    { "epoch": 2.11, "learning_rate": 1.578e-05, "loss": 0.9321, "step": 105500 },
    { "epoch": 2.12, "learning_rate": 1.576e-05, "loss": 0.9515, "step": 106000 },
    { "epoch": 2.13, "learning_rate": 1.5740000000000002e-05, "loss": 0.9496, "step": 106500 },
    { "epoch": 2.14, "learning_rate": 1.5720000000000002e-05, "loss": 0.9315, "step": 107000 },
    { "epoch": 2.15, "learning_rate": 1.5700000000000002e-05, "loss": 0.9478, "step": 107500 },
    { "epoch": 2.16, "learning_rate": 1.5680000000000002e-05, "loss": 0.938, "step": 108000 },
    { "epoch": 2.17, "learning_rate": 1.5660000000000003e-05, "loss": 0.8838, "step": 108500 },
    { "epoch": 2.18, "learning_rate": 1.5640000000000003e-05, "loss": 0.9082, "step": 109000 },
    { "epoch": 2.19, "learning_rate": 1.5620000000000003e-05, "loss": 0.9313, "step": 109500 },
    { "epoch": 2.2, "learning_rate": 1.5600000000000003e-05, "loss": 0.917, "step": 110000 },
    { "epoch": 2.21, "learning_rate": 1.5580000000000003e-05, "loss": 0.9334, "step": 110500 },
    { "epoch": 2.22, "learning_rate": 1.556e-05, "loss": 0.9425, "step": 111000 },
    { "epoch": 2.23, "learning_rate": 1.554e-05, "loss": 0.9317, "step": 111500 },
    { "epoch": 2.24, "learning_rate": 1.552e-05, "loss": 0.9415, "step": 112000 },
    { "epoch": 2.25, "learning_rate": 1.55e-05, "loss": 0.9138, "step": 112500 },
    { "epoch": 2.26, "learning_rate": 1.548e-05, "loss": 0.9132, "step": 113000 },
    { "epoch": 2.27, "learning_rate": 1.546e-05, "loss": 0.9338, "step": 113500 },
    { "epoch": 2.28, "learning_rate": 1.544e-05, "loss": 0.9474, "step": 114000 },
    { "epoch": 2.29, "learning_rate": 1.542e-05, "loss": 0.9141, "step": 114500 },
    { "epoch": 2.3, "learning_rate": 1.54e-05, "loss": 0.9, "step": 115000 },
    { "epoch": 2.31, "learning_rate": 1.5380000000000002e-05, "loss": 0.9178, "step": 115500 },
    { "epoch": 2.32, "learning_rate": 1.5360000000000002e-05, "loss": 0.9222, "step": 116000 },
    { "epoch": 2.33, "learning_rate": 1.5340000000000002e-05, "loss": 0.8756, "step": 116500 },
    { "epoch": 2.34, "learning_rate": 1.5320000000000002e-05, "loss": 0.9079, "step": 117000 },
    { "epoch": 2.35, "learning_rate": 1.5300000000000003e-05, "loss": 0.9154, "step": 117500 },
    { "epoch": 2.36, "learning_rate": 1.5280000000000003e-05, "loss": 0.9191, "step": 118000 },
    { "epoch": 2.37, "learning_rate": 1.5260000000000003e-05, "loss": 0.9101, "step": 118500 },
    { "epoch": 2.38, "learning_rate": 1.5240000000000001e-05, "loss": 0.9268, "step": 119000 },
    { "epoch": 2.39, "learning_rate": 1.5220000000000002e-05, "loss": 0.9455, "step": 119500 },
    { "epoch": 2.4, "learning_rate": 1.5200000000000002e-05, "loss": 0.8976, "step": 120000 },
    { "epoch": 2.41, "learning_rate": 1.5180000000000002e-05, "loss": 0.9389, "step": 120500 },
    { "epoch": 2.42, "learning_rate": 1.516e-05, "loss": 0.9626, "step": 121000 },
    { "epoch": 2.43, "learning_rate": 1.514e-05, "loss": 0.9094, "step": 121500 },
    { "epoch": 2.44, "learning_rate": 1.5120000000000001e-05, "loss": 0.9505, "step": 122000 },
    { "epoch": 2.45, "learning_rate": 1.5100000000000001e-05, "loss": 0.941, "step": 122500 },
    { "epoch": 2.46, "learning_rate": 1.5080000000000001e-05, "loss": 0.8881, "step": 123000 },
    { "epoch": 2.47, "learning_rate": 1.5060000000000001e-05, "loss": 0.8981, "step": 123500 },
    { "epoch": 2.48, "learning_rate": 1.5040000000000002e-05, "loss": 0.935, "step": 124000 },
    { "epoch": 2.49, "learning_rate": 1.5020000000000002e-05, "loss": 0.9324, "step": 124500 },
    { "epoch": 2.5, "learning_rate": 1.5000000000000002e-05, "loss": 0.9057, "step": 125000 },
    { "epoch": 2.51, "learning_rate": 1.498e-05, "loss": 0.9071, "step": 125500 },
    { "epoch": 2.52, "learning_rate": 1.496e-05, "loss": 0.9294, "step": 126000 },
    { "epoch": 2.53, "learning_rate": 1.4940000000000001e-05, "loss": 0.9223, "step": 126500 },
    { "epoch": 2.54, "learning_rate": 1.4920000000000001e-05, "loss": 0.9375, "step": 127000 },
    { "epoch": 2.55, "learning_rate": 1.4900000000000001e-05, "loss": 0.9598, "step": 127500 },
    { "epoch": 2.56, "learning_rate": 1.4880000000000002e-05, "loss": 0.9026, "step": 128000 },
    { "epoch": 2.57, "learning_rate": 1.4860000000000002e-05, "loss": 0.9126, "step": 128500 },
    { "epoch": 2.58, "learning_rate": 1.4840000000000002e-05, "loss": 0.9249, "step": 129000 },
    { "epoch": 2.59, "learning_rate": 1.482e-05, "loss": 0.9315, "step": 129500 },
    { "epoch": 2.6, "learning_rate": 1.48e-05, "loss": 0.9222, "step": 130000 },
    { "epoch": 2.61, "learning_rate": 1.478e-05, "loss": 0.9345, "step": 130500 },
    { "epoch": 2.62, "learning_rate": 1.4760000000000001e-05, "loss": 0.907, "step": 131000 },
    { "epoch": 2.63, "learning_rate": 1.4740000000000001e-05, "loss": 0.9335, "step": 131500 },
    { "epoch": 2.64, "learning_rate": 1.4720000000000001e-05, "loss": 0.9016, "step": 132000 },
    { "epoch": 2.65, "learning_rate": 1.4700000000000002e-05, "loss": 0.9286, "step": 132500 },
    { "epoch": 2.66, "learning_rate": 1.4680000000000002e-05, "loss": 0.9309, "step": 133000 },
    { "epoch": 2.67, "learning_rate": 1.466e-05, "loss": 0.8985, "step": 133500 },
    { "epoch": 2.68, "learning_rate": 1.464e-05, "loss": 0.931, "step": 134000 },
    { "epoch": 2.69, "learning_rate": 1.462e-05, "loss": 0.8933, "step": 134500 },
    { "epoch": 2.7, "learning_rate": 1.46e-05, "loss": 0.9064, "step": 135000 },
    { "epoch": 2.71, "learning_rate": 1.4580000000000001e-05, "loss": 0.8963, "step": 135500 },
    { "epoch": 2.72, "learning_rate": 1.4560000000000001e-05, "loss": 0.9055, "step": 136000 },
    { "epoch": 2.73, "learning_rate": 1.4540000000000001e-05, "loss": 0.8748, "step": 136500 },
    { "epoch": 2.74, "learning_rate": 1.4520000000000002e-05, "loss": 0.9339, "step": 137000 },
    { "epoch": 2.75, "learning_rate": 1.45e-05, "loss": 0.9336, "step": 137500 },
    { "epoch": 2.76, "learning_rate": 1.448e-05, "loss": 0.9413, "step": 138000 },
    { "epoch": 2.77, "learning_rate": 1.446e-05, "loss": 0.8967, "step": 138500 },
    { "epoch": 2.78, "learning_rate": 1.444e-05, "loss": 0.9144, "step": 139000 },
    { "epoch": 2.79, "learning_rate": 1.4420000000000001e-05, "loss": 0.9165, "step": 139500 },
    { "epoch": 2.8, "learning_rate": 1.4400000000000001e-05, "loss": 0.9088, "step": 140000 },
    { "epoch": 2.81, "learning_rate": 1.4380000000000001e-05, "loss": 0.9127, "step": 140500 },
    { "epoch": 2.82, "learning_rate": 1.4360000000000001e-05, "loss": 0.9226, "step": 141000 },
    { "epoch": 2.83, "learning_rate": 1.434e-05, "loss": 0.9374, "step": 141500 },
    { "epoch": 2.84, "learning_rate": 1.432e-05, "loss": 0.9111, "step": 142000 },
    { "epoch": 2.85, "learning_rate": 1.43e-05, "loss": 0.9213, "step": 142500 },
    { "epoch": 2.86, "learning_rate": 1.428e-05, "loss": 0.9115, "step": 143000 },
    { "epoch": 2.87, "learning_rate": 1.426e-05, "loss": 0.9207, "step": 143500 },
    { "epoch": 2.88, "learning_rate": 1.4240000000000001e-05, "loss": 0.9162, "step": 144000 },
    { "epoch": 2.89, "learning_rate": 1.4220000000000001e-05, "loss": 0.9338, "step": 144500 },
    { "epoch": 2.9, "learning_rate": 1.4200000000000001e-05, "loss": 0.9201, "step": 145000 },
    { "epoch": 2.91, "learning_rate": 1.418e-05, "loss": 0.8941, "step": 145500 },
    { "epoch": 2.92, "learning_rate": 1.416e-05, "loss": 0.8942, "step": 146000 },
    { "epoch": 2.93, "learning_rate": 1.414e-05, "loss": 0.9272, "step": 146500 },
    { "epoch": 2.94, "learning_rate": 1.412e-05, "loss": 0.9401, "step": 147000 },
    { "epoch": 2.95, "learning_rate": 1.41e-05, "loss": 0.9248, "step": 147500 },
    { "epoch": 2.96, "learning_rate": 1.408e-05, "loss": 0.9509, "step": 148000 },
    { "epoch": 2.97, "learning_rate": 1.4060000000000001e-05, "loss": 0.9258, "step": 148500 },
    { "epoch": 2.98, "learning_rate": 1.4040000000000001e-05, "loss": 0.914, "step": 149000 },
    { "epoch": 2.99, "learning_rate": 1.402e-05, "loss": 0.9115, "step": 149500 },
    { "epoch": 3.0, "learning_rate": 1.4e-05, "loss": 0.9401, "step": 150000 },
    { "epoch": 3.0, "eval_loss": 1.0360552072525024, "eval_runtime": 38.0361, "eval_samples_per_second": 221.868, "step": 150000 },
    { "epoch": 3.01, "learning_rate": 1.398e-05, "loss": 0.8871, "step": 150500 },
    { "epoch": 3.02, "learning_rate": 1.396e-05, "loss": 0.885, "step": 151000 },
    { "epoch": 3.03, "learning_rate": 1.394e-05, "loss": 0.9214, "step": 151500 },
    { "epoch": 3.04, "learning_rate": 1.392e-05, "loss": 0.9042, "step": 152000 },
    { "epoch": 3.05, "learning_rate": 1.39e-05, "loss": 0.8641, "step": 152500 },
    { "epoch": 3.06, "learning_rate": 1.3880000000000001e-05, "loss": 0.8995, "step": 153000 },
    { "epoch": 3.07, "learning_rate": 1.386e-05, "loss": 0.9037, "step": 153500 },
    { "epoch": 3.08, "learning_rate": 1.384e-05, "loss": 0.9184, "step": 154000 },
    { "epoch": 3.09, "learning_rate": 1.382e-05, "loss": 0.9066, "step": 154500 },
    { "epoch": 3.1, "learning_rate": 1.38e-05, "loss": 0.8938, "step": 155000 },
    { "epoch": 3.11, "learning_rate": 1.378e-05, "loss": 0.9076, "step": 155500 },
    { "epoch": 3.12, "learning_rate": 1.376e-05, "loss": 0.8916, "step": 156000 },
    { "epoch": 3.13, "learning_rate": 1.3740000000000002e-05, "loss": 0.8846, "step": 156500 },
    { "epoch": 3.14, "learning_rate": 1.3720000000000002e-05, "loss": 0.887, "step": 157000 },
    { "epoch": 3.15, "learning_rate": 1.3700000000000003e-05, "loss": 0.8879, "step": 157500 },
    { "epoch": 3.16, "learning_rate": 1.3680000000000003e-05, "loss": 0.9067, "step": 158000 },
    { "epoch": 3.17, "learning_rate": 1.3660000000000001e-05, "loss": 0.9291, "step": 158500 },
    { "epoch": 3.18, "learning_rate": 1.3640000000000002e-05, "loss": 0.9288, "step": 159000 },
    { "epoch": 3.19, "learning_rate": 1.3620000000000002e-05, "loss": 0.869, "step": 159500 },
    { "epoch": 3.2, "learning_rate": 1.3600000000000002e-05, "loss": 0.9057, "step": 160000 },
    { "epoch": 3.21, "learning_rate": 1.3580000000000002e-05, "loss": 0.8712, "step": 160500 },
    { "epoch": 3.22, "learning_rate": 1.3560000000000002e-05, "loss": 0.9002, "step": 161000 },
    { "epoch": 3.23, "learning_rate": 1.3540000000000003e-05, "loss": 0.8941, "step": 161500 },
    { "epoch": 3.24, "learning_rate": 1.3520000000000003e-05, "loss": 0.9078, "step": 162000 },
    { "epoch": 3.25, "learning_rate": 1.3500000000000001e-05, "loss": 0.9013, "step": 162500 },
    { "epoch": 3.26, "learning_rate": 1.3480000000000001e-05, "loss": 0.9113, "step": 163000 },
    { "epoch": 3.27, "learning_rate": 1.3460000000000002e-05, "loss": 0.9183, "step": 163500 },
    { "epoch": 3.28, "learning_rate": 1.3440000000000002e-05, "loss": 0.8761, "step": 164000 },
    { "epoch": 3.29, "learning_rate": 1.3420000000000002e-05, "loss": 0.9013, "step": 164500 },
    { "epoch": 3.3, "learning_rate": 1.3400000000000002e-05, "loss": 0.9201, "step": 165000 },
    { "epoch": 3.31, "learning_rate": 1.3380000000000002e-05, "loss": 0.9147, "step": 165500 },
    { "epoch": 3.32, "learning_rate": 1.3360000000000003e-05, "loss": 0.8999, "step": 166000 },
    { "epoch": 3.33, "learning_rate": 1.3340000000000001e-05, "loss": 0.8977, "step": 166500 },
    { "epoch": 3.34, "learning_rate": 1.3320000000000001e-05, "loss": 0.9165, "step": 167000 },
    { "epoch": 3.35, "learning_rate": 1.3300000000000001e-05, "loss": 0.88, "step": 167500 },
    { "epoch": 3.36, "learning_rate": 1.3280000000000002e-05, "loss": 0.8984, "step": 168000 },
    { "epoch": 3.37, "learning_rate": 1.3260000000000002e-05, "loss": 0.9192, "step": 168500 },
    { "epoch": 3.38, "learning_rate": 1.3240000000000002e-05, "loss": 0.9002, "step": 169000 },
    { "epoch": 3.39, "learning_rate": 1.3220000000000002e-05, "loss": 0.9063, "step": 169500 },
    { "epoch": 3.4, "learning_rate": 1.3200000000000002e-05, "loss": 0.8966, "step": 170000 },
    { "epoch": 3.41, "learning_rate": 1.3180000000000001e-05, "loss": 0.8963, "step": 170500 },
    { "epoch": 3.42, "learning_rate": 1.3160000000000001e-05, "loss": 0.9023, "step": 171000 },
    { "epoch": 3.43, "learning_rate": 1.3140000000000001e-05, "loss": 0.9139, "step": 171500 },
    { "epoch": 3.44, "learning_rate": 1.3120000000000001e-05, "loss": 0.8753, "step": 172000 },
    { "epoch": 3.45, "learning_rate": 1.3100000000000002e-05, "loss": 0.9033, "step": 172500 },
    { "epoch": 3.46, "learning_rate": 1.3080000000000002e-05, "loss": 0.8899, "step": 173000 },
    { "epoch": 3.47, "learning_rate": 1.3060000000000002e-05, "loss": 0.9349, "step": 173500 },
    { "epoch": 3.48, "learning_rate": 1.3040000000000002e-05, "loss": 0.9069, "step": 174000 },
    { "epoch": 3.49, "learning_rate": 1.302e-05, "loss": 0.8968, "step": 174500 },
    { "epoch": 3.5, "learning_rate": 1.3000000000000001e-05, "loss": 0.8852, "step": 175000 },
    { "epoch": 3.51, "learning_rate": 1.2980000000000001e-05, "loss": 0.8884, "step": 175500 },
    { "epoch": 3.52, "learning_rate": 1.2960000000000001e-05, "loss": 0.9019, "step": 176000 },
    { "epoch": 3.53, "learning_rate": 1.2940000000000001e-05, "loss": 0.8991, "step": 176500 },
    { "epoch": 3.54, "learning_rate": 1.2920000000000002e-05, "loss": 0.8742, "step": 177000 },
    { "epoch": 3.55, "learning_rate": 1.2900000000000002e-05, "loss": 0.9328, "step": 177500 },
    { "epoch": 3.56, "learning_rate": 1.2880000000000002e-05, "loss": 0.9012, "step": 178000 },
    { "epoch": 3.57, "learning_rate": 1.286e-05, "loss": 0.9047, "step": 178500 },
    { "epoch": 3.58, "learning_rate": 1.284e-05, "loss": 0.8978, "step": 179000 },
    { "epoch": 3.59, "learning_rate": 1.2820000000000001e-05, "loss": 0.8974, "step": 179500 },
    { "epoch": 3.6, "learning_rate": 1.2800000000000001e-05, "loss": 0.8864, "step": 180000 },
    { "epoch": 3.61, "learning_rate": 1.2780000000000001e-05, "loss": 0.883, "step": 180500 },
    { "epoch": 3.62, "learning_rate": 1.2760000000000001e-05, "loss": 0.9191, "step": 181000 },
    { "epoch": 3.63, "learning_rate": 1.2740000000000002e-05, "loss": 0.9185, "step": 181500 },
    { "epoch": 3.64, "learning_rate": 1.2720000000000002e-05, "loss": 0.8723, "step": 182000 },
    { "epoch": 3.65, "learning_rate": 1.27e-05, "loss": 0.8941, "step": 182500 },
    { "epoch": 3.66, "learning_rate": 1.268e-05, "loss": 0.8825, "step": 183000 },
    { "epoch": 3.67, "learning_rate": 1.266e-05, "loss": 0.8716, "step": 183500 },
    { "epoch": 3.68, "learning_rate": 1.2640000000000001e-05, "loss": 0.8752, "step": 184000 },
    { "epoch": 3.69, "learning_rate": 1.2620000000000001e-05, "loss": 0.9101, "step": 184500 },
    { "epoch": 3.7, "learning_rate": 1.2600000000000001e-05, "loss": 0.9017, "step": 185000 },
    { "epoch": 3.71, "learning_rate": 1.2580000000000002e-05, "loss": 0.9123, "step": 185500 },
    { "epoch": 3.72, "learning_rate": 1.2560000000000002e-05, "loss": 0.9461, "step": 186000 },
    { "epoch": 3.73, "learning_rate": 1.254e-05, "loss": 0.915, "step": 186500 },
    { "epoch": 3.74, "learning_rate": 1.252e-05, "loss": 0.8812, "step": 187000 },
    { "epoch": 3.75, "learning_rate": 1.25e-05, "loss": 0.9088, "step": 187500 },
    { "epoch": 3.76, "learning_rate": 1.248e-05, "loss": 0.9244, "step": 188000 },
    { "epoch": 3.77, "learning_rate": 1.2460000000000001e-05, "loss": 0.8963, "step": 188500 },
    { "epoch": 3.78, "learning_rate": 1.2440000000000001e-05, "loss": 0.88, "step": 189000 },
    { "epoch": 3.79, "learning_rate": 1.2420000000000001e-05, "loss": 0.8912, "step": 189500 },
    { "epoch": 3.8, "learning_rate": 1.2400000000000002e-05, "loss": 0.8959, "step": 190000 },
    { "epoch": 3.81, "learning_rate": 1.2380000000000002e-05, "loss": 0.8935, "step": 190500 },
    { "epoch": 3.82, "learning_rate": 1.236e-05, "loss": 0.8894, "step": 191000 },
    { "epoch": 3.83, "learning_rate": 1.234e-05, "loss": 0.8805, "step": 191500 },
    { "epoch": 3.84, "learning_rate": 1.232e-05, "loss": 0.9075, "step": 192000 },
    { "epoch": 3.85, "learning_rate": 1.23e-05, "loss": 0.8917, "step": 192500 },
    { "epoch": 3.86, "learning_rate": 1.2280000000000001e-05, "loss": 0.8614, "step": 193000 },
    { "epoch": 3.87, "learning_rate": 1.2260000000000001e-05, "loss": 0.8729, "step": 193500 },
    { "epoch": 3.88, "learning_rate": 1.2240000000000001e-05, "loss": 0.8799, "step": 194000 },
    { "epoch": 3.89, "learning_rate": 1.2220000000000002e-05, "loss": 0.9237, "step": 194500 },
    { "epoch": 3.9, "learning_rate": 1.22e-05, "loss": 0.9123, "step": 195000 },
    { "epoch": 3.91, "learning_rate": 1.218e-05, "loss": 0.9212, "step": 195500 },
    { "epoch": 3.92, "learning_rate": 1.216e-05, "loss": 0.8751, "step": 196000 },
    { "epoch": 3.93, "learning_rate": 1.214e-05, "loss": 0.8942, "step": 196500 },
    { "epoch": 3.94, "learning_rate": 1.2120000000000001e-05, "loss": 0.8926, "step": 197000 },
    { "epoch": 3.95, "learning_rate": 1.2100000000000001e-05, "loss": 0.8766, "step": 197500 },
    { "epoch": 3.96, "learning_rate": 1.2080000000000001e-05, "loss": 0.8951, "step": 198000 },
    { "epoch": 3.97, "learning_rate": 1.2060000000000001e-05, "loss": 0.9063, "step": 198500 },
    { "epoch": 3.98, "learning_rate": 1.204e-05, "loss": 0.8995, "step": 199000 },
    { "epoch": 3.99, "learning_rate": 1.202e-05, "loss": 0.8949, "step": 199500 },
    { "epoch": 4.0, "learning_rate": 1.2e-05, "loss": 0.9091, "step": 200000 },
    { "epoch": 4.0, "eval_loss": 1.0091001987457275, "eval_runtime": 34.9666, "eval_samples_per_second": 241.345, "step": 200000 },
    { "epoch": 4.01, "learning_rate": 1.198e-05, "loss": 0.8757, "step": 200500 },
    { "epoch": 4.02, "learning_rate": 1.196e-05, "loss": 0.8703, "step": 201000 },
    { "epoch": 4.03, "learning_rate": 1.1940000000000001e-05, "loss": 0.9002, "step": 201500 },
    { "epoch": 4.04, "learning_rate": 1.1920000000000001e-05, "loss": 0.8746, "step": 202000 },
    { "epoch": 4.05, "learning_rate": 1.1900000000000001e-05, "loss": 0.8543, "step": 202500 },
    { "epoch": 4.06, "learning_rate": 1.188e-05, "loss": 0.8602, "step": 203000 },
    { "epoch": 4.07, "learning_rate": 1.186e-05, "loss": 0.8668, "step": 203500 },
    { "epoch": 4.08, "learning_rate": 1.184e-05, "loss": 0.8917, "step": 204000 },
    { "epoch": 4.09, "learning_rate": 1.182e-05, "loss": 0.8905, "step": 204500 },
    { "epoch": 4.1, "learning_rate": 1.18e-05, "loss": 0.8664, "step": 205000 },
    { "epoch": 4.11, "learning_rate": 1.178e-05, "loss": 0.8751, "step": 205500 },
    { "epoch": 4.12, "learning_rate": 1.1760000000000001e-05, "loss": 0.8781, "step": 206000 },
    { "epoch": 4.13, "learning_rate": 1.1740000000000001e-05, "loss": 0.8737, "step": 206500 },
    { "epoch": 4.14, "learning_rate": 1.172e-05, "loss": 0.8616, "step": 207000 },
    { "epoch": 4.15, "learning_rate": 1.17e-05, "loss": 0.8776, "step": 207500 },
    { "epoch": 4.16, "learning_rate": 1.168e-05, "loss": 0.9032, "step": 208000 },
    { "epoch": 4.17, "learning_rate": 1.166e-05, "loss": 0.8972, "step": 208500 },
    { "epoch": 4.18, "learning_rate": 1.164e-05, "loss": 0.89, "step": 209000 },
    { "epoch": 4.19, "learning_rate": 1.162e-05, "loss": 0.863, "step": 209500 },
    { "epoch": 4.2, "learning_rate": 1.16e-05, "loss": 0.8609, "step": 210000 },
    { "epoch": 4.21, "learning_rate": 1.1580000000000001e-05, "loss": 0.8667, "step": 210500 },
    { "epoch": 4.22, "learning_rate": 1.156e-05, "loss": 0.8581, "step": 211000 },
    { "epoch": 4.23, "learning_rate": 1.154e-05, "loss": 0.8668, "step": 211500 },
    { "epoch": 4.24, "learning_rate": 1.152e-05, "loss": 0.9037, "step": 212000 },
    { "epoch": 4.25, "learning_rate": 1.15e-05, "loss": 0.9107, "step": 212500 },
    { "epoch": 4.26, "learning_rate": 1.148e-05, "loss": 0.8756, "step": 213000 },
    { "epoch": 4.27, "learning_rate": 1.146e-05, "loss": 0.8658, "step": 213500 },
    { "epoch": 4.28, "learning_rate": 1.144e-05, "loss": 0.8647, "step": 214000 },
    { "epoch": 4.29, "learning_rate": 1.142e-05, "loss": 0.9025, "step": 214500 },
    { "epoch": 4.3, "learning_rate": 1.14e-05, "loss": 0.8775, "step": 215000 },
    { "epoch": 4.31, "learning_rate": 1.138e-05, "loss": 0.8759, "step": 215500 },
    { "epoch": 4.32, "learning_rate": 1.136e-05, "loss": 0.8681, "step": 216000 },
    { "epoch": 4.33, "learning_rate": 1.134e-05, "loss": 0.8751, "step": 216500 },
    { "epoch": 4.34, "learning_rate": 1.132e-05, "loss": 0.9005, "step": 217000 },
    { "epoch": 4.35, "learning_rate": 1.13e-05, "loss": 0.8791, "step": 217500 },
    { "epoch": 4.36, "learning_rate": 1.128e-05, "loss": 0.8595, "step": 218000 },
    { "epoch": 4.37, "learning_rate": 1.126e-05, "loss": 0.905, "step": 218500 },
    { "epoch": 4.38, "learning_rate": 1.1240000000000002e-05, "loss": 0.9036, "step": 219000 },
    { "epoch": 4.39, "learning_rate": 1.1220000000000003e-05, "loss": 0.8793, "step": 219500 },
    { "epoch": 4.4, "learning_rate": 1.1200000000000001e-05, "loss": 0.8686, "step": 220000 },
    { "epoch": 4.41, "learning_rate": 1.1180000000000001e-05, "loss": 0.8397, "step": 220500 },
    { "epoch": 4.42, "learning_rate": 1.1160000000000002e-05, "loss": 0.8454, "step": 221000 },
    { "epoch": 4.43, "learning_rate": 1.1140000000000002e-05, "loss": 0.8576, "step": 221500 },
    { "epoch": 4.44, "learning_rate": 1.1120000000000002e-05, "loss": 0.8645, "step": 222000 },
    { "epoch": 4.45, "learning_rate": 1.1100000000000002e-05, "loss": 0.882, "step": 222500 },
    { "epoch": 4.46, "learning_rate": 1.1080000000000002e-05, "loss": 0.8672, "step": 223000 },
    { "epoch": 4.47, "learning_rate": 1.1060000000000003e-05, "loss": 0.8772, "step": 223500 },
    { "epoch": 4.48, "learning_rate": 1.1040000000000001e-05, "loss": 0.8743, "step": 224000 },
    { "epoch": 4.49, "learning_rate": 1.1020000000000001e-05, "loss": 0.8809, "step": 224500 },
    { "epoch": 4.5, "learning_rate": 1.1000000000000001e-05, "loss": 0.865, "step": 225000 },
    { "epoch": 4.51, "learning_rate": 1.0980000000000002e-05, "loss": 0.8971, "step": 225500 },
    { "epoch": 4.52, "learning_rate": 1.0960000000000002e-05, "loss": 0.8752, "step": 226000 },
    { "epoch": 4.53, "learning_rate": 1.0940000000000002e-05, "loss": 0.8505, "step": 226500 },
    { "epoch": 4.54, "learning_rate": 1.0920000000000002e-05, "loss": 0.845, "step": 227000 },
    { "epoch": 4.55, "learning_rate": 1.0900000000000002e-05, "loss": 0.873, "step": 227500 },
    { "epoch": 4.56, "learning_rate": 1.0880000000000001e-05, "loss": 0.914, "step": 228000 },
    { "epoch": 4.57, "learning_rate": 1.0860000000000001e-05, "loss": 0.8504, "step": 228500 },
    { "epoch": 4.58, "learning_rate": 1.0840000000000001e-05, "loss": 0.8951, "step": 229000 },
    { "epoch": 4.59, "learning_rate": 1.0820000000000001e-05, "loss": 0.8836, "step": 229500 },
    { "epoch": 4.6, "learning_rate": 1.0800000000000002e-05, "loss": 0.8557, "step": 230000 },
    { "epoch": 4.61, "learning_rate": 1.0780000000000002e-05, "loss": 0.8748, "step": 230500 },
    { "epoch": 4.62, "learning_rate": 1.0760000000000002e-05, "loss": 0.8803, "step": 231000 },
    { "epoch": 4.63, "learning_rate": 1.0740000000000002e-05, "loss": 0.9195, "step": 231500 },
    { "epoch": 4.64, "learning_rate": 1.072e-05, "loss": 0.8697, "step": 232000 },
    { "epoch": 4.65, "learning_rate": 1.0700000000000001e-05, "loss": 0.8872, "step": 232500 },
    { "epoch": 4.66, "learning_rate": 1.0680000000000001e-05, "loss": 0.8618, "step": 233000 },
    { "epoch": 4.67, "learning_rate": 1.0660000000000001e-05, "loss": 0.8387, "step": 233500 },
    { "epoch": 4.68, "learning_rate": 1.0640000000000001e-05, "loss": 0.8702, "step": 234000 },
    { "epoch": 4.69, "learning_rate": 1.0620000000000002e-05, "loss": 0.8726, "step": 234500 },
    { "epoch": 4.7, "learning_rate": 1.0600000000000002e-05, "loss": 0.8956, "step": 235000 },
    { "epoch": 4.71, "learning_rate": 1.0580000000000002e-05, "loss": 0.8636, "step": 235500 },
    { "epoch": 4.72, "learning_rate": 1.056e-05, "loss": 0.8649, "step": 236000 },
    { "epoch": 4.73, "learning_rate": 1.054e-05, "loss": 0.8856, "step": 236500 },
    { "epoch": 4.74, "learning_rate": 1.0520000000000001e-05, "loss": 0.8849, "step": 237000 },
    { "epoch": 4.75, "learning_rate": 1.0500000000000001e-05, "loss": 0.8673, "step": 237500 },
    { "epoch": 4.76, "learning_rate": 1.0480000000000001e-05, "loss": 0.8557, "step": 238000 },
    { "epoch": 4.77, "learning_rate": 1.0460000000000001e-05, "loss": 0.8508, "step": 238500 },
    { "epoch": 4.78, "learning_rate": 1.0440000000000002e-05, "loss": 0.8562, "step": 239000 },
    { "epoch": 4.79, "learning_rate": 1.0420000000000002e-05, "loss": 0.8952, "step": 239500 },
    { "epoch": 4.8, "learning_rate": 1.04e-05, "loss": 0.8949, "step": 240000 },
    { "epoch": 4.81, "learning_rate": 1.038e-05, "loss": 0.882, "step": 240500 },
    { "epoch": 4.82, "learning_rate": 1.036e-05, "loss": 0.8593, "step": 241000 },
    { "epoch": 4.83, "learning_rate": 1.0340000000000001e-05, "loss": 0.879, "step": 241500 },
    { "epoch": 4.84, "learning_rate": 1.0320000000000001e-05, "loss": 0.9084, "step": 242000 },
    { "epoch": 4.85, "learning_rate": 1.0300000000000001e-05, "loss": 0.871, "step": 242500 },
    { "epoch": 4.86, "learning_rate": 1.0280000000000002e-05, "loss": 0.8417, "step": 243000 },
    { "epoch": 4.87, "learning_rate": 1.0260000000000002e-05, "loss": 0.8789, "step": 243500 },
    { "epoch": 4.88, "learning_rate": 1.024e-05, "loss": 0.8516, "step": 244000 },
    { "epoch": 4.89, "learning_rate": 1.022e-05, "loss": 0.893, "step": 244500 },
    { "epoch": 4.9, "learning_rate": 1.02e-05, "loss": 0.8977, "step": 245000 },
    { "epoch": 4.91, "learning_rate": 1.018e-05, "loss": 0.8702, "step": 245500 },
    { "epoch": 4.92, "learning_rate": 1.0160000000000001e-05, "loss": 0.8389, "step": 246000 },
    { "epoch": 4.93, "learning_rate": 1.0140000000000001e-05, "loss": 0.8826, "step": 246500 },
    { "epoch": 4.94, "learning_rate": 1.0120000000000001e-05, "loss": 0.8712, "step": 247000 },
    { "epoch": 4.95, "learning_rate": 1.0100000000000002e-05, "loss": 0.8674, "step": 247500 },
    { "epoch": 4.96, "learning_rate": 1.008e-05, "loss": 0.8871, "step": 248000 },
    { "epoch": 4.97, "learning_rate": 1.006e-05, "loss": 0.8929, "step": 248500 },
    { "epoch": 4.98, "learning_rate": 1.004e-05, "loss": 0.8592, "step": 249000 },
    { "epoch": 4.99, "learning_rate": 1.002e-05, "loss": 0.8824, "step": 249500 },
    { "epoch": 5.0, "learning_rate": 1e-05, "loss": 0.8465, "step": 250000 },
    { "epoch": 5.0, "eval_loss": 1.0072413682937622, "eval_runtime": 34.9811, "eval_samples_per_second": 241.244, "step": 250000 }
  ],
  "max_steps": 500000,
  "num_train_epochs": 10,
  "total_flos": 6.924912949067942e+16,
  "trial_name": null,
  "trial_params": null
}