|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 20.0, |
|
"eval_steps": 500, |
|
"global_step": 1920, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.6041666666666667e-08, |
|
"loss": 2.3378, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.3020833333333334e-07, |
|
"loss": 2.6781, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 2.604166666666667e-07, |
|
"loss": 2.6965, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 3.90625e-07, |
|
"loss": 2.6689, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 5.208333333333334e-07, |
|
"loss": 2.7272, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 6.510416666666668e-07, |
|
"loss": 2.697, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 7.8125e-07, |
|
"loss": 2.6381, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 9.114583333333333e-07, |
|
"loss": 2.6254, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.0416666666666667e-06, |
|
"loss": 2.6373, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1718750000000001e-06, |
|
"loss": 2.6103, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.3020833333333335e-06, |
|
"loss": 2.5922, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.4322916666666667e-06, |
|
"loss": 2.578, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5625e-06, |
|
"loss": 2.6198, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.6927083333333335e-06, |
|
"loss": 2.6088, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.8229166666666666e-06, |
|
"loss": 2.6466, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.953125e-06, |
|
"loss": 2.6893, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.0833333333333334e-06, |
|
"loss": 2.4503, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 2.213541666666667e-06, |
|
"loss": 2.6438, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.3437500000000002e-06, |
|
"loss": 2.6285, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.4739583333333336e-06, |
|
"loss": 2.6831, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 2.6346399784088135, |
|
"eval_runtime": 172.9512, |
|
"eval_samples_per_second": 4.429, |
|
"eval_steps_per_second": 1.11, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 2.604166666666667e-06, |
|
"loss": 2.718, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 2.7343750000000004e-06, |
|
"loss": 2.6197, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 2.8645833333333334e-06, |
|
"loss": 2.6603, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 2.994791666666667e-06, |
|
"loss": 2.7052, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 3.125e-06, |
|
"loss": 2.6982, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 3.2552083333333335e-06, |
|
"loss": 2.5888, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 3.385416666666667e-06, |
|
"loss": 2.7218, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 3.5156250000000003e-06, |
|
"loss": 2.6269, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 3.6458333333333333e-06, |
|
"loss": 2.6213, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 3.776041666666667e-06, |
|
"loss": 2.5227, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 3.90625e-06, |
|
"loss": 2.6565, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 4.0364583333333335e-06, |
|
"loss": 2.6106, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 4.166666666666667e-06, |
|
"loss": 2.5478, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 4.296875e-06, |
|
"loss": 2.6953, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 4.427083333333334e-06, |
|
"loss": 2.7111, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 4.557291666666667e-06, |
|
"loss": 2.661, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 4.6875000000000004e-06, |
|
"loss": 2.5927, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 4.817708333333334e-06, |
|
"loss": 2.4611, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 4.947916666666667e-06, |
|
"loss": 2.6684, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_loss": 2.6288113594055176, |
|
"eval_runtime": 172.9222, |
|
"eval_samples_per_second": 4.43, |
|
"eval_steps_per_second": 1.11, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 4.999962815348555e-06, |
|
"loss": 2.5517, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.999735579817769e-06, |
|
"loss": 2.6882, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.999301785650102e-06, |
|
"loss": 2.6676, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 4.998661468690914e-06, |
|
"loss": 2.5453, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 4.997814681851004e-06, |
|
"loss": 2.7482, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 4.996761495102227e-06, |
|
"loss": 2.6014, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 4.995501995471712e-06, |
|
"loss": 2.6348, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 4.994036287034682e-06, |
|
"loss": 2.6335, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 4.992364490905838e-06, |
|
"loss": 2.5545, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 4.990486745229364e-06, |
|
"loss": 2.5585, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 4.988403205167508e-06, |
|
"loss": 2.6359, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 4.986114042887756e-06, |
|
"loss": 2.5413, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 4.983619447548611e-06, |
|
"loss": 2.6614, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.980919625283962e-06, |
|
"loss": 2.6331, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.978014799186046e-06, |
|
"loss": 2.6829, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 4.97490520928702e-06, |
|
"loss": 2.6249, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 4.971591112539121e-06, |
|
"loss": 2.6775, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 4.968072782793436e-06, |
|
"loss": 2.5929, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.964350510777273e-06, |
|
"loss": 2.687, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_loss": 2.6191797256469727, |
|
"eval_runtime": 172.991, |
|
"eval_samples_per_second": 4.428, |
|
"eval_steps_per_second": 1.11, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 4.9604246040701384e-06, |
|
"loss": 2.5492, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 4.956295387078319e-06, |
|
"loss": 2.6643, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 4.9519632010080765e-06, |
|
"loss": 2.6451, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 4.947428403837454e-06, |
|
"loss": 2.447, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 4.942691370286693e-06, |
|
"loss": 2.682, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 4.937752491787272e-06, |
|
"loss": 2.5597, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 4.93261217644956e-06, |
|
"loss": 2.5953, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 4.927270849029095e-06, |
|
"loss": 2.5892, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 4.9217289508914836e-06, |
|
"loss": 2.6764, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 4.91598693997593e-06, |
|
"loss": 2.5029, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 4.910045290757399e-06, |
|
"loss": 2.676, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 4.9039044942074055e-06, |
|
"loss": 2.5671, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 4.8975650577534435e-06, |
|
"loss": 2.6389, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 4.89102750523706e-06, |
|
"loss": 2.5506, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 4.884292376870567e-06, |
|
"loss": 2.5381, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 4.877360229192404e-06, |
|
"loss": 2.6787, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 4.8702316350211445e-06, |
|
"loss": 2.6745, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 4.86290718340817e-06, |
|
"loss": 2.6619, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 4.855387479588991e-06, |
|
"loss": 2.6597, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_loss": 2.6108741760253906, |
|
"eval_runtime": 172.9898, |
|
"eval_samples_per_second": 4.428, |
|
"eval_steps_per_second": 1.11, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 4.847673144933239e-06, |
|
"loss": 2.6435, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 4.839764816893315e-06, |
|
"loss": 2.5638, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 4.83166314895172e-06, |
|
"loss": 2.5875, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 4.823368810567056e-06, |
|
"loss": 2.6563, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 4.814882487118708e-06, |
|
"loss": 2.6723, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 4.806204879850206e-06, |
|
"loss": 2.5723, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 4.797336705811282e-06, |
|
"loss": 2.6688, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 4.788278697798619e-06, |
|
"loss": 2.5098, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 4.779031604295297e-06, |
|
"loss": 2.5736, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 4.769596189408947e-06, |
|
"loss": 2.5369, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 4.759973232808609e-06, |
|
"loss": 2.5826, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 4.750163529660303e-06, |
|
"loss": 2.722, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 4.740167890561331e-06, |
|
"loss": 2.5048, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 4.729987141473286e-06, |
|
"loss": 2.7039, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 4.719622123653809e-06, |
|
"loss": 2.5838, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 4.70907369358707e-06, |
|
"loss": 2.5589, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 4.698342722912993e-06, |
|
"loss": 2.6799, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 4.687430098355237e-06, |
|
"loss": 2.654, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 4.676336721647917e-06, |
|
"loss": 2.6297, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 4.665063509461098e-06, |
|
"loss": 2.6019, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_loss": 2.6053690910339355, |
|
"eval_runtime": 172.9519, |
|
"eval_samples_per_second": 4.429, |
|
"eval_steps_per_second": 1.11, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"learning_rate": 4.6536113933250395e-06, |
|
"loss": 2.6562, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 4.641981319553232e-06, |
|
"loss": 2.654, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"learning_rate": 4.630174249164197e-06, |
|
"loss": 2.5684, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 4.61819115780207e-06, |
|
"loss": 2.5459, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"learning_rate": 4.606033035655989e-06, |
|
"loss": 2.5952, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 4.59370088737827e-06, |
|
"loss": 2.7195, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 4.581195732001388e-06, |
|
"loss": 2.591, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"learning_rate": 4.568518602853776e-06, |
|
"loss": 2.5691, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"learning_rate": 4.555670547474438e-06, |
|
"loss": 2.5543, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"learning_rate": 4.542652627526385e-06, |
|
"loss": 2.589, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"learning_rate": 4.529465918708911e-06, |
|
"loss": 2.6588, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 5.62, |
|
"learning_rate": 4.516111510668707e-06, |
|
"loss": 2.5623, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 4.502590506909815e-06, |
|
"loss": 2.6214, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 4.48890402470245e-06, |
|
"loss": 2.6125, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"learning_rate": 4.475053194990673e-06, |
|
"loss": 2.6154, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 4.46103916229894e-06, |
|
"loss": 2.5869, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 4.446863084637527e-06, |
|
"loss": 2.5409, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 4.432526133406843e-06, |
|
"loss": 2.6322, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"learning_rate": 4.4180294933006315e-06, |
|
"loss": 2.6118, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_loss": 2.602187395095825, |
|
"eval_runtime": 172.8706, |
|
"eval_samples_per_second": 4.431, |
|
"eval_steps_per_second": 1.111, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 6.04, |
|
"learning_rate": 4.403374362208078e-06, |
|
"loss": 2.5985, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"learning_rate": 4.388561951114829e-06, |
|
"loss": 2.6755, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"learning_rate": 4.3735934840029185e-06, |
|
"loss": 2.5744, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 6.2, |
|
"learning_rate": 4.358470197749635e-06, |
|
"loss": 2.6534, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 6.25, |
|
"learning_rate": 4.34319334202531e-06, |
|
"loss": 2.4585, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"learning_rate": 4.327764179190061e-06, |
|
"loss": 2.5897, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 6.35, |
|
"learning_rate": 4.312183984189471e-06, |
|
"loss": 2.6968, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"learning_rate": 4.296454044449245e-06, |
|
"loss": 2.598, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"learning_rate": 4.280575659768828e-06, |
|
"loss": 2.614, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"learning_rate": 4.26455014221399e-06, |
|
"loss": 2.5943, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"learning_rate": 4.248378816008418e-06, |
|
"loss": 2.6974, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 4.232063017424285e-06, |
|
"loss": 2.5015, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 4.215604094671835e-06, |
|
"loss": 2.5402, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"learning_rate": 4.1990034077879746e-06, |
|
"loss": 2.5983, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 6.77, |
|
"learning_rate": 4.1822623285238944e-06, |
|
"loss": 2.5948, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"learning_rate": 4.165382240231713e-06, |
|
"loss": 2.5587, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 4.1483645377501726e-06, |
|
"loss": 2.5544, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 4.1312106272893745e-06, |
|
"loss": 2.5757, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"learning_rate": 4.113921926314587e-06, |
|
"loss": 2.7286, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_loss": 2.600128173828125, |
|
"eval_runtime": 172.8748, |
|
"eval_samples_per_second": 4.431, |
|
"eval_steps_per_second": 1.111, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"learning_rate": 4.096499863429113e-06, |
|
"loss": 2.5867, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 7.08, |
|
"learning_rate": 4.078945878256244e-06, |
|
"loss": 2.6628, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 7.14, |
|
"learning_rate": 4.061261421320298e-06, |
|
"loss": 2.6499, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 7.19, |
|
"learning_rate": 4.043447953926763e-06, |
|
"loss": 2.5979, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"learning_rate": 4.025506948041542e-06, |
|
"loss": 2.5506, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 7.29, |
|
"learning_rate": 4.0074398861693244e-06, |
|
"loss": 2.5433, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"learning_rate": 3.989248261231084e-06, |
|
"loss": 2.5475, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"learning_rate": 3.9709335764407115e-06, |
|
"loss": 2.781, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"learning_rate": 3.952497345180806e-06, |
|
"loss": 2.556, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 7.5, |
|
"learning_rate": 3.933941090877615e-06, |
|
"loss": 2.6076, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 7.55, |
|
"learning_rate": 3.915266346875157e-06, |
|
"loss": 2.5666, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 7.6, |
|
"learning_rate": 3.896474656308512e-06, |
|
"loss": 2.5663, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 7.66, |
|
"learning_rate": 3.877567571976311e-06, |
|
"loss": 2.5254, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 7.71, |
|
"learning_rate": 3.858546656212425e-06, |
|
"loss": 2.6294, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 7.76, |
|
"learning_rate": 3.8394134807568695e-06, |
|
"loss": 2.5674, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 7.81, |
|
"learning_rate": 3.82016962662592e-06, |
|
"loss": 2.6185, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 7.86, |
|
"learning_rate": 3.8008166839814776e-06, |
|
"loss": 2.5857, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 7.92, |
|
"learning_rate": 3.7813562519996633e-06, |
|
"loss": 2.6275, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 7.97, |
|
"learning_rate": 3.7617899387386803e-06, |
|
"loss": 2.6341, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_loss": 2.5987157821655273, |
|
"eval_runtime": 173.4207, |
|
"eval_samples_per_second": 4.417, |
|
"eval_steps_per_second": 1.107, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 8.02, |
|
"learning_rate": 3.7421193610059347e-06, |
|
"loss": 2.6327, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 8.07, |
|
"learning_rate": 3.722346144224432e-06, |
|
"loss": 2.5603, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 8.12, |
|
"learning_rate": 3.7024719222984696e-06, |
|
"loss": 2.5164, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 8.18, |
|
"learning_rate": 3.6824983374786216e-06, |
|
"loss": 2.6151, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 8.23, |
|
"learning_rate": 3.6624270402260355e-06, |
|
"loss": 2.7189, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 8.28, |
|
"learning_rate": 3.642259689076052e-06, |
|
"loss": 2.5992, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 8.33, |
|
"learning_rate": 3.621997950501156e-06, |
|
"loss": 2.571, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 8.39, |
|
"learning_rate": 3.6016434987732716e-06, |
|
"loss": 2.6104, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 8.44, |
|
"learning_rate": 3.5811980158254156e-06, |
|
"loss": 2.5709, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 8.49, |
|
"learning_rate": 3.5606631911127143e-06, |
|
"loss": 2.6242, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 8.54, |
|
"learning_rate": 3.5400407214727983e-06, |
|
"loss": 2.5912, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 8.59, |
|
"learning_rate": 3.519332310985592e-06, |
|
"loss": 2.65, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 8.65, |
|
"learning_rate": 3.4985396708325014e-06, |
|
"loss": 2.5836, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 8.7, |
|
"learning_rate": 3.477664519155014e-06, |
|
"loss": 2.5325, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 8.75, |
|
"learning_rate": 3.4567085809127247e-06, |
|
"loss": 2.5664, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 8.8, |
|
"learning_rate": 3.435673587740801e-06, |
|
"loss": 2.56, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 8.85, |
|
"learning_rate": 3.414561277806893e-06, |
|
"loss": 2.6283, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 8.91, |
|
"learning_rate": 3.3933733956675006e-06, |
|
"loss": 2.6294, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 8.96, |
|
"learning_rate": 3.3721116921238273e-06, |
|
"loss": 2.572, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"eval_loss": 2.59786319732666, |
|
"eval_runtime": 173.4256, |
|
"eval_samples_per_second": 4.417, |
|
"eval_steps_per_second": 1.107, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 9.01, |
|
"learning_rate": 3.3507779240770967e-06, |
|
"loss": 2.6574, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 9.06, |
|
"learning_rate": 3.3293738543833807e-06, |
|
"loss": 2.6334, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 9.11, |
|
"learning_rate": 3.3079012517079325e-06, |
|
"loss": 2.5623, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 9.17, |
|
"learning_rate": 3.2863618903790346e-06, |
|
"loss": 2.566, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 9.22, |
|
"learning_rate": 3.264757550241384e-06, |
|
"loss": 2.571, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 9.27, |
|
"learning_rate": 3.2430900165090195e-06, |
|
"loss": 2.5241, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 9.32, |
|
"learning_rate": 3.221361079617804e-06, |
|
"loss": 2.5838, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 9.38, |
|
"learning_rate": 3.199572535077481e-06, |
|
"loss": 2.6498, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 9.43, |
|
"learning_rate": 3.177726183323301e-06, |
|
"loss": 2.4745, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 9.48, |
|
"learning_rate": 3.1558238295672544e-06, |
|
"loss": 2.6874, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 9.53, |
|
"learning_rate": 3.1338672836488994e-06, |
|
"loss": 2.5929, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 9.58, |
|
"learning_rate": 3.1118583598858097e-06, |
|
"loss": 2.5818, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 9.64, |
|
"learning_rate": 3.0897988769236583e-06, |
|
"loss": 2.6143, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 9.69, |
|
"learning_rate": 3.0676906575859335e-06, |
|
"loss": 2.5951, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 9.74, |
|
"learning_rate": 3.0455355287233175e-06, |
|
"loss": 2.5665, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 9.79, |
|
"learning_rate": 3.0233353210627305e-06, |
|
"loss": 2.6756, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 9.84, |
|
"learning_rate": 3.0010918690560516e-06, |
|
"loss": 2.7057, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 9.9, |
|
"learning_rate": 2.978807010728536e-06, |
|
"loss": 2.5657, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 9.95, |
|
"learning_rate": 2.9564825875269333e-06, |
|
"loss": 2.574, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"learning_rate": 2.9341204441673267e-06, |
|
"loss": 2.622, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"eval_loss": 2.5974395275115967, |
|
"eval_runtime": 173.3905, |
|
"eval_samples_per_second": 4.418, |
|
"eval_steps_per_second": 1.107, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 10.05, |
|
"learning_rate": 2.9117224284826966e-06, |
|
"loss": 2.6295, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 10.1, |
|
"learning_rate": 2.889290391270235e-06, |
|
"loss": 2.7132, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 10.16, |
|
"learning_rate": 2.8668261861384045e-06, |
|
"loss": 2.637, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 10.21, |
|
"learning_rate": 2.844331669353777e-06, |
|
"loss": 2.6434, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 10.26, |
|
"learning_rate": 2.8218086996876405e-06, |
|
"loss": 2.6185, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 10.31, |
|
"learning_rate": 2.7992591382624064e-06, |
|
"loss": 2.6806, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 10.36, |
|
"learning_rate": 2.7766848483978235e-06, |
|
"loss": 2.4173, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 10.42, |
|
"learning_rate": 2.754087695457005e-06, |
|
"loss": 2.7256, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 10.47, |
|
"learning_rate": 2.7314695466922896e-06, |
|
"loss": 2.5551, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 10.52, |
|
"learning_rate": 2.708832271090947e-06, |
|
"loss": 2.5539, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 10.57, |
|
"learning_rate": 2.6861777392207415e-06, |
|
"loss": 2.5384, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 10.62, |
|
"learning_rate": 2.663507823075358e-06, |
|
"loss": 2.5276, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 10.68, |
|
"learning_rate": 2.6408243959197192e-06, |
|
"loss": 2.5189, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 10.73, |
|
"learning_rate": 2.6181293321351925e-06, |
|
"loss": 2.6471, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 10.78, |
|
"learning_rate": 2.5954245070647042e-06, |
|
"loss": 2.5109, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 10.83, |
|
"learning_rate": 2.572711796857779e-06, |
|
"loss": 2.5946, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 10.89, |
|
"learning_rate": 2.549993078315505e-06, |
|
"loss": 2.5543, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 10.94, |
|
"learning_rate": 2.527270228735456e-06, |
|
"loss": 2.6536, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 10.99, |
|
"learning_rate": 2.5045451257565597e-06, |
|
"loss": 2.6404, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 11.0, |
|
"eval_loss": 2.597221851348877, |
|
"eval_runtime": 173.4251, |
|
"eval_samples_per_second": 4.417, |
|
"eval_steps_per_second": 1.107, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 11.04, |
|
"learning_rate": 2.4818196472039464e-06, |
|
"loss": 2.5136, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 11.09, |
|
"learning_rate": 2.459095670933783e-06, |
|
"loss": 2.6783, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 11.15, |
|
"learning_rate": 2.4363750746781e-06, |
|
"loss": 2.656, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 11.2, |
|
"learning_rate": 2.4136597358896273e-06, |
|
"loss": 2.7115, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 11.25, |
|
"learning_rate": 2.3909515315866606e-06, |
|
"loss": 2.5209, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 11.3, |
|
"learning_rate": 2.368252338197956e-06, |
|
"loss": 2.5409, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 11.35, |
|
"learning_rate": 2.3455640314076805e-06, |
|
"loss": 2.62, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 11.41, |
|
"learning_rate": 2.322888486000415e-06, |
|
"loss": 2.6359, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 11.46, |
|
"learning_rate": 2.300227575706241e-06, |
|
"loss": 2.6546, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 11.51, |
|
"learning_rate": 2.2775831730459057e-06, |
|
"loss": 2.6222, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 11.56, |
|
"learning_rate": 2.2549571491760985e-06, |
|
"loss": 2.5022, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 11.61, |
|
"learning_rate": 2.232351373734827e-06, |
|
"loss": 2.4277, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 11.67, |
|
"learning_rate": 2.2097677146869242e-06, |
|
"loss": 2.6073, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 11.72, |
|
"learning_rate": 2.1872080381697023e-06, |
|
"loss": 2.631, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 11.77, |
|
"learning_rate": 2.1646742083387397e-06, |
|
"loss": 2.4211, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 11.82, |
|
"learning_rate": 2.1421680872138483e-06, |
|
"loss": 2.5914, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 11.88, |
|
"learning_rate": 2.1196915345252085e-06, |
|
"loss": 2.637, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 11.93, |
|
"learning_rate": 2.0972464075596964e-06, |
|
"loss": 2.6463, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 11.98, |
|
"learning_rate": 2.0748345610074115e-06, |
|
"loss": 2.6607, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"eval_loss": 2.597095251083374, |
|
"eval_runtime": 173.3734, |
|
"eval_samples_per_second": 4.418, |
|
"eval_steps_per_second": 1.107, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 12.03, |
|
"learning_rate": 2.052457846808419e-06, |
|
"loss": 2.6955, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 12.08, |
|
"learning_rate": 2.0301181139997206e-06, |
|
"loss": 2.578, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 12.14, |
|
"learning_rate": 2.007817208562466e-06, |
|
"loss": 2.5149, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 12.19, |
|
"learning_rate": 1.985556973269413e-06, |
|
"loss": 2.6151, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 12.24, |
|
"learning_rate": 1.9633392475326563e-06, |
|
"loss": 2.6298, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 12.29, |
|
"learning_rate": 1.9411658672516316e-06, |
|
"loss": 2.6063, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 12.34, |
|
"learning_rate": 1.919038664661414e-06, |
|
"loss": 2.7178, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 12.4, |
|
"learning_rate": 1.8969594681813141e-06, |
|
"loss": 2.6825, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 12.45, |
|
"learning_rate": 1.8749301022637923e-06, |
|
"loss": 2.637, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 12.5, |
|
"learning_rate": 1.852952387243698e-06, |
|
"loss": 2.5857, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 12.55, |
|
"learning_rate": 1.8310281391878582e-06, |
|
"loss": 2.5111, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 12.6, |
|
"learning_rate": 1.8091591697450044e-06, |
|
"loss": 2.5882, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 12.66, |
|
"learning_rate": 1.7873472859960738e-06, |
|
"loss": 2.5988, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 12.71, |
|
"learning_rate": 1.765594290304889e-06, |
|
"loss": 2.5168, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 12.76, |
|
"learning_rate": 1.7439019801692252e-06, |
|
"loss": 2.7292, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 12.81, |
|
"learning_rate": 1.722272148072273e-06, |
|
"loss": 2.5684, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 12.86, |
|
"learning_rate": 1.7007065813345286e-06, |
|
"loss": 2.5859, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 12.92, |
|
"learning_rate": 1.6792070619660977e-06, |
|
"loss": 2.5379, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 12.97, |
|
"learning_rate": 1.6577753665194502e-06, |
|
"loss": 2.5324, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"eval_loss": 2.597085475921631, |
|
"eval_runtime": 173.4189, |
|
"eval_samples_per_second": 4.417, |
|
"eval_steps_per_second": 1.107, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 13.02, |
|
"learning_rate": 1.6364132659426145e-06, |
|
"loss": 2.5232, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 13.07, |
|
"learning_rate": 1.6151225254328418e-06, |
|
"loss": 2.6904, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 13.12, |
|
"learning_rate": 1.5939049042907463e-06, |
|
"loss": 2.6439, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 13.18, |
|
"learning_rate": 1.5727621557749261e-06, |
|
"loss": 2.5261, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 13.23, |
|
"learning_rate": 1.5516960269570918e-06, |
|
"loss": 2.5726, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 13.28, |
|
"learning_rate": 1.5307082585776984e-06, |
|
"loss": 2.6521, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 13.33, |
|
"learning_rate": 1.509800584902108e-06, |
|
"loss": 2.5809, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 13.39, |
|
"learning_rate": 1.48897473357728e-06, |
|
"loss": 2.5779, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 13.44, |
|
"learning_rate": 1.4682324254890135e-06, |
|
"loss": 2.5948, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 13.49, |
|
"learning_rate": 1.4475753746197468e-06, |
|
"loss": 2.5909, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 13.54, |
|
"learning_rate": 1.427005287906928e-06, |
|
"loss": 2.5414, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 13.59, |
|
"learning_rate": 1.4065238651019646e-06, |
|
"loss": 2.5634, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 13.65, |
|
"learning_rate": 1.3861327986297719e-06, |
|
"loss": 2.6414, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 13.7, |
|
"learning_rate": 1.3658337734489225e-06, |
|
"loss": 2.5816, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 13.75, |
|
"learning_rate": 1.3456284669124159e-06, |
|
"loss": 2.7111, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 13.8, |
|
"learning_rate": 1.3255185486290727e-06, |
|
"loss": 2.5295, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 13.85, |
|
"learning_rate": 1.305505680325575e-06, |
|
"loss": 2.6368, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 13.91, |
|
"learning_rate": 1.2855915157091498e-06, |
|
"loss": 2.6506, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 13.96, |
|
"learning_rate": 1.2657777003309244e-06, |
|
"loss": 2.5472, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"eval_loss": 2.5970234870910645, |
|
"eval_runtime": 173.1837, |
|
"eval_samples_per_second": 4.423, |
|
"eval_steps_per_second": 1.109, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 14.01, |
|
"learning_rate": 1.2460658714499462e-06, |
|
"loss": 2.5883, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 14.06, |
|
"learning_rate": 1.2264576578978956e-06, |
|
"loss": 2.5137, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 14.11, |
|
"learning_rate": 1.2069546799444903e-06, |
|
"loss": 2.5562, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 14.17, |
|
"learning_rate": 1.1875585491636e-06, |
|
"loss": 2.6397, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 14.22, |
|
"learning_rate": 1.1682708683000776e-06, |
|
"loss": 2.6961, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 14.27, |
|
"learning_rate": 1.1490932311373219e-06, |
|
"loss": 2.7012, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 14.32, |
|
"learning_rate": 1.1300272223655776e-06, |
|
"loss": 2.5606, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 14.38, |
|
"learning_rate": 1.1110744174509952e-06, |
|
"loss": 2.6933, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 14.43, |
|
"learning_rate": 1.0922363825054355e-06, |
|
"loss": 2.5044, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 14.48, |
|
"learning_rate": 1.073514674157068e-06, |
|
"loss": 2.6375, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 14.53, |
|
"learning_rate": 1.054910839421742e-06, |
|
"loss": 2.5697, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 14.58, |
|
"learning_rate": 1.0364264155751489e-06, |
|
"loss": 2.6638, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 14.64, |
|
"learning_rate": 1.018062930025797e-06, |
|
"loss": 2.5092, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 14.69, |
|
"learning_rate": 9.99821900188798e-07, |
|
"loss": 2.6309, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 14.74, |
|
"learning_rate": 9.81704833360479e-07, |
|
"loss": 2.6016, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 14.79, |
|
"learning_rate": 9.637132265938314e-07, |
|
"loss": 2.6111, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 14.84, |
|
"learning_rate": 9.458485665748071e-07, |
|
"loss": 2.5727, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 14.9, |
|
"learning_rate": 9.281123294994684e-07, |
|
"loss": 2.5992, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 14.95, |
|
"learning_rate": 9.105059809520103e-07, |
|
"loss": 2.5187, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"learning_rate": 8.930309757836517e-07, |
|
"loss": 2.539, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"eval_loss": 2.597039222717285, |
|
"eval_runtime": 173.4464, |
|
"eval_samples_per_second": 4.416, |
|
"eval_steps_per_second": 1.107, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 15.05, |
|
"learning_rate": 8.7568875799242e-07, |
|
"loss": 2.5516, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 15.1, |
|
"learning_rate": 8.584807606038306e-07, |
|
"loss": 2.5809, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 15.16, |
|
"learning_rate": 8.414084055524727e-07, |
|
"loss": 2.7022, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 15.21, |
|
"learning_rate": 8.244731035645107e-07, |
|
"loss": 2.5839, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 15.26, |
|
"learning_rate": 8.076762540411145e-07, |
|
"loss": 2.5779, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 15.31, |
|
"learning_rate": 7.910192449428216e-07, |
|
"loss": 2.446, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 15.36, |
|
"learning_rate": 7.74503452674851e-07, |
|
"loss": 2.6257, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 15.42, |
|
"learning_rate": 7.581302419733633e-07, |
|
"loss": 2.6683, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 15.47, |
|
"learning_rate": 7.419009657926898e-07, |
|
"loss": 2.6346, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 15.52, |
|
"learning_rate": 7.258169651935382e-07, |
|
"loss": 2.5915, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 15.57, |
|
"learning_rate": 7.098795692321775e-07, |
|
"loss": 2.6015, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 15.62, |
|
"learning_rate": 6.940900948506113e-07, |
|
"loss": 2.5214, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 15.68, |
|
"learning_rate": 6.784498467677597e-07, |
|
"loss": 2.6015, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 15.73, |
|
"learning_rate": 6.629601173716454e-07, |
|
"loss": 2.726, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 15.78, |
|
"learning_rate": 6.476221866126029e-07, |
|
"loss": 2.5489, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 15.83, |
|
"learning_rate": 6.324373218975105e-07, |
|
"loss": 2.5079, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 15.89, |
|
"learning_rate": 6.174067779850645e-07, |
|
"loss": 2.6254, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 15.94, |
|
"learning_rate": 6.025317968820954e-07, |
|
"loss": 2.6369, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 15.99, |
|
"learning_rate": 5.878136077409357e-07, |
|
"loss": 2.5757, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"eval_loss": 2.597059965133667, |
|
"eval_runtime": 173.4779, |
|
"eval_samples_per_second": 4.416, |
|
"eval_steps_per_second": 1.107, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 16.04, |
|
"learning_rate": 5.732534267578549e-07, |
|
"loss": 2.6266, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 16.09, |
|
"learning_rate": 5.588524570725612e-07, |
|
"loss": 2.6665, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 16.15, |
|
"learning_rate": 5.44611888668784e-07, |
|
"loss": 2.5927, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 16.2, |
|
"learning_rate": 5.305328982759431e-07, |
|
"loss": 2.6236, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 16.25, |
|
"learning_rate": 5.166166492719124e-07, |
|
"loss": 2.4952, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 16.3, |
|
"learning_rate": 5.028642915868869e-07, |
|
"loss": 2.6098, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 16.35, |
|
"learning_rate": 4.892769616083648e-07, |
|
"loss": 2.6309, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 16.41, |
|
"learning_rate": 4.7585578208723976e-07, |
|
"loss": 2.4235, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 16.46, |
|
"learning_rate": 4.626018620450309e-07, |
|
"loss": 2.6291, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 16.51, |
|
"learning_rate": 4.4951629668223583e-07, |
|
"loss": 2.6039, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 16.56, |
|
"learning_rate": 4.366001672878406e-07, |
|
"loss": 2.5953, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 16.61, |
|
"learning_rate": 4.23854541149962e-07, |
|
"loss": 2.539, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 16.67, |
|
"learning_rate": 4.1128047146765936e-07, |
|
"loss": 2.7039, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 16.72, |
|
"learning_rate": 3.9887899726390455e-07, |
|
"loss": 2.7045, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 16.77, |
|
"learning_rate": 3.8665114329972997e-07, |
|
"loss": 2.4848, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 16.82, |
|
"learning_rate": 3.7459791998954137e-07, |
|
"loss": 2.6097, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 16.88, |
|
"learning_rate": 3.627203233176341e-07, |
|
"loss": 2.5642, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 16.93, |
|
"learning_rate": 3.510193347558866e-07, |
|
"loss": 2.6055, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 16.98, |
|
"learning_rate": 3.3949592118266426e-07, |
|
"loss": 2.6495, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 17.0, |
|
"eval_loss": 2.5970218181610107, |
|
"eval_runtime": 173.4401, |
|
"eval_samples_per_second": 4.417, |
|
"eval_steps_per_second": 1.107, |
|
"step": 1632 |
|
}, |
|
{ |
|
"epoch": 17.03, |
|
"learning_rate": 3.2815103480291934e-07, |
|
"loss": 2.6929, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 17.08, |
|
"learning_rate": 3.1698561306951065e-07, |
|
"loss": 2.5553, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 17.14, |
|
"learning_rate": 3.0600057860574015e-07, |
|
"loss": 2.6909, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 17.19, |
|
"learning_rate": 2.9519683912911267e-07, |
|
"loss": 2.5462, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 17.24, |
|
"learning_rate": 2.8457528737633163e-07, |
|
"loss": 2.6499, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 17.29, |
|
"learning_rate": 2.7413680102952755e-07, |
|
"loss": 2.5658, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 17.34, |
|
"learning_rate": 2.638822426437371e-07, |
|
"loss": 2.6463, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 17.4, |
|
"learning_rate": 2.5381245957562304e-07, |
|
"loss": 2.5734, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 17.45, |
|
"learning_rate": 2.439282839134602e-07, |
|
"loss": 2.4343, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 17.5, |
|
"learning_rate": 2.3423053240837518e-07, |
|
"loss": 2.6411, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 17.55, |
|
"learning_rate": 2.247200064068572e-07, |
|
"loss": 2.626, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 17.6, |
|
"learning_rate": 2.1539749178454223e-07, |
|
"loss": 2.6194, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 17.66, |
|
"learning_rate": 2.0626375888127187e-07, |
|
"loss": 2.672, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 17.71, |
|
"learning_rate": 1.9731956243744106e-07, |
|
"loss": 2.6633, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 17.76, |
|
"learning_rate": 1.8856564153163143e-07, |
|
"loss": 2.5198, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 17.81, |
|
"learning_rate": 1.800027195195389e-07, |
|
"loss": 2.5005, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 17.86, |
|
"learning_rate": 1.7163150397420087e-07, |
|
"loss": 2.5845, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 17.92, |
|
"learning_rate": 1.6345268662752904e-07, |
|
"loss": 2.6347, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 17.97, |
|
"learning_rate": 1.5546694331315086e-07, |
|
"loss": 2.5647, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 18.0, |
|
"eval_loss": 2.597024917602539, |
|
"eval_runtime": 173.3767, |
|
"eval_samples_per_second": 4.418, |
|
"eval_steps_per_second": 1.107, |
|
"step": 1728 |
|
}, |
|
{ |
|
"epoch": 18.02, |
|
"learning_rate": 1.4767493391056153e-07, |
|
"loss": 2.639, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 18.07, |
|
"learning_rate": 1.400773022905985e-07, |
|
"loss": 2.5828, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 18.12, |
|
"learning_rate": 1.3267467626223606e-07, |
|
"loss": 2.6745, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 18.18, |
|
"learning_rate": 1.2546766752070893e-07, |
|
"loss": 2.6199, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 18.23, |
|
"learning_rate": 1.1845687159696584e-07, |
|
"loss": 2.5725, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 18.28, |
|
"learning_rate": 1.1164286780845995e-07, |
|
"loss": 2.6234, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 18.33, |
|
"learning_rate": 1.0502621921127776e-07, |
|
"loss": 2.6208, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 18.39, |
|
"learning_rate": 9.860747255361485e-08, |
|
"loss": 2.5672, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 18.44, |
|
"learning_rate": 9.238715823059324e-08, |
|
"loss": 2.6916, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 18.49, |
|
"learning_rate": 8.636579024043717e-08, |
|
"loss": 2.6221, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 18.54, |
|
"learning_rate": 8.054386614199772e-08, |
|
"loss": 2.5351, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 18.59, |
|
"learning_rate": 7.492186701364007e-08, |
|
"loss": 2.5847, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 18.65, |
|
"learning_rate": 6.950025741349037e-08, |
|
"loss": 2.5973, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 18.7, |
|
"learning_rate": 6.427948534104777e-08, |
|
"loss": 2.6242, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 18.75, |
|
"learning_rate": 5.92599822001666e-08, |
|
"loss": 2.5065, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 18.8, |
|
"learning_rate": 5.444216276340702e-08, |
|
"loss": 2.6884, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 18.85, |
|
"learning_rate": 4.982642513776226e-08, |
|
"loss": 2.5535, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 18.91, |
|
"learning_rate": 4.541315073176178e-08, |
|
"loss": 2.4669, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 18.96, |
|
"learning_rate": 4.120270422395572e-08, |
|
"loss": 2.5605, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 19.0, |
|
"eval_loss": 2.5970375537872314, |
|
"eval_runtime": 173.322, |
|
"eval_samples_per_second": 4.42, |
|
"eval_steps_per_second": 1.108, |
|
"step": 1824 |
|
}, |
|
{ |
|
"epoch": 19.01, |
|
"learning_rate": 3.7195433532778713e-08, |
|
"loss": 2.6154, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 19.06, |
|
"learning_rate": 3.339166978780256e-08, |
|
"loss": 2.6572, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 19.11, |
|
"learning_rate": 2.9791727302373175e-08, |
|
"loss": 2.5629, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 19.17, |
|
"learning_rate": 2.6395903547638825e-08, |
|
"loss": 2.6195, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 19.22, |
|
"learning_rate": 2.3204479127968415e-08, |
|
"loss": 2.6312, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 19.27, |
|
"learning_rate": 2.0217717757766152e-08, |
|
"loss": 2.6655, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 19.32, |
|
"learning_rate": 1.7435866239678135e-08, |
|
"loss": 2.5804, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 19.38, |
|
"learning_rate": 1.4859154444200885e-08, |
|
"loss": 2.6275, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 19.43, |
|
"learning_rate": 1.2487795290684889e-08, |
|
"loss": 2.6881, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 19.48, |
|
"learning_rate": 1.0321984729741163e-08, |
|
"loss": 2.5813, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 19.53, |
|
"learning_rate": 8.361901727049204e-09, |
|
"loss": 2.6291, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 19.58, |
|
"learning_rate": 6.607708248569378e-09, |
|
"loss": 2.5822, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 19.64, |
|
"learning_rate": 5.0595492471583465e-09, |
|
"loss": 2.4705, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 19.69, |
|
"learning_rate": 3.71755265059226e-09, |
|
"loss": 2.5638, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 19.74, |
|
"learning_rate": 2.581829350994658e-09, |
|
"loss": 2.4875, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 19.79, |
|
"learning_rate": 1.6524731956749085e-09, |
|
"loss": 2.607, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 19.84, |
|
"learning_rate": 9.295609793708027e-10, |
|
"loss": 2.4936, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 19.9, |
|
"learning_rate": 4.13152437906128e-10, |
|
"loss": 2.6588, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 19.95, |
|
"learning_rate": 1.0329024325128611e-10, |
|
"loss": 2.5804, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.6608, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"eval_loss": 2.5970427989959717, |
|
"eval_runtime": 173.461, |
|
"eval_samples_per_second": 4.416, |
|
"eval_steps_per_second": 1.107, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"step": 1920, |
|
"total_flos": 1.0984887148766822e+18, |
|
"train_loss": 2.604488531500101, |
|
"train_runtime": 14508.1949, |
|
"train_samples_per_second": 1.056, |
|
"train_steps_per_second": 0.132 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 1920, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 20, |
|
"save_steps": 50, |
|
"total_flos": 1.0984887148766822e+18, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
}