{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.978406552494415,
  "eval_steps": 500,
  "global_step": 4000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 4.98758997269794e-05, "loss": 1.8076, "step": 10 },
    { "epoch": 0.01, "learning_rate": 4.97517994539588e-05, "loss": 1.792, "step": 20 },
    { "epoch": 0.02, "learning_rate": 4.96276991809382e-05, "loss": 1.7935, "step": 30 },
    { "epoch": 0.03, "learning_rate": 4.95035989079176e-05, "loss": 1.7432, "step": 40 },
    { "epoch": 0.04, "learning_rate": 4.9379498634897e-05, "loss": 1.7369, "step": 50 },
    { "epoch": 0.04, "learning_rate": 4.92553983618764e-05, "loss": 1.7506, "step": 60 },
    { "epoch": 0.05, "learning_rate": 4.91312980888558e-05, "loss": 1.6795, "step": 70 },
    { "epoch": 0.06, "learning_rate": 4.90071978158352e-05, "loss": 1.721, "step": 80 },
    { "epoch": 0.07, "learning_rate": 4.88830975428146e-05, "loss": 1.6954, "step": 90 },
    { "epoch": 0.07, "learning_rate": 4.875899726979399e-05, "loss": 1.6715, "step": 100 },
    { "epoch": 0.08, "learning_rate": 4.86348969967734e-05, "loss": 1.6799, "step": 110 },
    { "epoch": 0.09, "learning_rate": 4.8510796723752796e-05, "loss": 1.7203, "step": 120 },
    { "epoch": 0.1, "learning_rate": 4.8386696450732196e-05, "loss": 1.6643, "step": 130 },
    { "epoch": 0.1, "learning_rate": 4.8262596177711595e-05, "loss": 1.6655, "step": 140 },
    { "epoch": 0.11, "learning_rate": 4.8138495904690995e-05, "loss": 1.681, "step": 150 },
    { "epoch": 0.12, "learning_rate": 4.8014395631670394e-05, "loss": 1.7232, "step": 160 },
    { "epoch": 0.13, "learning_rate": 4.789029535864979e-05, "loss": 1.6658, "step": 170 },
    { "epoch": 0.13, "learning_rate": 4.7766195085629186e-05, "loss": 1.6965, "step": 180 },
    { "epoch": 0.14, "learning_rate": 4.764209481260859e-05, "loss": 1.6745, "step": 190 },
    { "epoch": 0.15, "learning_rate": 4.751799453958799e-05, "loss": 1.6881, "step": 200 },
    { "epoch": 0.16, "learning_rate": 4.739389426656739e-05, "loss": 1.6899, "step": 210 },
    { "epoch": 0.16, "learning_rate": 4.726979399354679e-05, "loss": 1.6825, "step": 220 },
    { "epoch": 0.17, "learning_rate": 4.7145693720526184e-05, "loss": 1.6894, "step": 230 },
    { "epoch": 0.18, "learning_rate": 4.702159344750558e-05, "loss": 1.7016, "step": 240 },
    { "epoch": 0.19, "learning_rate": 4.689749317448498e-05, "loss": 1.6556, "step": 250 },
    { "epoch": 0.19, "learning_rate": 4.677339290146439e-05, "loss": 1.715, "step": 260 },
    { "epoch": 0.2, "learning_rate": 4.664929262844379e-05, "loss": 1.6415, "step": 270 },
    { "epoch": 0.21, "learning_rate": 4.652519235542319e-05, "loss": 1.6986, "step": 280 },
    { "epoch": 0.22, "learning_rate": 4.640109208240258e-05, "loss": 1.6766, "step": 290 },
    { "epoch": 0.22, "learning_rate": 4.627699180938198e-05, "loss": 1.7016, "step": 300 },
    { "epoch": 0.23, "learning_rate": 4.615289153636138e-05, "loss": 1.698, "step": 310 },
    { "epoch": 0.24, "learning_rate": 4.602879126334078e-05, "loss": 1.6692, "step": 320 },
    { "epoch": 0.25, "learning_rate": 4.590469099032018e-05, "loss": 1.6386, "step": 330 },
    { "epoch": 0.25, "learning_rate": 4.5780590717299585e-05, "loss": 1.7369, "step": 340 },
    { "epoch": 0.26, "learning_rate": 4.5656490444278984e-05, "loss": 1.6997, "step": 350 },
    { "epoch": 0.27, "learning_rate": 4.553239017125838e-05, "loss": 1.6983, "step": 360 },
    { "epoch": 0.28, "learning_rate": 4.5408289898237776e-05, "loss": 1.6702, "step": 370 },
    { "epoch": 0.28, "learning_rate": 4.5284189625217176e-05, "loss": 1.6901, "step": 380 },
    { "epoch": 0.29, "learning_rate": 4.5160089352196575e-05, "loss": 1.6916, "step": 390 },
    { "epoch": 0.3, "learning_rate": 4.5035989079175975e-05, "loss": 1.682, "step": 400 },
    { "epoch": 0.31, "learning_rate": 4.491188880615538e-05, "loss": 1.6756, "step": 410 },
    { "epoch": 0.31, "learning_rate": 4.4787788533134774e-05, "loss": 1.6632, "step": 420 },
    { "epoch": 0.32, "learning_rate": 4.466368826011417e-05, "loss": 1.67, "step": 430 },
    { "epoch": 0.33, "learning_rate": 4.453958798709357e-05, "loss": 1.6581, "step": 440 },
    { "epoch": 0.34, "learning_rate": 4.441548771407297e-05, "loss": 1.7106, "step": 450 },
    { "epoch": 0.34, "learning_rate": 4.429138744105237e-05, "loss": 1.6757, "step": 460 },
    { "epoch": 0.35, "learning_rate": 4.416728716803177e-05, "loss": 1.6315, "step": 470 },
    { "epoch": 0.36, "learning_rate": 4.404318689501117e-05, "loss": 1.66, "step": 480 },
    { "epoch": 0.36, "learning_rate": 4.391908662199057e-05, "loss": 1.6966, "step": 490 },
    { "epoch": 0.37, "learning_rate": 4.379498634896997e-05, "loss": 1.6642, "step": 500 },
    { "epoch": 0.38, "learning_rate": 4.367088607594937e-05, "loss": 1.6882, "step": 510 },
    { "epoch": 0.39, "learning_rate": 4.354678580292877e-05, "loss": 1.6741, "step": 520 },
    { "epoch": 0.39, "learning_rate": 4.342268552990817e-05, "loss": 1.6859, "step": 530 },
    { "epoch": 0.4, "learning_rate": 4.329858525688757e-05, "loss": 1.6925, "step": 540 },
    { "epoch": 0.41, "learning_rate": 4.317448498386697e-05, "loss": 1.6748, "step": 550 },
    { "epoch": 0.42, "learning_rate": 4.3050384710846366e-05, "loss": 1.674, "step": 560 },
    { "epoch": 0.42, "learning_rate": 4.2926284437825766e-05, "loss": 1.6723, "step": 570 },
    { "epoch": 0.43, "learning_rate": 4.2802184164805165e-05, "loss": 1.688, "step": 580 },
    { "epoch": 0.44, "learning_rate": 4.2678083891784565e-05, "loss": 1.6585, "step": 590 },
    { "epoch": 0.45, "learning_rate": 4.2553983618763964e-05, "loss": 1.6902, "step": 600 },
    { "epoch": 0.45, "learning_rate": 4.2429883345743364e-05, "loss": 1.6565, "step": 610 },
    { "epoch": 0.46, "learning_rate": 4.2305783072722757e-05, "loss": 1.6897, "step": 620 },
    { "epoch": 0.47, "learning_rate": 4.218168279970216e-05, "loss": 1.6698, "step": 630 },
    { "epoch": 0.48, "learning_rate": 4.205758252668156e-05, "loss": 1.6712, "step": 640 },
    { "epoch": 0.48, "learning_rate": 4.193348225366096e-05, "loss": 1.6653, "step": 650 },
    { "epoch": 0.49, "learning_rate": 4.180938198064036e-05, "loss": 1.683, "step": 660 },
    { "epoch": 0.5, "learning_rate": 4.168528170761976e-05, "loss": 1.6498, "step": 670 },
    { "epoch": 0.51, "learning_rate": 4.1561181434599153e-05, "loss": 1.6939, "step": 680 },
    { "epoch": 0.51, "learning_rate": 4.143708116157855e-05, "loss": 1.7123, "step": 690 },
    { "epoch": 0.52, "learning_rate": 4.131298088855796e-05, "loss": 1.669, "step": 700 },
    { "epoch": 0.53, "learning_rate": 4.118888061553736e-05, "loss": 1.7012, "step": 710 },
    { "epoch": 0.54, "learning_rate": 4.106478034251676e-05, "loss": 1.668, "step": 720 },
    { "epoch": 0.54, "learning_rate": 4.094068006949616e-05, "loss": 1.6845, "step": 730 },
    { "epoch": 0.55, "learning_rate": 4.081657979647556e-05, "loss": 1.6371, "step": 740 },
    { "epoch": 0.56, "learning_rate": 4.069247952345495e-05, "loss": 1.6871, "step": 750 },
    { "epoch": 0.57, "learning_rate": 4.056837925043435e-05, "loss": 1.6605, "step": 760 },
    { "epoch": 0.57, "learning_rate": 4.044427897741375e-05, "loss": 1.6838, "step": 770 },
    { "epoch": 0.58, "learning_rate": 4.0320178704393155e-05, "loss": 1.6586, "step": 780 },
    { "epoch": 0.59, "learning_rate": 4.0196078431372555e-05, "loss": 1.7265, "step": 790 },
    { "epoch": 0.6, "learning_rate": 4.0071978158351954e-05, "loss": 1.6616, "step": 800 },
    { "epoch": 0.6, "learning_rate": 3.994787788533135e-05, "loss": 1.6445, "step": 810 },
    { "epoch": 0.61, "learning_rate": 3.9823777612310746e-05, "loss": 1.6916, "step": 820 },
    { "epoch": 0.62, "learning_rate": 3.9699677339290146e-05, "loss": 1.6986, "step": 830 },
    { "epoch": 0.63, "learning_rate": 3.9575577066269545e-05, "loss": 1.6776, "step": 840 },
    { "epoch": 0.63, "learning_rate": 3.945147679324895e-05, "loss": 1.6629, "step": 850 },
    { "epoch": 0.64, "learning_rate": 3.932737652022835e-05, "loss": 1.6789, "step": 860 },
    { "epoch": 0.65, "learning_rate": 3.9203276247207744e-05, "loss": 1.6432, "step": 870 },
    { "epoch": 0.66, "learning_rate": 3.907917597418714e-05, "loss": 1.6934, "step": 880 },
    { "epoch": 0.66, "learning_rate": 3.895507570116654e-05, "loss": 1.7064, "step": 890 },
    { "epoch": 0.67, "learning_rate": 3.883097542814594e-05, "loss": 1.6626, "step": 900 },
    { "epoch": 0.68, "learning_rate": 3.870687515512534e-05, "loss": 1.6482, "step": 910 },
    { "epoch": 0.69, "learning_rate": 3.858277488210474e-05, "loss": 1.674, "step": 920 },
    { "epoch": 0.69, "learning_rate": 3.845867460908415e-05, "loss": 1.6474, "step": 930 },
    { "epoch": 0.7, "learning_rate": 3.833457433606354e-05, "loss": 1.6719, "step": 940 },
    { "epoch": 0.71, "learning_rate": 3.821047406304294e-05, "loss": 1.6694, "step": 950 },
    { "epoch": 0.71, "learning_rate": 3.808637379002234e-05, "loss": 1.6876, "step": 960 },
    { "epoch": 0.72, "learning_rate": 3.796227351700174e-05, "loss": 1.6902, "step": 970 },
    { "epoch": 0.73, "learning_rate": 3.783817324398114e-05, "loss": 1.67, "step": 980 },
    { "epoch": 0.74, "learning_rate": 3.771407297096054e-05, "loss": 1.6488, "step": 990 },
    { "epoch": 0.74, "learning_rate": 3.758997269793994e-05, "loss": 1.6153, "step": 1000 },
    { "epoch": 0.75, "learning_rate": 3.7465872424919336e-05, "loss": 1.6346, "step": 1010 },
    { "epoch": 0.76, "learning_rate": 3.7341772151898736e-05, "loss": 1.7009, "step": 1020 },
    { "epoch": 0.77, "learning_rate": 3.7217671878878135e-05, "loss": 1.6873, "step": 1030 },
    { "epoch": 0.77, "learning_rate": 3.7093571605857535e-05, "loss": 1.6742, "step": 1040 },
    { "epoch": 0.78, "learning_rate": 3.6969471332836934e-05, "loss": 1.645, "step": 1050 },
    { "epoch": 0.79, "learning_rate": 3.6845371059816334e-05, "loss": 1.7089, "step": 1060 },
    { "epoch": 0.8, "learning_rate": 3.672127078679573e-05, "loss": 1.6473, "step": 1070 },
    { "epoch": 0.8, "learning_rate": 3.659717051377513e-05, "loss": 1.6846, "step": 1080 },
    { "epoch": 0.81, "learning_rate": 3.647307024075453e-05, "loss": 1.7159, "step": 1090 },
    { "epoch": 0.82, "learning_rate": 3.634896996773393e-05, "loss": 1.6727, "step": 1100 },
    { "epoch": 0.83, "learning_rate": 3.622486969471333e-05, "loss": 1.6722, "step": 1110 },
    { "epoch": 0.83, "learning_rate": 3.610076942169273e-05, "loss": 1.6785, "step": 1120 },
    { "epoch": 0.84, "learning_rate": 3.597666914867212e-05, "loss": 1.68, "step": 1130 },
    { "epoch": 0.85, "learning_rate": 3.585256887565153e-05, "loss": 1.6698, "step": 1140 },
    { "epoch": 0.86, "learning_rate": 3.572846860263093e-05, "loss": 1.7116, "step": 1150 },
    { "epoch": 0.86, "learning_rate": 3.560436832961033e-05, "loss": 1.6635, "step": 1160 },
    { "epoch": 0.87, "learning_rate": 3.548026805658973e-05, "loss": 1.6821, "step": 1170 },
    { "epoch": 0.88, "learning_rate": 3.535616778356913e-05, "loss": 1.6669, "step": 1180 },
    { "epoch": 0.89, "learning_rate": 3.523206751054853e-05, "loss": 1.6733, "step": 1190 },
    { "epoch": 0.89, "learning_rate": 3.510796723752792e-05, "loss": 1.66, "step": 1200 },
    { "epoch": 0.9, "learning_rate": 3.498386696450732e-05, "loss": 1.6447, "step": 1210 },
    { "epoch": 0.91, "learning_rate": 3.4859766691486725e-05, "loss": 1.5991, "step": 1220 },
    { "epoch": 0.92, "learning_rate": 3.4735666418466125e-05, "loss": 1.6811, "step": 1230 },
    { "epoch": 0.92, "learning_rate": 3.4611566145445524e-05, "loss": 1.7129, "step": 1240 },
    { "epoch": 0.93, "learning_rate": 3.4487465872424924e-05, "loss": 1.6621, "step": 1250 },
    { "epoch": 0.94, "learning_rate": 3.4363365599404317e-05, "loss": 1.7028, "step": 1260 },
    { "epoch": 0.95, "learning_rate": 3.4239265326383716e-05, "loss": 1.6428, "step": 1270 },
    { "epoch": 0.95, "learning_rate": 3.4115165053363115e-05, "loss": 1.6931, "step": 1280 },
    { "epoch": 0.96, "learning_rate": 3.399106478034252e-05, "loss": 1.6831, "step": 1290 },
    { "epoch": 0.97, "learning_rate": 3.386696450732192e-05, "loss": 1.6676, "step": 1300 },
    { "epoch": 0.98, "learning_rate": 3.374286423430132e-05, "loss": 1.6861, "step": 1310 },
    { "epoch": 0.98, "learning_rate": 3.3618763961280713e-05, "loss": 1.6731, "step": 1320 },
    { "epoch": 0.99, "learning_rate": 3.349466368826011e-05, "loss": 1.6459, "step": 1330 },
    { "epoch": 1.0, "learning_rate": 3.337056341523951e-05, "loss": 1.7026, "step": 1340 },
    { "epoch": 1.01, "learning_rate": 3.324646314221891e-05, "loss": 1.6455, "step": 1350 },
    { "epoch": 1.01, "learning_rate": 3.312236286919831e-05, "loss": 1.6502, "step": 1360 },
    { "epoch": 1.02, "learning_rate": 3.299826259617772e-05, "loss": 1.6802, "step": 1370 },
    { "epoch": 1.03, "learning_rate": 3.287416232315712e-05, "loss": 1.6756, "step": 1380 },
    { "epoch": 1.03, "learning_rate": 3.275006205013651e-05, "loss": 1.615, "step": 1390 },
    { "epoch": 1.04, "learning_rate": 3.262596177711591e-05, "loss": 1.6637, "step": 1400 },
    { "epoch": 1.05, "learning_rate": 3.250186150409531e-05, "loss": 1.6727, "step": 1410 },
    { "epoch": 1.06, "learning_rate": 3.237776123107471e-05, "loss": 1.6169, "step": 1420 },
    { "epoch": 1.06, "learning_rate": 3.225366095805411e-05, "loss": 1.6792, "step": 1430 },
    { "epoch": 1.07, "learning_rate": 3.2129560685033514e-05, "loss": 1.6745, "step": 1440 },
    { "epoch": 1.08, "learning_rate": 3.200546041201291e-05, "loss": 1.6932, "step": 1450 },
    { "epoch": 1.09, "learning_rate": 3.1881360138992306e-05, "loss": 1.6967, "step": 1460 },
    { "epoch": 1.09, "learning_rate": 3.1757259865971706e-05, "loss": 1.6416, "step": 1470 },
    { "epoch": 1.1, "learning_rate": 3.1633159592951105e-05, "loss": 1.6618, "step": 1480 },
    { "epoch": 1.11, "learning_rate": 3.1509059319930505e-05, "loss": 1.6343, "step": 1490 },
    { "epoch": 1.12, "learning_rate": 3.1384959046909904e-05, "loss": 1.6633, "step": 1500 },
    { "epoch": 1.12, "learning_rate": 3.1260858773889304e-05, "loss": 1.7296, "step": 1510 },
    { "epoch": 1.13, "learning_rate": 3.11367585008687e-05, "loss": 1.6811, "step": 1520 },
    { "epoch": 1.14, "learning_rate": 3.10126582278481e-05, "loss": 1.6472, "step": 1530 },
    { "epoch": 1.15, "learning_rate": 3.08885579548275e-05, "loss": 1.6914, "step": 1540 },
    { "epoch": 1.15, "learning_rate": 3.07644576818069e-05, "loss": 1.6557, "step": 1550 },
    { "epoch": 1.16, "learning_rate": 3.06403574087863e-05, "loss": 1.6505, "step": 1560 },
    { "epoch": 1.17, "learning_rate": 3.0516257135765704e-05, "loss": 1.6881, "step": 1570 },
    { "epoch": 1.18, "learning_rate": 3.0392156862745097e-05, "loss": 1.6816, "step": 1580 },
    { "epoch": 1.18, "learning_rate": 3.0268056589724496e-05, "loss": 1.6364, "step": 1590 },
    { "epoch": 1.19, "learning_rate": 3.01439563167039e-05, "loss": 1.6724, "step": 1600 },
    { "epoch": 1.2, "learning_rate": 3.00198560436833e-05, "loss": 1.6698, "step": 1610 },
    { "epoch": 1.21, "learning_rate": 2.9895755770662698e-05, "loss": 1.6864, "step": 1620 },
    { "epoch": 1.21, "learning_rate": 2.9771655497642097e-05, "loss": 1.6614, "step": 1630 },
    { "epoch": 1.22, "learning_rate": 2.9647555224621497e-05, "loss": 1.6181, "step": 1640 },
    { "epoch": 1.23, "learning_rate": 2.9523454951600893e-05, "loss": 1.697, "step": 1650 },
    { "epoch": 1.24, "learning_rate": 2.9399354678580292e-05, "loss": 1.6462, "step": 1660 },
    { "epoch": 1.24, "learning_rate": 2.9275254405559692e-05, "loss": 1.6583, "step": 1670 },
    { "epoch": 1.25, "learning_rate": 2.9151154132539095e-05, "loss": 1.6781, "step": 1680 },
    { "epoch": 1.26, "learning_rate": 2.9027053859518494e-05, "loss": 1.6672, "step": 1690 },
    { "epoch": 1.27, "learning_rate": 2.8902953586497894e-05, "loss": 1.6406, "step": 1700 },
    { "epoch": 1.27, "learning_rate": 2.877885331347729e-05, "loss": 1.7013, "step": 1710 },
    { "epoch": 1.28, "learning_rate": 2.865475304045669e-05, "loss": 1.6636, "step": 1720 },
    { "epoch": 1.29, "learning_rate": 2.853065276743609e-05, "loss": 1.6721, "step": 1730 },
    { "epoch": 1.3, "learning_rate": 2.8406552494415488e-05, "loss": 1.6605, "step": 1740 },
    { "epoch": 1.3, "learning_rate": 2.828245222139489e-05, "loss": 1.6329, "step": 1750 },
    { "epoch": 1.31, "learning_rate": 2.815835194837429e-05, "loss": 1.6444, "step": 1760 },
    { "epoch": 1.32, "learning_rate": 2.8034251675353683e-05, "loss": 1.6278, "step": 1770 },
    { "epoch": 1.33, "learning_rate": 2.7910151402333086e-05, "loss": 1.6724, "step": 1780 },
    { "epoch": 1.33, "learning_rate": 2.7786051129312486e-05, "loss": 1.7456, "step": 1790 },
    { "epoch": 1.34, "learning_rate": 2.7661950856291885e-05, "loss": 1.6874, "step": 1800 },
    { "epoch": 1.35, "learning_rate": 2.7537850583271285e-05, "loss": 1.6249, "step": 1810 },
    { "epoch": 1.36, "learning_rate": 2.7413750310250684e-05, "loss": 1.6814, "step": 1820 },
    { "epoch": 1.36, "learning_rate": 2.7289650037230087e-05, "loss": 1.6465, "step": 1830 },
    { "epoch": 1.37, "learning_rate": 2.716554976420948e-05, "loss": 1.6471, "step": 1840 },
    { "epoch": 1.38, "learning_rate": 2.704144949118888e-05, "loss": 1.6207, "step": 1850 },
    { "epoch": 1.38, "learning_rate": 2.6917349218168282e-05, "loss": 1.6443, "step": 1860 },
    { "epoch": 1.39, "learning_rate": 2.679324894514768e-05, "loss": 1.6533, "step": 1870 },
    { "epoch": 1.4, "learning_rate": 2.666914867212708e-05, "loss": 1.7088, "step": 1880 },
    { "epoch": 1.41, "learning_rate": 2.654504839910648e-05, "loss": 1.6959, "step": 1890 },
    { "epoch": 1.41, "learning_rate": 2.6420948126085876e-05, "loss": 1.7019, "step": 1900 },
    { "epoch": 1.42, "learning_rate": 2.6296847853065276e-05, "loss": 1.6753, "step": 1910 },
    { "epoch": 1.43, "learning_rate": 2.6172747580044675e-05, "loss": 1.7044, "step": 1920 },
    { "epoch": 1.44, "learning_rate": 2.6048647307024078e-05, "loss": 1.6777, "step": 1930 },
    { "epoch": 1.44, "learning_rate": 2.5924547034003478e-05, "loss": 1.6419, "step": 1940 },
    { "epoch": 1.45, "learning_rate": 2.5800446760982877e-05, "loss": 1.6893, "step": 1950 },
    { "epoch": 1.46, "learning_rate": 2.5676346487962277e-05, "loss": 1.6555, "step": 1960 },
    { "epoch": 1.47, "learning_rate": 2.5552246214941673e-05, "loss": 1.6814, "step": 1970 },
    { "epoch": 1.47, "learning_rate": 2.5428145941921072e-05, "loss": 1.7269, "step": 1980 },
    { "epoch": 1.48, "learning_rate": 2.5304045668900472e-05, "loss": 1.6449, "step": 1990 },
    { "epoch": 1.49, "learning_rate": 2.517994539587987e-05, "loss": 1.6843, "step": 2000 },
    { "epoch": 1.5, "learning_rate": 2.5055845122859274e-05, "loss": 1.6782, "step": 2010 },
    { "epoch": 1.5, "learning_rate": 2.493174484983867e-05, "loss": 1.7249, "step": 2020 },
    { "epoch": 1.51, "learning_rate": 2.480764457681807e-05, "loss": 1.6899, "step": 2030 },
    { "epoch": 1.52, "learning_rate": 2.468354430379747e-05, "loss": 1.6383, "step": 2040 },
    { "epoch": 1.53, "learning_rate": 2.455944403077687e-05, "loss": 1.6221, "step": 2050 },
    { "epoch": 1.53, "learning_rate": 2.4435343757756268e-05, "loss": 1.6997, "step": 2060 },
    { "epoch": 1.54, "learning_rate": 2.4311243484735668e-05, "loss": 1.6394, "step": 2070 },
    { "epoch": 1.55, "learning_rate": 2.4187143211715067e-05, "loss": 1.6809, "step": 2080 },
    { "epoch": 1.56, "learning_rate": 2.4063042938694467e-05, "loss": 1.6683, "step": 2090 },
    { "epoch": 1.56, "learning_rate": 2.3938942665673866e-05, "loss": 1.6625, "step": 2100 },
    { "epoch": 1.57, "learning_rate": 2.3814842392653262e-05, "loss": 1.6692, "step": 2110 },
    { "epoch": 1.58, "learning_rate": 2.3690742119632665e-05, "loss": 1.6996, "step": 2120 },
    { "epoch": 1.59, "learning_rate": 2.3566641846612065e-05, "loss": 1.6653, "step": 2130 },
    { "epoch": 1.59, "learning_rate": 2.3442541573591464e-05, "loss": 1.6903, "step": 2140 },
    { "epoch": 1.6, "learning_rate": 2.3318441300570863e-05, "loss": 1.6639, "step": 2150 },
    { "epoch": 1.61, "learning_rate": 2.3194341027550263e-05, "loss": 1.6501, "step": 2160 },
    { "epoch": 1.62, "learning_rate": 2.3070240754529662e-05, "loss": 1.6298, "step": 2170 },
    { "epoch": 1.62, "learning_rate": 2.294614048150906e-05, "loss": 1.6438, "step": 2180 },
    { "epoch": 1.63, "learning_rate": 2.282204020848846e-05, "loss": 1.6265, "step": 2190 },
    { "epoch": 1.64, "learning_rate": 2.269793993546786e-05, "loss": 1.6856, "step": 2200 },
    { "epoch": 1.65, "learning_rate": 2.2573839662447257e-05, "loss": 1.6824, "step": 2210 },
    { "epoch": 1.65, "learning_rate": 2.2449739389426656e-05, "loss": 1.6899, "step": 2220 },
    { "epoch": 1.66, "learning_rate": 2.232563911640606e-05, "loss": 1.6264, "step": 2230 },
    { "epoch": 1.67, "learning_rate": 2.2201538843385455e-05, "loss": 1.6794, "step": 2240 },
    { "epoch": 1.68, "learning_rate": 2.2077438570364855e-05, "loss": 1.6568, "step": 2250 },
    { "epoch": 1.68, "learning_rate": 2.1953338297344254e-05, "loss": 1.6893, "step": 2260 },
    { "epoch": 1.69, "learning_rate": 2.1829238024323654e-05, "loss": 1.696, "step": 2270 },
    { "epoch": 1.7, "learning_rate": 2.1705137751303053e-05, "loss": 1.6312, "step": 2280 },
    { "epoch": 1.71, "learning_rate": 2.1581037478282453e-05, "loss": 1.6189, "step": 2290 },
    { "epoch": 1.71, "learning_rate": 2.1456937205261852e-05, "loss": 1.6403, "step": 2300 },
    { "epoch": 1.72, "learning_rate": 2.1332836932241252e-05, "loss": 1.6639, "step": 2310 },
    { "epoch": 1.73, "learning_rate": 2.120873665922065e-05, "loss": 1.7026, "step": 2320 },
    { "epoch": 1.73, "learning_rate": 2.108463638620005e-05, "loss": 1.6919, "step": 2330 },
    { "epoch": 1.74, "learning_rate": 2.096053611317945e-05, "loss": 1.6642, "step": 2340 },
    { "epoch": 1.75, "learning_rate": 2.083643584015885e-05, "loss": 1.6531, "step": 2350 },
    { "epoch": 1.76, "learning_rate": 2.071233556713825e-05, "loss": 1.6271, "step": 2360 },
    { "epoch": 1.76, "learning_rate": 2.058823529411765e-05, "loss": 1.6252, "step": 2370 },
    { "epoch": 1.77, "learning_rate": 2.0464135021097048e-05, "loss": 1.6681, "step": 2380 },
    { "epoch": 1.78, "learning_rate": 2.0340034748076448e-05, "loss": 1.6417, "step": 2390 },
    { "epoch": 1.79, "learning_rate": 2.0215934475055844e-05, "loss": 1.6386, "step": 2400 },
    { "epoch": 1.79, "learning_rate": 2.0091834202035247e-05, "loss": 1.6616, "step": 2410 },
    { "epoch": 1.8, "learning_rate": 1.9967733929014646e-05, "loss": 1.629, "step": 2420 },
    { "epoch": 1.81, "learning_rate": 1.9843633655994042e-05, "loss": 1.711, "step": 2430 },
    { "epoch": 1.82, "learning_rate": 1.971953338297344e-05, "loss": 1.6446, "step": 2440 },
    { "epoch": 1.82, "learning_rate": 1.9595433109952844e-05, "loss": 1.6744, "step": 2450 },
    { "epoch": 1.83, "learning_rate": 1.9471332836932244e-05, "loss": 1.6503, "step": 2460 },
    { "epoch": 1.84, "learning_rate": 1.934723256391164e-05, "loss": 1.6351, "step": 2470 },
    { "epoch": 1.85, "learning_rate": 1.922313229089104e-05, "loss": 1.6266, "step": 2480 },
    { "epoch": 1.85, "learning_rate": 1.9099032017870442e-05, "loss": 1.6997, "step": 2490 },
    { "epoch": 1.86, "learning_rate": 1.897493174484984e-05, "loss": 1.62, "step": 2500 },
    { "epoch": 1.87, "learning_rate": 1.8850831471829238e-05, "loss": 1.6761, "step": 2510 },
    { "epoch": 1.88, "learning_rate": 1.8726731198808637e-05, "loss": 1.6611, "step": 2520 },
    { "epoch": 1.88, "learning_rate": 1.8602630925788037e-05, "loss": 1.6603, "step": 2530 },
    { "epoch": 1.89, "learning_rate": 1.8478530652767436e-05, "loss": 1.6923, "step": 2540 },
    { "epoch": 1.9, "learning_rate": 1.8354430379746836e-05, "loss": 1.6886, "step": 2550 },
    { "epoch": 1.91, "learning_rate": 1.8230330106726235e-05, "loss": 1.6769, "step": 2560 },
    { "epoch": 1.91, "learning_rate": 1.8106229833705635e-05, "loss": 1.6706, "step": 2570 },
    { "epoch": 1.92, "learning_rate": 1.7982129560685034e-05, "loss": 1.6321, "step": 2580 },
    { "epoch": 1.93, "learning_rate": 1.7858029287664434e-05, "loss": 1.7275, "step": 2590 },
    { "epoch": 1.94, "learning_rate": 1.7733929014643833e-05, "loss": 1.6575, "step": 2600 },
    { "epoch": 1.94, "learning_rate": 1.7609828741623233e-05, "loss": 1.6676, "step": 2610 },
    { "epoch": 1.95, "learning_rate": 1.7485728468602632e-05, "loss": 1.6639, "step": 2620 },
    { "epoch": 1.96, "learning_rate": 1.7361628195582032e-05, "loss": 1.7096, "step": 2630 },
    { "epoch": 1.97, "learning_rate": 1.723752792256143e-05, "loss": 1.6374, "step": 2640 },
    { "epoch": 1.97, "learning_rate": 1.711342764954083e-05, "loss": 1.6545, "step": 2650 },
    { "epoch": 1.98, "learning_rate": 1.6989327376520227e-05, "loss": 1.6276, "step": 2660 },
    { "epoch": 1.99, "learning_rate": 1.686522710349963e-05, "loss": 1.6365, "step": 2670 },
    { "epoch": 2.0, "learning_rate": 1.674112683047903e-05, "loss": 1.6577, "step": 2680 },
    { "epoch": 2.0, "learning_rate": 1.6617026557458425e-05, "loss": 1.6519, "step": 2690 },
    { "epoch": 2.01, "learning_rate": 1.6492926284437825e-05, "loss": 1.6889, "step": 2700 },
    { "epoch": 2.02, "learning_rate": 1.6368826011417228e-05, "loss": 1.6314, "step": 2710 },
    { "epoch": 2.03, "learning_rate": 1.6244725738396624e-05, "loss": 1.7011, "step": 2720 },
    { "epoch": 2.03, "learning_rate": 1.6120625465376023e-05, "loss": 1.664, "step": 2730 },
    { "epoch": 2.04, "learning_rate": 1.5996525192355426e-05, "loss": 1.6482, "step": 2740 },
    { "epoch": 2.05, "learning_rate": 1.5872424919334825e-05, "loss": 1.6608, "step": 2750 },
    { "epoch": 2.06, "learning_rate": 1.574832464631422e-05, "loss": 1.671, "step": 2760 },
    { "epoch": 2.06, "learning_rate": 1.562422437329362e-05, "loss": 1.7074, "step": 2770 },
    { "epoch": 2.07, "learning_rate": 1.5500124100273024e-05, "loss": 1.7046, "step": 2780 },
    { "epoch": 2.08, "learning_rate": 1.537602382725242e-05, "loss": 1.65, "step": 2790 },
    { "epoch": 2.08, "learning_rate": 1.525192355423182e-05, "loss": 1.6294, "step": 2800 },
    { "epoch": 2.09, "learning_rate": 1.512782328121122e-05, "loss": 1.6541, "step": 2810 },
    { "epoch": 2.1, "learning_rate": 1.5003723008190618e-05, "loss": 1.6501, "step": 2820 },
    { "epoch": 2.11, "learning_rate": 1.4879622735170018e-05, "loss": 1.6495, "step": 2830 },
    { "epoch": 2.11, "learning_rate": 1.4755522462149417e-05, "loss": 1.642, "step": 2840 },
    { "epoch": 2.12, "learning_rate": 1.4631422189128815e-05, "loss": 1.6685, "step": 2850 },
    { "epoch": 2.13, "learning_rate": 1.4507321916108216e-05, "loss": 1.6514, "step": 2860 },
    { "epoch": 2.14, "learning_rate": 1.4383221643087616e-05, "loss": 1.6306, "step": 2870 },
    { "epoch": 2.14, "learning_rate": 1.4259121370067017e-05, "loss": 1.6884, "step": 2880 },
    { "epoch": 2.15, "learning_rate": 1.4135021097046413e-05, "loss": 1.6732, "step": 2890 },
    { "epoch": 2.16, "learning_rate": 1.4010920824025814e-05, "loss": 1.6354, "step": 2900 },
    { "epoch": 2.17, "learning_rate": 1.3886820551005214e-05, "loss": 1.6434, "step": 2910 },
    { "epoch": 2.17, "learning_rate": 1.3762720277984612e-05, "loss": 1.6864, "step": 2920 },
    { "epoch": 2.18, "learning_rate": 1.3638620004964011e-05, "loss": 1.6544, "step": 2930 },
    { "epoch": 2.19, "learning_rate": 1.3514519731943412e-05, "loss": 1.6977, "step": 2940 },
    { "epoch": 2.2, "learning_rate": 1.339041945892281e-05, "loss": 1.6684, "step": 2950 },
    { "epoch": 2.2, "learning_rate": 1.326631918590221e-05, "loss": 1.6436, "step": 2960 },
    { "epoch": 2.21, "learning_rate": 1.3142218912881609e-05, "loss": 1.6643, "step": 2970 },
    { "epoch": 2.22, "learning_rate": 1.3018118639861007e-05, "loss": 1.6743, "step": 2980 },
    { "epoch": 2.23, "learning_rate": 1.2894018366840408e-05, "loss": 1.6791, "step": 2990 },
    { "epoch": 2.23, "learning_rate": 1.2769918093819807e-05, "loss": 1.6459, "step": 3000 },
    { "epoch": 2.24, "learning_rate": 1.2645817820799205e-05, "loss": 1.6427, "step": 3010 },
    { "epoch": 2.25, "learning_rate": 1.2521717547778605e-05, "loss": 1.6593, "step": 3020 },
    { "epoch": 2.26, "learning_rate": 1.2397617274758006e-05, "loss": 1.6614, "step": 3030 },
    { "epoch": 2.26, "learning_rate": 1.2273517001737404e-05, "loss": 1.6754, "step": 3040 },
    { "epoch": 2.27, "learning_rate": 1.2149416728716803e-05, "loss": 1.6599, "step": 3050 },
    { "epoch": 2.28, "learning_rate": 1.2025316455696203e-05, "loss": 1.6364, "step": 3060 },
    { "epoch": 2.29, "learning_rate": 1.1901216182675602e-05, "loss": 1.6294, "step": 3070 },
    { "epoch": 2.29, "learning_rate": 1.1777115909655002e-05, "loss": 1.6174, "step": 3080 },
    { "epoch": 2.3, "learning_rate": 1.1653015636634401e-05, "loss": 1.6602, "step": 3090 },
    { "epoch": 2.31, "learning_rate": 1.15289153636138e-05, "loss": 1.6252, "step": 3100 },
    { "epoch": 2.32, "learning_rate": 1.14048150905932e-05, "loss": 1.596, "step": 3110 },
    { "epoch": 2.32, "learning_rate": 1.12807148175726e-05, "loss": 1.6547, "step": 3120 },
    { "epoch": 2.33, "learning_rate": 1.1156614544551997e-05, "loss": 1.6784, "step": 3130 },
    { "epoch": 2.34, "learning_rate": 1.1032514271531398e-05, "loss": 1.6921, "step": 3140 },
    { "epoch": 2.35, "learning_rate": 1.0908413998510796e-05, "loss": 1.7334, "step": 3150 },
    { "epoch": 2.35, "learning_rate": 1.0784313725490197e-05, "loss": 1.6297, "step": 3160 },
    { "epoch": 2.36, "learning_rate": 1.0660213452469595e-05, "loss": 1.6577, "step": 3170 },
    { "epoch": 2.37, "learning_rate": 1.0536113179448995e-05, "loss": 1.6586, "step": 3180 },
    { "epoch": 2.38, "learning_rate": 1.0412012906428396e-05, "loss": 1.6802, "step": 3190 },
    { "epoch": 2.38, "learning_rate": 1.0287912633407794e-05, "loss": 1.6577, "step": 3200 },
    { "epoch": 2.39, "learning_rate": 1.0163812360387195e-05, "loss": 1.6509, "step": 3210 },
    { "epoch": 2.4, "learning_rate": 1.0039712087366593e-05, "loss": 1.6463, "step": 3220 },
    { "epoch": 2.41, "learning_rate": 9.915611814345992e-06, "loss": 1.676, "step": 3230 },
    { "epoch": 2.41, "learning_rate": 9.791511541325392e-06, "loss": 1.6099, "step": 3240 },
    { "epoch": 2.42, "learning_rate": 9.667411268304791e-06, "loss": 1.6285, "step": 3250 },
    { "epoch": 2.43, "learning_rate": 9.543310995284189e-06, "loss": 1.6585, "step": 3260 },
    { "epoch": 2.43, "learning_rate": 9.41921072226359e-06, "loss": 1.6711, "step": 3270 },
    { "epoch": 2.44, "learning_rate": 9.295110449242988e-06, "loss": 1.6543, "step": 3280 },
    { "epoch": 2.45, "learning_rate": 9.171010176222389e-06, "loss": 1.6361, "step": 3290 },
    { "epoch": 2.46, "learning_rate": 9.046909903201788e-06, "loss": 1.6181, "step": 3300 },
    { "epoch": 2.46, "learning_rate": 8.922809630181186e-06, "loss": 1.6398, "step": 3310 },
    { "epoch": 2.47, "learning_rate": 8.798709357160587e-06, "loss": 1.6754, "step": 3320 },
    { "epoch": 2.48, "learning_rate": 8.674609084139985e-06, "loss": 1.6702, "step": 3330 },
    { "epoch": 2.49, "learning_rate": 8.550508811119385e-06, "loss": 1.6642, "step": 3340 },
    { "epoch": 2.49, "learning_rate": 8.426408538098784e-06, "loss": 1.637, "step": 3350 },
    { "epoch": 2.5, "learning_rate": 8.302308265078184e-06, "loss": 1.656, "step": 3360 },
    { "epoch": 2.51, "learning_rate": 8.178207992057583e-06, "loss": 1.6398, "step": 3370 },
    { "epoch": 2.52, "learning_rate": 8.054107719036983e-06, "loss": 1.631, "step": 3380 },
    { "epoch": 2.52, "learning_rate": 7.93000744601638e-06, "loss": 1.6664, "step": 3390 },
    { "epoch": 2.53, "learning_rate": 7.805907172995782e-06, "loss": 1.6617, "step": 3400 },
    { "epoch": 2.54, "learning_rate": 7.681806899975181e-06, "loss": 1.6974, "step": 3410 },
    { "epoch": 2.55, "learning_rate": 7.557706626954579e-06, "loss": 1.6472, "step": 3420 },
    { "epoch": 2.55, "learning_rate": 7.433606353933979e-06, "loss": 1.6116, "step": 3430 },
    { "epoch": 2.56, "learning_rate": 7.309506080913378e-06, "loss": 1.6501, "step": 3440 },
    { "epoch": 2.57, "learning_rate": 7.185405807892778e-06, "loss": 1.6773, "step": 3450 },
    { "epoch": 2.58, "learning_rate": 7.061305534872177e-06, "loss": 1.6507, "step": 3460 },
    { "epoch": 2.58, "learning_rate": 6.937205261851576e-06, "loss": 1.6528, "step": 3470 },
    { "epoch": 2.59, "learning_rate": 6.8131049888309765e-06, "loss": 1.6789, "step": 3480 },
    { "epoch": 2.6, "learning_rate": 6.689004715810375e-06, "loss": 1.6368, "step": 3490 },
    { "epoch": 2.61, "learning_rate": 6.564904442789774e-06, "loss": 1.6571, "step": 3500 },
    { "epoch": 2.61, "learning_rate": 6.440804169769174e-06, "loss": 1.6961, "step": 3510 },
    { "epoch": 2.62, "learning_rate": 6.316703896748573e-06, "loss": 1.7001, "step": 3520 },
    { "epoch": 2.63, "learning_rate": 6.192603623727972e-06, "loss": 1.5976, "step": 3530 },
    { "epoch": 2.64, "learning_rate": 6.068503350707372e-06, "loss": 1.7071, "step": 3540 },
    { "epoch": 2.64, "learning_rate": 5.944403077686771e-06, "loss": 1.6296, "step": 3550 },
    { "epoch": 2.65, "learning_rate": 5.82030280466617e-06, "loss": 1.6624, "step": 3560 },
    { "epoch": 2.66, "learning_rate": 5.69620253164557e-06, "loss": 1.675, "step": 3570 },
    { "epoch": 2.67, "learning_rate": 5.57210225862497e-06, "loss": 1.6798, "step": 3580 },
    { "epoch": 2.67, "learning_rate": 5.448001985604369e-06, "loss": 1.6819, "step": 3590 },
    { "epoch": 2.68, "learning_rate": 5.323901712583769e-06, "loss": 1.6284, "step": 3600 },
    { "epoch": 2.69, "learning_rate": 5.199801439563167e-06, "loss": 1.6457, "step": 3610 },
    { "epoch": 2.7, "learning_rate": 5.075701166542567e-06, "loss": 1.6644, "step": 3620 },
    { "epoch": 2.7, "learning_rate": 4.951600893521966e-06, "loss": 1.6907, "step": 3630 },
    { "epoch": 2.71, "learning_rate": 4.827500620501366e-06, "loss": 1.6358, "step": 3640 },
    { "epoch": 2.72, "learning_rate": 4.703400347480764e-06, "loss": 1.6713, "step": 3650 },
    { "epoch": 2.73, "learning_rate": 4.579300074460164e-06, "loss": 1.6857, "step": 3660 },
    { "epoch": 2.73, "learning_rate": 4.455199801439563e-06, "loss": 1.6419, "step": 3670 },
    { "epoch": 2.74, "learning_rate": 4.331099528418963e-06, "loss": 1.6663, "step": 3680 },
    { "epoch": 2.75, "learning_rate": 4.206999255398362e-06, "loss": 1.6744, "step": 3690 },
    { "epoch": 2.76, "learning_rate": 4.082898982377762e-06, "loss": 1.6562, "step": 3700 },
    { "epoch": 2.76, "learning_rate": 3.958798709357161e-06, "loss": 1.6287, "step": 3710 },
    { "epoch": 2.77, "learning_rate": 3.834698436336561e-06, "loss": 1.6765, "step": 3720 },
    { "epoch": 2.78, "learning_rate": 3.7105981633159593e-06, "loss": 1.6417, "step": 3730 },
    { "epoch": 2.78, "learning_rate": 3.5864978902953588e-06, "loss": 1.6559, "step": 3740 },
    { "epoch": 2.79, "learning_rate": 3.4623976172747582e-06, "loss": 1.6428, "step": 3750 },
    { "epoch": 2.8, "learning_rate": 3.3382973442541577e-06, "loss": 1.5889, "step": 3760 },
    { "epoch": 2.81, "learning_rate": 3.2141970712335568e-06, "loss": 1.684, "step": 3770 },
    { "epoch": 2.81, "learning_rate": 3.0900967982129563e-06, "loss": 1.6508, "step": 3780 },
    { "epoch": 2.82, "learning_rate": 2.9659965251923557e-06, "loss": 1.6872, "step": 3790 },
    { "epoch": 2.83, "learning_rate": 2.841896252171755e-06, "loss": 1.6434, "step": 3800 },
    { "epoch": 2.84, "learning_rate": 2.7177959791511543e-06, "loss": 1.6654, "step": 3810 },
    { "epoch": 2.84, "learning_rate": 2.5936957061305533e-06, "loss": 1.6756, "step": 3820 },
    { "epoch": 2.85, "learning_rate": 2.469595433109953e-06, "loss": 1.6538, "step": 3830 },
    { "epoch": 2.86, "learning_rate": 2.3454951600893523e-06, "loss": 1.6519, "step": 3840 },
    { "epoch": 2.87, "learning_rate": 2.2213948870687518e-06, "loss": 1.6851, "step": 3850 },
    { "epoch": 2.87, "learning_rate": 2.097294614048151e-06, "loss": 1.6228, "step": 3860 },
    { "epoch": 2.88, "learning_rate": 1.9731943410275503e-06, "loss": 1.6828, "step": 3870 },
    { "epoch": 2.89, "learning_rate": 1.8490940680069496e-06, "loss": 1.6624, "step": 3880 },
    { "epoch": 2.9, "learning_rate": 1.724993794986349e-06, "loss": 1.6821, "step": 3890 },
    { "epoch": 2.9, "learning_rate": 1.6008935219657483e-06, "loss": 1.6284, "step": 3900 },
    { "epoch": 2.91, "learning_rate": 1.4767932489451478e-06, "loss": 1.6418, "step": 3910 },
    { "epoch": 2.92, "learning_rate": 1.352692975924547e-06, "loss": 1.7002, "step": 3920 },
    { "epoch": 2.93, "learning_rate": 1.2285927029039465e-06, "loss": 1.6462, "step": 3930 },
    { "epoch": 2.93, "learning_rate": 1.1044924298833458e-06, "loss": 1.6706, "step": 3940 },
    { "epoch": 2.94, "learning_rate": 9.80392156862745e-07, "loss": 1.5972, "step": 3950 },
    { "epoch": 2.95, "learning_rate": 8.562918838421445e-07, "loss": 1.6743, "step": 3960 },
    { "epoch": 2.96, "learning_rate": 7.321916108215438e-07, "loss": 1.6595, "step": 3970 },
    { "epoch": 2.96, "learning_rate": 6.080913378009432e-07, "loss": 1.6506, "step": 3980 },
    { "epoch": 2.97, "learning_rate": 4.839910647803425e-07, "loss": 1.6343, "step": 3990 },
    { "epoch": 2.98, "learning_rate": 3.598907917597419e-07, "loss": 1.6886, "step": 4000 }
  ],
  "logging_steps": 10,
  "max_steps": 4029,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 2.52809928769536e+18,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}