{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.474283799631429,
  "eval_steps": 500,
  "global_step": 2200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.99993842168232e-05,
      "loss": 1.2211,
      "step": 10
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9997536897627915e-05,
      "loss": 1.0276,
      "step": 20
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9994458133418e-05,
      "loss": 0.8587,
      "step": 30
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.999014807586154e-05,
      "loss": 0.7431,
      "step": 40
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.9984606937283405e-05,
      "loss": 0.6841,
      "step": 50
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.9977834990654804e-05,
      "loss": 0.6452,
      "step": 60
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.99698325695798e-05,
      "loss": 0.6347,
      "step": 70
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.9960600068278876e-05,
      "loss": 0.6109,
      "step": 80
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.995013794156957e-05,
      "loss": 0.5911,
      "step": 90
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.993844670484401e-05,
      "loss": 0.5803,
      "step": 100
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.992552693404354e-05,
      "loss": 0.5902,
      "step": 110
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.991137926563036e-05,
      "loss": 0.5745,
      "step": 120
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.9896004396556176e-05,
      "loss": 0.5538,
      "step": 130
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.987940308422783e-05,
      "loss": 0.5495,
      "step": 140
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.986157614647005e-05,
      "loss": 0.5433,
      "step": 150
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.984252446148508e-05,
      "loss": 0.548,
      "step": 160
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.98222489678095e-05,
      "loss": 0.5361,
      "step": 170
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.980075066426796e-05,
      "loss": 0.5331,
      "step": 180
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.977803060992393e-05,
      "loss": 0.53,
      "step": 190
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.97540899240276e-05,
      "loss": 0.5135,
      "step": 200
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.972892978596069e-05,
      "loss": 0.5101,
      "step": 210
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.970255143517838e-05,
      "loss": 0.5125,
      "step": 220
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.967495617114826e-05,
      "loss": 0.4928,
      "step": 230
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.964614535328626e-05,
      "loss": 0.4878,
      "step": 240
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.961612040088973e-05,
      "loss": 0.5017,
      "step": 250
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.9584882793067534e-05,
      "loss": 0.4863,
      "step": 260
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.955243406866713e-05,
      "loss": 0.4847,
      "step": 270
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.951877582619881e-05,
      "loss": 0.4868,
      "step": 280
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.948390972375694e-05,
      "loss": 0.4748,
      "step": 290
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.944783747893825e-05,
      "loss": 0.4764,
      "step": 300
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.941056086875727e-05,
      "loss": 0.4712,
      "step": 310
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.937208172955876e-05,
      "loss": 0.4642,
      "step": 320
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.9332401956927224e-05,
      "loss": 0.4642,
      "step": 330
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.9291523505593604e-05,
      "loss": 0.4709,
      "step": 340
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.9249448389338905e-05,
      "loss": 0.461,
      "step": 350
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.920617868089501e-05,
      "loss": 0.4677,
      "step": 360
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.9161716511842614e-05,
      "loss": 0.4564,
      "step": 370
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.911606407250617e-05,
      "loss": 0.4663,
      "step": 380
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.9069223611846014e-05,
      "loss": 0.4682,
      "step": 390
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.9021197437347555e-05,
      "loss": 0.4636,
      "step": 400
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.897198791490762e-05,
      "loss": 0.4569,
      "step": 410
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.8921597468717887e-05,
      "loss": 0.462,
      "step": 420
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.887002858114548e-05,
      "loss": 0.4563,
      "step": 430
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.881728379261068e-05,
      "loss": 0.4563,
      "step": 440
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.876336570146175e-05,
      "loss": 0.4468,
      "step": 450
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.870827696384698e-05,
      "loss": 0.4508,
      "step": 460
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.865202029358379e-05,
      "loss": 0.4507,
      "step": 470
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.859459846202507e-05,
      "loss": 0.4486,
      "step": 480
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.853601429792265e-05,
      "loss": 0.4423,
      "step": 490
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.847627068728795e-05,
      "loss": 0.4369,
      "step": 500
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.841537057324979e-05,
      "loss": 0.4429,
      "step": 510
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.835331695590943e-05,
      "loss": 0.4389,
      "step": 520
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.829011289219276e-05,
      "loss": 0.44,
      "step": 530
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.82257614956997e-05,
      "loss": 0.4476,
      "step": 540
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.816026593655085e-05,
      "loss": 0.4367,
      "step": 550
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.809362944123129e-05,
      "loss": 0.4357,
      "step": 560
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.802585529243164e-05,
      "loss": 0.4492,
      "step": 570
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.795694682888635e-05,
      "loss": 0.4403,
      "step": 580
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.7886907445209234e-05,
      "loss": 0.4406,
      "step": 590
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.781574059172621e-05,
      "loss": 0.4317,
      "step": 600
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.7743449774305386e-05,
      "loss": 0.4379,
      "step": 610
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.7670038554184296e-05,
      "loss": 0.4324,
      "step": 620
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.7595510547794465e-05,
      "loss": 0.4329,
      "step": 630
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.751986942658332e-05,
      "loss": 0.4259,
      "step": 640
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.744311891683325e-05,
      "loss": 0.4256,
      "step": 650
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.736526279947807e-05,
      "loss": 0.4289,
      "step": 660
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.728630490991676e-05,
      "loss": 0.4353,
      "step": 670
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.7206249137824535e-05,
      "loss": 0.4413,
      "step": 680
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.7125099426961185e-05,
      "loss": 0.4302,
      "step": 690
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.704285977497687e-05,
      "loss": 0.4365,
      "step": 700
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.6959534233215116e-05,
      "loss": 0.4238,
      "step": 710
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.687512690651328e-05,
      "loss": 0.4284,
      "step": 720
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.678964195300028e-05,
      "loss": 0.4193,
      "step": 730
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.670308358389184e-05,
      "loss": 0.4256,
      "step": 740
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.6615456063282944e-05,
      "loss": 0.4288,
      "step": 750
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.652676370793784e-05,
      "loss": 0.4335,
      "step": 760
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.643701088707736e-05,
      "loss": 0.4271,
      "step": 770
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.634620202216366e-05,
      "loss": 0.4304,
      "step": 780
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.625434158668246e-05,
      "loss": 0.4249,
      "step": 790
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.6161434105922616e-05,
      "loss": 0.4322,
      "step": 800
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.6067484156753234e-05,
      "loss": 0.4229,
      "step": 810
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.597249636739815e-05,
      "loss": 0.4252,
      "step": 820
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.5876475417207974e-05,
      "loss": 0.413,
      "step": 830
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.577942603642959e-05,
      "loss": 0.4186,
      "step": 840
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.568135300597306e-05,
      "loss": 0.4233,
      "step": 850
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.5582261157176164e-05,
      "loss": 0.4177,
      "step": 860
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.5482155371566384e-05,
      "loss": 0.4236,
      "step": 870
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.538104058062042e-05,
      "loss": 0.4228,
      "step": 880
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.5278921765521234e-05,
      "loss": 0.4181,
      "step": 890
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.51758039569127e-05,
      "loss": 0.4261,
      "step": 900
    },
    {
      "epoch": 0.61,
      "learning_rate": 4.5071692234651764e-05,
      "loss": 0.4217,
      "step": 910
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.4966591727558184e-05,
      "loss": 0.4191,
      "step": 920
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.48605076131619e-05,
      "loss": 0.4247,
      "step": 930
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.475344511744794e-05,
      "loss": 0.4236,
      "step": 940
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.464540951459902e-05,
      "loss": 0.4172,
      "step": 950
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.4536406126735664e-05,
      "loss": 0.4209,
      "step": 960
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.442644032365407e-05,
      "loss": 0.4179,
      "step": 970
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.431551752256155e-05,
      "loss": 0.4166,
      "step": 980
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.420364318780973e-05,
      "loss": 0.4173,
      "step": 990
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.4090822830625236e-05,
      "loss": 0.4166,
      "step": 1000
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.3977062008838307e-05,
      "loss": 0.4173,
      "step": 1010
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.3862366326608975e-05,
      "loss": 0.4049,
      "step": 1020
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.374674143415096e-05,
      "loss": 0.4143,
      "step": 1030
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.363019302745334e-05,
      "loss": 0.4219,
      "step": 1040
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.3512726847999987e-05,
      "loss": 0.4152,
      "step": 1050
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.339434868248665e-05,
      "loss": 0.4153,
      "step": 1060
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.3275064362535966e-05,
      "loss": 0.4148,
      "step": 1070
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.315487976441014e-05,
      "loss": 0.4147,
      "step": 1080
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.303380080872145e-05,
      "loss": 0.41,
      "step": 1090
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.291183346014063e-05,
      "loss": 0.4119,
      "step": 1100
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.278898372710296e-05,
      "loss": 0.4173,
      "step": 1110
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.266525766151238e-05,
      "loss": 0.4119,
      "step": 1120
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.254066135844326e-05,
      "loss": 0.4163,
      "step": 1130
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.2415200955840184e-05,
      "loss": 0.4104,
      "step": 1140
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.228888263421557e-05,
      "loss": 0.4045,
      "step": 1150
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.216171261634521e-05,
      "loss": 0.413,
      "step": 1160
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.2033697166961716e-05,
      "loss": 0.4112,
      "step": 1170
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.1904842592445906e-05,
      "loss": 0.4018,
      "step": 1180
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.177515524051609e-05,
      "loss": 0.4068,
      "step": 1190
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.1644641499915454e-05,
      "loss": 0.4029,
      "step": 1200
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.151330780009726e-05,
      "loss": 0.4009,
      "step": 1210
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.1381160610908134e-05,
      "loss": 0.4073,
      "step": 1220
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.124820644226936e-05,
      "loss": 0.4138,
      "step": 1230
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.111445184385616e-05,
      "loss": 0.4139,
      "step": 1240
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.097990340477507e-05,
      "loss": 0.4062,
      "step": 1250
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.0844567753239276e-05,
      "loss": 0.4044,
      "step": 1260
    },
    {
      "epoch": 0.85,
      "learning_rate": 4.070845155624221e-05,
      "loss": 0.3978,
      "step": 1270
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.0571561519228984e-05,
      "loss": 0.4102,
      "step": 1280
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.043390438576616e-05,
      "loss": 0.4052,
      "step": 1290
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.029548693720949e-05,
      "loss": 0.4048,
      "step": 1300
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.0156315992369864e-05,
      "loss": 0.4008,
      "step": 1310
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.001639840717741e-05,
      "loss": 0.4038,
      "step": 1320
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.9875741074343744e-05,
      "loss": 0.408,
      "step": 1330
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.973435092302239e-05,
      "loss": 0.406,
      "step": 1340
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.959223491846749e-05,
      "loss": 0.3991,
      "step": 1350
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.94494000616906e-05,
      "loss": 0.4091,
      "step": 1360
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.93058533891159e-05,
      "loss": 0.4,
      "step": 1370
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.916160197223344e-05,
      "loss": 0.4112,
      "step": 1380
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.901665291725091e-05,
      "loss": 0.4024,
      "step": 1390
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.887101336474346e-05,
      "loss": 0.4048,
      "step": 1400
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.8724690489302004e-05,
      "loss": 0.4112,
      "step": 1410
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.857769149917973e-05,
      "loss": 0.3947,
      "step": 1420
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.843002363593707e-05,
      "loss": 0.4005,
      "step": 1430
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.828169417408488e-05,
      "loss": 0.3976,
      "step": 1440
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.8132710420726146e-05,
      "loss": 0.4006,
      "step": 1450
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.7983079715195984e-05,
      "loss": 0.398,
      "step": 1460
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.78328094287001e-05,
      "loss": 0.3987,
      "step": 1470
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.768190696395162e-05,
      "loss": 0.4013,
      "step": 1480
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.7530379754806494e-05,
      "loss": 0.4028,
      "step": 1490
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.737823526589722e-05,
      "loss": 0.4036,
      "step": 1500
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.7225480992265125e-05,
      "loss": 0.3937,
      "step": 1510
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.707212445899116e-05,
      "loss": 0.4007,
      "step": 1520
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.6918173220825204e-05,
      "loss": 0.4004,
      "step": 1530
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.6763634861813836e-05,
      "loss": 0.4004,
      "step": 1540
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.660851699492679e-05,
      "loss": 0.3991,
      "step": 1550
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.645282726168191e-05,
      "loss": 0.4042,
      "step": 1560
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.6296573331768664e-05,
      "loss": 0.4043,
      "step": 1570
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.613976290267036e-05,
      "loss": 0.3948,
      "step": 1580
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.598240369928494e-05,
      "loss": 0.3952,
      "step": 1590
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.5824503473544405e-05,
      "loss": 0.4002,
      "step": 1600
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.566607000403298e-05,
      "loss": 0.4079,
      "step": 1610
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.5523030408223166e-05,
      "loss": 0.3942,
      "step": 1620
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.5363605299319165e-05,
      "loss": 0.3935,
      "step": 1630
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.520366965171161e-05,
      "loss": 0.3898,
      "step": 1640
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.504323134425501e-05,
      "loss": 0.4006,
      "step": 1650
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.48822982805662e-05,
      "loss": 0.4089,
      "step": 1660
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.472087838863505e-05,
      "loss": 0.3982,
      "step": 1670
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.455897962043387e-05,
      "loss": 0.399,
      "step": 1680
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.4396609951525676e-05,
      "loss": 0.3964,
      "step": 1690
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.423377738067132e-05,
      "loss": 0.3909,
      "step": 1700
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.407048992943541e-05,
      "loss": 0.4015,
      "step": 1710
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.39067556417912e-05,
      "loss": 0.3915,
      "step": 1720
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.374258258372426e-05,
      "loss": 0.3845,
      "step": 1730
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.357797884283517e-05,
      "loss": 0.4018,
      "step": 1740
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.3412952527941096e-05,
      "loss": 0.3914,
      "step": 1750
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.32475117686763e-05,
      "loss": 0.3909,
      "step": 1760
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.308166471509171e-05,
      "loss": 0.3993,
      "step": 1770
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.2915419537253346e-05,
      "loss": 0.3906,
      "step": 1780
    },
    {
      "epoch": 1.2,
      "learning_rate": 3.274878442483991e-05,
      "loss": 0.3897,
      "step": 1790
    },
    {
      "epoch": 1.21,
      "learning_rate": 3.258176758673932e-05,
      "loss": 0.3954,
      "step": 1800
    },
    {
      "epoch": 1.21,
      "learning_rate": 3.241437725064431e-05,
      "loss": 0.3835,
      "step": 1810
    },
    {
      "epoch": 1.22,
      "learning_rate": 3.224662166264711e-05,
      "loss": 0.3854,
      "step": 1820
    },
    {
      "epoch": 1.23,
      "learning_rate": 3.207850908683322e-05,
      "loss": 0.3924,
      "step": 1830
    },
    {
      "epoch": 1.23,
      "learning_rate": 3.191004780487434e-05,
      "loss": 0.3888,
      "step": 1840
    },
    {
      "epoch": 1.24,
      "learning_rate": 3.1741246115620336e-05,
      "loss": 0.3914,
      "step": 1850
    },
    {
      "epoch": 1.25,
      "learning_rate": 3.157211233469042e-05,
      "loss": 0.391,
      "step": 1860
    },
    {
      "epoch": 1.25,
      "learning_rate": 3.140265479406358e-05,
      "loss": 0.3916,
      "step": 1870
    },
    {
      "epoch": 1.26,
      "learning_rate": 3.1232881841668015e-05,
      "loss": 0.4012,
      "step": 1880
    },
    {
      "epoch": 1.27,
      "learning_rate": 3.106280184096996e-05,
      "loss": 0.3934,
      "step": 1890
    },
    {
      "epoch": 1.27,
      "learning_rate": 3.089242317056168e-05,
      "loss": 0.3908,
      "step": 1900
    },
    {
      "epoch": 1.28,
      "learning_rate": 3.072175422374867e-05,
      "loss": 0.3972,
      "step": 1910
    },
    {
      "epoch": 1.29,
      "learning_rate": 3.055080340813623e-05,
      "loss": 0.3963,
      "step": 1920
    },
    {
      "epoch": 1.29,
      "learning_rate": 3.0379579145215287e-05,
      "loss": 0.3941,
      "step": 1930
    },
    {
      "epoch": 1.3,
      "learning_rate": 3.0208089869947475e-05,
      "loss": 0.3887,
      "step": 1940
    },
    {
      "epoch": 1.31,
      "learning_rate": 3.0036344030349644e-05,
      "loss": 0.3879,
      "step": 1950
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.9864350087077702e-05,
      "loss": 0.3945,
      "step": 1960
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.969211651300978e-05,
      "loss": 0.3909,
      "step": 1970
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.9519651792828877e-05,
      "loss": 0.3871,
      "step": 1980
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.9346964422604846e-05,
      "loss": 0.3803,
      "step": 1990
    },
    {
      "epoch": 1.34,
      "learning_rate": 2.9174062909375892e-05,
      "loss": 0.3868,
      "step": 2000
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.9000955770729464e-05,
      "loss": 0.385,
      "step": 2010
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.8827651534382655e-05,
      "loss": 0.3871,
      "step": 2020
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.8654158737762122e-05,
      "loss": 0.3956,
      "step": 2030
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.8480485927583506e-05,
      "loss": 0.3884,
      "step": 2040
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.8306641659430382e-05,
      "loss": 0.3829,
      "step": 2050
    },
    {
      "epoch": 1.38,
      "learning_rate": 2.8132634497332815e-05,
      "loss": 0.3916,
      "step": 2060
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.7958473013345447e-05,
      "loss": 0.3924,
      "step": 2070
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.7784165787125226e-05,
      "loss": 0.3906,
      "step": 2080
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.7609721405508758e-05,
      "loss": 0.383,
      "step": 2090
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.7435148462089282e-05,
      "loss": 0.3892,
      "step": 2100
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.7260455556793325e-05,
      "loss": 0.3866,
      "step": 2110
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.708565129545706e-05,
      "loss": 0.382,
      "step": 2120
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.691074428940237e-05,
      "loss": 0.3825,
      "step": 2130
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.673574315501259e-05,
      "loss": 0.3828,
      "step": 2140
    },
    {
      "epoch": 1.44,
      "learning_rate": 2.656065651330808e-05,
      "loss": 0.3845,
      "step": 2150
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.6385492989521522e-05,
      "loss": 0.3904,
      "step": 2160
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.6210261212673004e-05,
      "loss": 0.3934,
      "step": 2170
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.6034969815144938e-05,
      "loss": 0.3893,
      "step": 2180
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.5859627432256816e-05,
      "loss": 0.3965,
      "step": 2190
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.568424270183981e-05,
      "loss": 0.3833,
      "step": 2200
    }
  ],
  "logging_steps": 10,
  "max_steps": 4476,
  "num_train_epochs": 3,
  "save_steps": 200,
  "total_flos": 2.001340058539393e+19,
  "trial_name": null,
  "trial_params": null
}