{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.96,
  "eval_steps": 500,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "grad_norm": 0.8561736941337585, "learning_rate": 1.9999873507471375e-05, "loss": 1.0407, "step": 5 },
    { "epoch": 0.01, "grad_norm": 0.9348568320274353, "learning_rate": 1.9999494033085565e-05, "loss": 1.021, "step": 10 },
    { "epoch": 0.01, "grad_norm": 1.0065371990203857, "learning_rate": 1.999886158644271e-05, "loss": 0.9471, "step": 15 },
    { "epoch": 0.02, "grad_norm": 1.0246785879135132, "learning_rate": 1.999797618354276e-05, "loss": 0.9153, "step": 20 },
    { "epoch": 0.02, "grad_norm": 1.1248774528503418, "learning_rate": 1.9996837846785094e-05, "loss": 0.8506, "step": 25 },
    { "epoch": 0.03, "grad_norm": 0.9718716740608215, "learning_rate": 1.999544660496792e-05, "loss": 0.805, "step": 30 },
    { "epoch": 0.03, "grad_norm": 0.670078456401825, "learning_rate": 1.9993802493287588e-05, "loss": 0.756, "step": 35 },
    { "epoch": 0.04, "grad_norm": 0.5407480597496033, "learning_rate": 1.9991905553337657e-05, "loss": 0.7278, "step": 40 },
    { "epoch": 0.04, "grad_norm": 0.5028639435768127, "learning_rate": 1.9989755833107875e-05, "loss": 0.7153, "step": 45 },
    { "epoch": 0.05, "grad_norm": 0.4602053761482239, "learning_rate": 1.998735338698296e-05, "loss": 0.6868, "step": 50 },
    { "epoch": 0.05, "grad_norm": 0.47076714038848877, "learning_rate": 1.9984698275741198e-05, "loss": 0.6744, "step": 55 },
    { "epoch": 0.06, "grad_norm": 0.38986819982528687, "learning_rate": 1.9981790566552944e-05, "loss": 0.6422, "step": 60 },
    { "epoch": 0.06, "grad_norm": 0.36573711037635803, "learning_rate": 1.997863033297889e-05, "loss": 0.6242, "step": 65 },
    { "epoch": 0.07, "grad_norm": 0.31471285223960876, "learning_rate": 1.9975217654968228e-05, "loss": 0.6036, "step": 70 },
    { "epoch": 0.07, "grad_norm": 0.34020736813545227, "learning_rate": 1.9971552618856612e-05, "loss": 0.6003, "step": 75 },
    { "epoch": 0.08, "grad_norm": 0.3019075393676758, "learning_rate": 1.996763531736397e-05, "loss": 0.5907, "step": 80 },
    { "epoch": 0.08, "grad_norm": 0.2804803252220154, "learning_rate": 1.996346584959219e-05, "loss": 0.5733, "step": 85 },
    { "epoch": 0.09, "grad_norm": 0.26424163579940796, "learning_rate": 1.9959044321022563e-05, "loss": 0.5826, "step": 90 },
    { "epoch": 0.09, "grad_norm": 0.27084848284721375, "learning_rate": 1.9954370843513167e-05, "loss": 0.5781, "step": 95 },
    { "epoch": 0.1, "grad_norm": 0.25091850757598877, "learning_rate": 1.9949445535295992e-05, "loss": 0.5667, "step": 100 },
    { "epoch": 0.1, "grad_norm": 0.2530795633792877, "learning_rate": 1.9944268520973978e-05, "loss": 0.5635, "step": 105 },
    { "epoch": 0.11, "grad_norm": 0.23881025612354279, "learning_rate": 1.993883993151785e-05, "loss": 0.5643, "step": 110 },
    { "epoch": 0.11, "grad_norm": 0.22561447322368622, "learning_rate": 1.9933159904262812e-05, "loss": 0.5257, "step": 115 },
    { "epoch": 0.12, "grad_norm": 0.21529339253902435, "learning_rate": 1.9927228582905063e-05, "loss": 0.5387, "step": 120 },
    { "epoch": 0.12, "grad_norm": 0.21853545308113098, "learning_rate": 1.9921046117498175e-05, "loss": 0.5363, "step": 125 },
    { "epoch": 0.12, "grad_norm": 0.20658166706562042, "learning_rate": 1.991461266444928e-05, "loss": 0.5447, "step": 130 },
    { "epoch": 0.13, "grad_norm": 0.20857058465480804, "learning_rate": 1.9907928386515126e-05, "loss": 0.5226, "step": 135 },
    { "epoch": 0.13, "grad_norm": 0.21309202909469604, "learning_rate": 1.9900993452797964e-05, "loss": 0.5234, "step": 140 },
    { "epoch": 0.14, "grad_norm": 0.1945490837097168, "learning_rate": 1.989380803874125e-05, "loss": 0.5073, "step": 145 },
    { "epoch": 0.14, "grad_norm": 0.21293672919273376, "learning_rate": 1.9886372326125215e-05, "loss": 0.508, "step": 150 },
    { "epoch": 0.15, "grad_norm": 0.19945946335792542, "learning_rate": 1.987868650306229e-05, "loss": 0.5119, "step": 155 },
    { "epoch": 0.15, "grad_norm": 0.19202299416065216, "learning_rate": 1.9870750763992306e-05, "loss": 0.5139, "step": 160 },
    { "epoch": 0.16, "grad_norm": 0.18136940896511078, "learning_rate": 1.9862565309677606e-05, "loss": 0.4982, "step": 165 },
    { "epoch": 0.16, "grad_norm": 0.20753854513168335, "learning_rate": 1.9854130347197954e-05, "loss": 0.4979, "step": 170 },
    { "epoch": 0.17, "grad_norm": 0.19880987703800201, "learning_rate": 1.9845446089945294e-05, "loss": 0.5069, "step": 175 },
    { "epoch": 0.17, "grad_norm": 0.18631042540073395, "learning_rate": 1.9836512757618355e-05, "loss": 0.4827, "step": 180 },
    { "epoch": 0.18, "grad_norm": 0.17570152878761292, "learning_rate": 1.9827330576217105e-05, "loss": 0.4638, "step": 185 },
    { "epoch": 0.18, "grad_norm": 0.1938246786594391, "learning_rate": 1.9817899778037e-05, "loss": 0.4866, "step": 190 },
    { "epoch": 0.19, "grad_norm": 0.17978984117507935, "learning_rate": 1.9808220601663155e-05, "loss": 0.4911, "step": 195 },
    { "epoch": 0.19, "grad_norm": 0.17453525960445404, "learning_rate": 1.9798293291964265e-05, "loss": 0.4841, "step": 200 },
    { "epoch": 0.2, "grad_norm": 0.19657620787620544, "learning_rate": 1.9788118100086423e-05, "loss": 0.5002, "step": 205 },
    { "epoch": 0.2, "grad_norm": 0.18103839457035065, "learning_rate": 1.9777695283446787e-05, "loss": 0.5118, "step": 210 },
    { "epoch": 0.21, "grad_norm": 0.17228654026985168, "learning_rate": 1.9767025105727043e-05, "loss": 0.4707, "step": 215 },
    { "epoch": 0.21, "grad_norm": 0.17088425159454346, "learning_rate": 1.975610783686674e-05, "loss": 0.4884, "step": 220 },
    { "epoch": 0.22, "grad_norm": 0.19012100994586945, "learning_rate": 1.974494375305647e-05, "loss": 0.4865, "step": 225 },
    { "epoch": 0.22, "grad_norm": 0.1744072586297989, "learning_rate": 1.973353313673087e-05, "loss": 0.4884, "step": 230 },
    { "epoch": 0.23, "grad_norm": 0.17286820709705353, "learning_rate": 1.9721876276561485e-05, "loss": 0.4828, "step": 235 },
    { "epoch": 0.23, "grad_norm": 0.1775512546300888, "learning_rate": 1.970997346744945e-05, "loss": 0.4811, "step": 240 },
    { "epoch": 0.24, "grad_norm": 0.16784435510635376, "learning_rate": 1.969782501051806e-05, "loss": 0.4736, "step": 245 },
    { "epoch": 0.24, "grad_norm": 0.1838577836751938, "learning_rate": 1.9685431213105115e-05, "loss": 0.4844, "step": 250 },
    { "epoch": 0.24, "grad_norm": 0.17393770813941956, "learning_rate": 1.9672792388755174e-05, "loss": 0.4763, "step": 255 },
    { "epoch": 0.25, "grad_norm": 0.1882689893245697, "learning_rate": 1.9659908857211606e-05, "loss": 0.4756, "step": 260 },
    { "epoch": 0.25, "grad_norm": 0.16501489281654358, "learning_rate": 1.9646780944408507e-05, "loss": 0.4573, "step": 265 },
    { "epoch": 0.26, "grad_norm": 0.16918818652629852, "learning_rate": 1.9633408982462453e-05, "loss": 0.4636, "step": 270 },
    { "epoch": 0.26, "grad_norm": 0.17074143886566162, "learning_rate": 1.9619793309664097e-05, "loss": 0.4709, "step": 275 },
    { "epoch": 0.27, "grad_norm": 0.18831667304039001, "learning_rate": 1.9605934270469625e-05, "loss": 0.4631, "step": 280 },
    { "epoch": 0.27, "grad_norm": 0.19625447690486908, "learning_rate": 1.9591832215492018e-05, "loss": 0.4719, "step": 285 },
    { "epoch": 0.28, "grad_norm": 0.1936866044998169, "learning_rate": 1.9577487501492183e-05, "loss": 0.468, "step": 290 },
    { "epoch": 0.28, "grad_norm": 0.16921108961105347, "learning_rate": 1.956290049136996e-05, "loss": 0.4516, "step": 295 },
    { "epoch": 0.29, "grad_norm": 0.18268905580043793, "learning_rate": 1.9548071554154905e-05, "loss": 0.4558, "step": 300 },
    { "epoch": 0.29, "grad_norm": 0.16877137124538422, "learning_rate": 1.953300106499697e-05, "loss": 0.4764, "step": 305 },
    { "epoch": 0.3, "grad_norm": 0.17508965730667114, "learning_rate": 1.951768940515701e-05, "loss": 0.4485, "step": 310 },
    { "epoch": 0.3, "grad_norm": 0.16913452744483948, "learning_rate": 1.9502136961997144e-05, "loss": 0.4635, "step": 315 },
    { "epoch": 0.31, "grad_norm": 0.1623164862394333, "learning_rate": 1.948634412897094e-05, "loss": 0.4592, "step": 320 },
    { "epoch": 0.31, "grad_norm": 0.19096989929676056, "learning_rate": 1.9470311305613478e-05, "loss": 0.4768, "step": 325 },
    { "epoch": 0.32, "grad_norm": 0.19626876711845398, "learning_rate": 1.9454038897531226e-05, "loss": 0.4615, "step": 330 },
    { "epoch": 0.32, "grad_norm": 0.1713666319847107, "learning_rate": 1.9437527316391802e-05, "loss": 0.4583, "step": 335 },
    { "epoch": 0.33, "grad_norm": 0.17599208652973175, "learning_rate": 1.9420776979913528e-05, "loss": 0.4552, "step": 340 },
    { "epoch": 0.33, "grad_norm": 0.18346691131591797, "learning_rate": 1.940378831185489e-05, "loss": 0.4605, "step": 345 },
    { "epoch": 0.34, "grad_norm": 0.2010510265827179, "learning_rate": 1.9386561742003804e-05, "loss": 0.48, "step": 350 },
    { "epoch": 0.34, "grad_norm": 0.16948485374450684, "learning_rate": 1.936909770616675e-05, "loss": 0.4501, "step": 355 },
    { "epoch": 0.35, "grad_norm": 0.18418210744857788, "learning_rate": 1.935139664615773e-05, "loss": 0.4604, "step": 360 },
    { "epoch": 0.35, "grad_norm": 0.1840277910232544, "learning_rate": 1.933345900978712e-05, "loss": 0.4586, "step": 365 },
    { "epoch": 0.36, "grad_norm": 0.1805981695652008, "learning_rate": 1.9315285250850312e-05, "loss": 0.452, "step": 370 },
    { "epoch": 0.36, "grad_norm": 0.1838221549987793, "learning_rate": 1.9296875829116252e-05, "loss": 0.4571, "step": 375 },
    { "epoch": 0.36, "grad_norm": 0.17758053541183472, "learning_rate": 1.92782312103158e-05, "loss": 0.4927, "step": 380 },
    { "epoch": 0.37, "grad_norm": 0.16368024051189423, "learning_rate": 1.9259351866129955e-05, "loss": 0.4504, "step": 385 },
    { "epoch": 0.37, "grad_norm": 0.17798829078674316, "learning_rate": 1.9240238274177907e-05, "loss": 0.4422, "step": 390 },
    { "epoch": 0.38, "grad_norm": 0.16979840397834778, "learning_rate": 1.9220890918004978e-05, "loss": 0.4333, "step": 395 },
    { "epoch": 0.38, "grad_norm": 0.1931125670671463, "learning_rate": 1.9201310287070368e-05, "loss": 0.4312, "step": 400 },
    { "epoch": 0.39, "grad_norm": 0.18460631370544434, "learning_rate": 1.918149687673478e-05, "loss": 0.4634, "step": 405 },
    { "epoch": 0.39, "grad_norm": 0.16709978878498077, "learning_rate": 1.9161451188247887e-05, "loss": 0.4451, "step": 410 },
    { "epoch": 0.4, "grad_norm": 0.19406840205192566, "learning_rate": 1.914117372873565e-05, "loss": 0.4542, "step": 415 },
    { "epoch": 0.4, "grad_norm": 0.16695013642311096, "learning_rate": 1.912066501118751e-05, "loss": 0.4343, "step": 420 },
    { "epoch": 0.41, "grad_norm": 0.18530474603176117, "learning_rate": 1.9099925554443357e-05, "loss": 0.444, "step": 425 },
    { "epoch": 0.41, "grad_norm": 0.18910014629364014, "learning_rate": 1.9078955883180466e-05, "loss": 0.4575, "step": 430 },
    { "epoch": 0.42, "grad_norm": 0.19215096533298492, "learning_rate": 1.9057756527900188e-05, "loss": 0.4587, "step": 435 },
    { "epoch": 0.42, "grad_norm": 0.18122592568397522, "learning_rate": 1.9036328024914525e-05, "loss": 0.4381, "step": 440 },
    { "epoch": 0.43, "grad_norm": 0.17227929830551147, "learning_rate": 1.9014670916332593e-05, "loss": 0.4473, "step": 445 },
    { "epoch": 0.43, "grad_norm": 0.17411768436431885, "learning_rate": 1.8992785750046866e-05, "loss": 0.4408, "step": 450 },
    { "epoch": 0.44, "grad_norm": 0.18275217711925507, "learning_rate": 1.8970673079719357e-05, "loss": 0.4493, "step": 455 },
    { "epoch": 0.44, "grad_norm": 0.1775893270969391, "learning_rate": 1.8948333464767578e-05, "loss": 0.4654, "step": 460 },
    { "epoch": 0.45, "grad_norm": 0.16952849924564362, "learning_rate": 1.892576747035041e-05, "loss": 0.4403, "step": 465 },
    { "epoch": 0.45, "grad_norm": 0.1598004549741745, "learning_rate": 1.890297566735379e-05, "loss": 0.4313, "step": 470 },
    { "epoch": 0.46, "grad_norm": 0.17070458829402924, "learning_rate": 1.8879958632376276e-05, "loss": 0.4362, "step": 475 },
    { "epoch": 0.46, "grad_norm": 0.18404103815555573, "learning_rate": 1.885671694771446e-05, "loss": 0.441, "step": 480 },
    { "epoch": 0.47, "grad_norm": 0.20091940462589264, "learning_rate": 1.8833251201348233e-05, "loss": 0.4354, "step": 485 },
    { "epoch": 0.47, "grad_norm": 0.1788869947195053, "learning_rate": 1.8809561986925917e-05, "loss": 0.4617, "step": 490 },
    { "epoch": 0.48, "grad_norm": 0.20356979966163635, "learning_rate": 1.8785649903749236e-05, "loss": 0.4338, "step": 495 },
    { "epoch": 0.48, "grad_norm": 0.17706063389778137, "learning_rate": 1.8761515556758162e-05, "loss": 0.4566, "step": 500 },
    { "epoch": 0.48, "grad_norm": 0.17191752791404724, "learning_rate": 1.8737159556515614e-05, "loss": 0.4416, "step": 505 },
    { "epoch": 0.49, "grad_norm": 0.19822804629802704, "learning_rate": 1.8712582519192e-05, "loss": 0.4592, "step": 510 },
    { "epoch": 0.49, "grad_norm": 0.19628900289535522, "learning_rate": 1.868778506654964e-05, "loss": 0.4463, "step": 515 },
    { "epoch": 0.5, "grad_norm": 0.20023687183856964, "learning_rate": 1.8662767825927038e-05, "loss": 0.4313, "step": 520 },
    { "epoch": 0.5, "grad_norm": 0.1745893806219101, "learning_rate": 1.8637531430222993e-05, "loss": 0.4451, "step": 525 },
    { "epoch": 0.51, "grad_norm": 0.18421079218387604, "learning_rate": 1.8612076517880605e-05, "loss": 0.4569, "step": 530 },
    { "epoch": 0.51, "grad_norm": 0.1991654932498932, "learning_rate": 1.8586403732871124e-05, "loss": 0.4381, "step": 535 },
    { "epoch": 0.52, "grad_norm": 0.18618810176849365, "learning_rate": 1.8560513724677644e-05, "loss": 0.447, "step": 540 },
    { "epoch": 0.52, "grad_norm": 0.19722780585289001, "learning_rate": 1.853440714827869e-05, "loss": 0.4499, "step": 545 },
    { "epoch": 0.53, "grad_norm": 0.19388031959533691, "learning_rate": 1.850808466413163e-05, "loss": 0.4504, "step": 550 },
    { "epoch": 0.53, "grad_norm": 0.1990053355693817, "learning_rate": 1.8481546938155988e-05, "loss": 0.4414, "step": 555 },
    { "epoch": 0.54, "grad_norm": 0.2028382122516632, "learning_rate": 1.8454794641716567e-05, "loss": 0.4319, "step": 560 },
    { "epoch": 0.54, "grad_norm": 0.21612244844436646, "learning_rate": 1.84278284516065e-05, "loss": 0.4538, "step": 565 },
    { "epoch": 0.55, "grad_norm": 0.18235105276107788, "learning_rate": 1.840064905003009e-05, "loss": 0.4421, "step": 570 },
    { "epoch": 0.55, "grad_norm": 0.21971595287322998, "learning_rate": 1.8373257124585592e-05, "loss": 0.4354, "step": 575 },
    { "epoch": 0.56, "grad_norm": 0.1827440857887268, "learning_rate": 1.834565336824779e-05, "loss": 0.4234, "step": 580 },
    { "epoch": 0.56, "grad_norm": 0.18092140555381775, "learning_rate": 1.8317838479350473e-05, "loss": 0.439, "step": 585 },
    { "epoch": 0.57, "grad_norm": 0.1922396719455719, "learning_rate": 1.8289813161568758e-05, "loss": 0.4387, "step": 590 },
    { "epoch": 0.57, "grad_norm": 0.18677982687950134, "learning_rate": 1.8261578123901314e-05, "loss": 0.4346, "step": 595 },
    { "epoch": 0.58, "grad_norm": 0.1832067370414734, "learning_rate": 1.82331340806524e-05, "loss": 0.434, "step": 600 },
    { "epoch": 0.58, "grad_norm": 0.19588692486286163, "learning_rate": 1.8204481751413813e-05, "loss": 0.4298, "step": 605 },
    { "epoch": 0.59, "grad_norm": 0.19335758686065674, "learning_rate": 1.817562186104666e-05, "loss": 0.4388, "step": 610 },
    { "epoch": 0.59, "grad_norm": 0.18936721980571747, "learning_rate": 1.814655513966305e-05, "loss": 0.4418, "step": 615 },
    { "epoch": 0.6, "grad_norm": 0.18840767443180084, "learning_rate": 1.8117282322607596e-05, "loss": 0.418, "step": 620 },
    { "epoch": 0.6, "grad_norm": 0.19814783334732056, "learning_rate": 1.808780415043883e-05, "loss": 0.4368, "step": 625 },
    { "epoch": 0.6, "grad_norm": 0.19724276661872864, "learning_rate": 1.805812136891046e-05, "loss": 0.4474, "step": 630 },
    { "epoch": 0.61, "grad_norm": 0.21561075747013092, "learning_rate": 1.8028234728952496e-05, "loss": 0.4333, "step": 635 },
    { "epoch": 0.61, "grad_norm": 0.21301332116127014, "learning_rate": 1.799814498665228e-05, "loss": 0.4186, "step": 640 },
    { "epoch": 0.62, "grad_norm": 0.21463420987129211, "learning_rate": 1.7967852903235328e-05, "loss": 0.4549, "step": 645 },
    { "epoch": 0.62, "grad_norm": 0.19322441518306732, "learning_rate": 1.7937359245046083e-05, "loss": 0.4399, "step": 650 },
    { "epoch": 0.63, "grad_norm": 0.1851872205734253, "learning_rate": 1.7906664783528533e-05, "loss": 0.4314, "step": 655 },
    { "epoch": 0.63, "grad_norm": 0.2039497047662735, "learning_rate": 1.787577029520669e-05, "loss": 0.4268, "step": 660 },
    { "epoch": 0.64, "grad_norm": 0.17417435348033905, "learning_rate": 1.784467656166494e-05, "loss": 0.448, "step": 665 },
    { "epoch": 0.64, "grad_norm": 0.1900678277015686, "learning_rate": 1.7813384369528285e-05, "loss": 0.4182, "step": 670 },
    { "epoch": 0.65, "grad_norm": 0.1949600726366043, "learning_rate": 1.778189451044242e-05, "loss": 0.433, "step": 675 },
    { "epoch": 0.65, "grad_norm": 0.19561733305454254, "learning_rate": 1.7750207781053727e-05, "loss": 0.4251, "step": 680 },
    { "epoch": 0.66, "grad_norm": 0.20206551253795624, "learning_rate": 1.7718324982989113e-05, "loss": 0.4379, "step": 685 },
    { "epoch": 0.66, "grad_norm": 0.18985505402088165, "learning_rate": 1.7686246922835728e-05, "loss": 0.4164, "step": 690 },
    { "epoch": 0.67, "grad_norm": 0.19377106428146362, "learning_rate": 1.7653974412120556e-05, "loss": 0.4318, "step": 695 },
    { "epoch": 0.67, "grad_norm": 0.20084458589553833, "learning_rate": 1.76215082672899e-05, "loss": 0.4331, "step": 700 },
    { "epoch": 0.68, "grad_norm": 0.20488500595092773, "learning_rate": 1.7588849309688704e-05, "loss": 0.4377, "step": 705 },
    { "epoch": 0.68, "grad_norm": 0.17935730516910553, "learning_rate": 1.75559983655398e-05, "loss": 0.4246, "step": 710 },
    { "epoch": 0.69, "grad_norm": 0.1966232806444168, "learning_rate": 1.7522956265922983e-05, "loss": 0.439, "step": 715 },
    { "epoch": 0.69, "grad_norm": 0.19671481847763062, "learning_rate": 1.7489723846754e-05, "loss": 0.4371, "step": 720 },
    { "epoch": 0.7, "grad_norm": 0.19839002192020416, "learning_rate": 1.7456301948763407e-05, "loss": 0.4181, "step": 725 },
    { "epoch": 0.7, "grad_norm": 0.218344584107399, "learning_rate": 1.7422691417475266e-05, "loss": 0.4417, "step": 730 },
    { "epoch": 0.71, "grad_norm": 0.19715192914009094, "learning_rate": 1.73888931031858e-05, "loss": 0.4296, "step": 735 },
    { "epoch": 0.71, "grad_norm": 0.2075146734714508, "learning_rate": 1.7354907860941863e-05, "loss": 0.4353, "step": 740 },
    { "epoch": 0.72, "grad_norm": 0.21111871302127838, "learning_rate": 1.7320736550519296e-05, "loss": 0.4236, "step": 745 },
    { "epoch": 0.72, "grad_norm": 0.2017849087715149, "learning_rate": 1.728638003640119e-05, "loss": 0.4254, "step": 750 },
    { "epoch": 0.72, "grad_norm": 0.2018367499113083, "learning_rate": 1.7251839187756016e-05, "loss": 0.4323, "step": 755 },
    { "epoch": 0.73, "grad_norm": 0.2039439082145691, "learning_rate": 1.7217114878415633e-05, "loss": 0.4423, "step": 760 },
    { "epoch": 0.73, "grad_norm": 0.2052575796842575, "learning_rate": 1.7182207986853176e-05, "loss": 0.4436, "step": 765 },
    { "epoch": 0.74, "grad_norm": 0.1971895545721054, "learning_rate": 1.714711939616084e-05, "loss": 0.4492, "step": 770 },
    { "epoch": 0.74, "grad_norm": 0.20616604387760162, "learning_rate": 1.7111849994027545e-05, "loss": 0.4619, "step": 775 },
    { "epoch": 0.75, "grad_norm": 0.20350460708141327, "learning_rate": 1.7076400672716455e-05, "loss": 0.4497, "step": 780 },
    { "epoch": 0.75, "grad_norm": 0.21535155177116394, "learning_rate": 1.7040772329042435e-05, "loss": 0.43, "step": 785 },
    { "epoch": 0.76, "grad_norm": 0.22234632074832916, "learning_rate": 1.7004965864349335e-05, "loss": 0.4264, "step": 790 },
    { "epoch": 0.76, "grad_norm": 0.20027673244476318, "learning_rate": 1.6968982184487205e-05, "loss": 0.4315, "step": 795 },
    { "epoch": 0.77, "grad_norm": 0.22603030502796173, "learning_rate": 1.693282219978939e-05, "loss": 0.4398, "step": 800 },
    { "epoch": 0.77, "grad_norm": 0.21727794408798218, "learning_rate": 1.6896486825049456e-05, "loss": 0.4226, "step": 805 },
    { "epoch": 0.78, "grad_norm": 0.21654784679412842, "learning_rate": 1.685997697949809e-05, "loss": 0.4131, "step": 810 },
    { "epoch": 0.78, "grad_norm": 0.21828626096248627, "learning_rate": 1.6823293586779838e-05, "loss": 0.4267, "step": 815 },
    { "epoch": 0.79, "grad_norm": 0.20646880567073822, "learning_rate": 1.6786437574929712e-05, "loss": 0.4535, "step": 820 },
    { "epoch": 0.79, "grad_norm": 0.21312721073627472, "learning_rate": 1.674940987634974e-05, "loss": 0.4561, "step": 825 },
    { "epoch": 0.8, "grad_norm": 0.2161865383386612, "learning_rate": 1.6712211427785373e-05, "loss": 0.4367, "step": 830 },
    { "epoch": 0.8, "grad_norm": 0.24069428443908691, "learning_rate": 1.6674843170301764e-05, "loss": 0.4312, "step": 835 },
    { "epoch": 0.81, "grad_norm": 0.21740484237670898, "learning_rate": 1.6637306049259998e-05, "loss": 0.4213, "step": 840 },
    { "epoch": 0.81, "grad_norm": 0.20397590100765228, "learning_rate": 1.6599601014293144e-05, "loss": 0.4311, "step": 845 },
    { "epoch": 0.82, "grad_norm": 0.21462039649486542, "learning_rate": 1.6561729019282245e-05, "loss": 0.4413, "step": 850 },
    { "epoch": 0.82, "grad_norm": 0.19624510407447815, "learning_rate": 1.6523691022332184e-05, "loss": 0.4304, "step": 855 },
    { "epoch": 0.83, "grad_norm": 0.19382067024707794, "learning_rate": 1.648548798574744e-05, "loss": 0.4249, "step": 860 },
    { "epoch": 0.83, "grad_norm": 0.20487557351589203, "learning_rate": 1.6447120876007764e-05, "loss": 0.4257, "step": 865 },
    { "epoch": 0.84, "grad_norm": 0.22991390526294708, "learning_rate": 1.6408590663743688e-05, "loss": 0.4377, "step": 870 },
    { "epoch": 0.84, "grad_norm": 0.19853994250297546, "learning_rate": 1.6369898323712016e-05, "loss": 0.4225, "step": 875 },
    { "epoch": 0.84, "grad_norm": 0.2055709809064865, "learning_rate": 1.633104483477113e-05, "loss": 0.4413, "step": 880 },
    { "epoch": 0.85, "grad_norm": 0.20579014718532562, "learning_rate": 1.6292031179856246e-05, "loss": 0.4219, "step": 885 },
    { "epoch": 0.85, "grad_norm": 0.20581454038619995, "learning_rate": 1.625285834595453e-05, "loss": 0.4398, "step": 890 },
    { "epoch": 0.86, "grad_norm": 0.19869303703308105, "learning_rate": 1.6213527324080152e-05, "loss": 0.4399, "step": 895 },
    { "epoch": 0.86, "grad_norm": 0.20192164182662964, "learning_rate": 1.617403910924919e-05, "loss": 0.4246, "step": 900 },
    { "epoch": 0.87, "grad_norm": 0.2064668983221054, "learning_rate": 1.6134394700454478e-05, "loss": 0.4064, "step": 905 },
    { "epoch": 0.87, "grad_norm": 0.19731996953487396, "learning_rate": 1.6094595100640307e-05, "loss": 0.4355, "step": 910 },
    { "epoch": 0.88, "grad_norm": 0.21017956733703613, "learning_rate": 1.6054641316677094e-05, "loss": 0.4211, "step": 915 },
    { "epoch": 0.88, "grad_norm": 0.21060003340244293, "learning_rate": 1.6014534359335866e-05, "loss": 0.4234, "step": 920 },
    { "epoch": 0.89, "grad_norm": 0.23994727432727814, "learning_rate": 1.597427524326271e-05, "loss": 0.4358, "step": 925 },
    { "epoch": 0.89, "grad_norm": 0.234989732503891, "learning_rate": 1.593386498695311e-05, "loss": 0.4239, "step": 930 },
    { "epoch": 0.9, "grad_norm": 0.23091141879558563, "learning_rate": 1.589330461272616e-05, "loss": 0.4217, "step": 935 },
    { "epoch": 0.9, "grad_norm": 0.2015177458524704, "learning_rate": 1.5852595146698728e-05, "loss": 0.4165, "step": 940 },
    { "epoch": 0.91, "grad_norm": 0.21807970106601715, "learning_rate": 1.581173761875947e-05, "loss": 0.4324, "step": 945 },
    { "epoch": 0.91, "grad_norm": 0.22525441646575928, "learning_rate": 1.5770733062542783e-05, "loss": 0.4281, "step": 950 },
    { "epoch": 0.92, "grad_norm": 0.208124041557312, "learning_rate": 1.5729582515402676e-05, "loss": 0.426, "step": 955 },
    { "epoch": 0.92, "grad_norm": 0.20913442969322205, "learning_rate": 1.5688287018386505e-05, "loss": 0.435, "step": 960 },
    { "epoch": 0.93, "grad_norm": 0.2537720501422882, "learning_rate": 1.5646847616208625e-05, "loss": 0.4152, "step": 965 },
    { "epoch": 0.93, "grad_norm": 0.23365460336208344, "learning_rate": 1.5605265357223998e-05, "loss": 0.4335, "step": 970 },
    { "epoch": 0.94, "grad_norm": 0.2154242992401123, "learning_rate": 1.556354129340164e-05, "loss": 0.4368, "step": 975 },
    { "epoch": 0.94, "grad_norm": 0.19884240627288818, "learning_rate": 1.5521676480298014e-05, "loss": 0.4181, "step": 980 },
    { "epoch": 0.95, "grad_norm": 0.1923339068889618, "learning_rate": 1.547967197703034e-05, "loss": 0.4266, "step": 985 },
    { "epoch": 0.95, "grad_norm": 0.21156719326972961, "learning_rate": 1.5437528846249783e-05, "loss": 0.4298, "step": 990 },
    { "epoch": 0.96, "grad_norm": 0.21929724514484406, "learning_rate": 1.5395248154114574e-05, "loss": 0.4258, "step": 995 },
    { "epoch": 0.96, "grad_norm": 0.22225961089134216, "learning_rate": 1.535283097026305e-05, "loss": 0.4345, "step": 1000 }
  ],
  "logging_steps": 5,
  "max_steps": 3123,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 3.53274538981589e+18,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}