{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9984810126582278,
  "eval_steps": 500,
  "global_step": 2961,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.010126582278481013,
      "grad_norm": 2.9319232619463147,
      "learning_rate": 5e-06,
      "loss": 0.754,
      "step": 10
    },
    {
      "epoch": 0.020253164556962026,
      "grad_norm": 2.3527842138237443,
      "learning_rate": 5e-06,
      "loss": 0.6565,
      "step": 20
    },
    {
      "epoch": 0.030379746835443037,
      "grad_norm": 2.218097297827581,
      "learning_rate": 5e-06,
      "loss": 0.6343,
      "step": 30
    },
    {
      "epoch": 0.04050632911392405,
      "grad_norm": 1.728885416020883,
      "learning_rate": 5e-06,
      "loss": 0.6266,
      "step": 40
    },
    {
      "epoch": 0.05063291139240506,
      "grad_norm": 1.7600150721761239,
      "learning_rate": 5e-06,
      "loss": 0.6225,
      "step": 50
    },
    {
      "epoch": 0.060759493670886074,
      "grad_norm": 2.036486573113631,
      "learning_rate": 5e-06,
      "loss": 0.6185,
      "step": 60
    },
    {
      "epoch": 0.07088607594936709,
      "grad_norm": 1.8472990337897077,
      "learning_rate": 5e-06,
      "loss": 0.6137,
      "step": 70
    },
    {
      "epoch": 0.0810126582278481,
      "grad_norm": 2.0268825324116926,
      "learning_rate": 5e-06,
      "loss": 0.6035,
      "step": 80
    },
    {
      "epoch": 0.09113924050632911,
      "grad_norm": 1.7502041363801772,
      "learning_rate": 5e-06,
      "loss": 0.6063,
      "step": 90
    },
    {
      "epoch": 0.10126582278481013,
      "grad_norm": 1.7706135514845762,
      "learning_rate": 5e-06,
      "loss": 0.5954,
      "step": 100
    },
    {
      "epoch": 0.11139240506329114,
      "grad_norm": 1.6647485123629138,
      "learning_rate": 5e-06,
      "loss": 0.5933,
      "step": 110
    },
    {
      "epoch": 0.12151898734177215,
      "grad_norm": 2.299944971561371,
      "learning_rate": 5e-06,
      "loss": 0.598,
      "step": 120
    },
    {
      "epoch": 0.13164556962025317,
      "grad_norm": 1.4323042297049071,
      "learning_rate": 5e-06,
      "loss": 0.597,
      "step": 130
    },
    {
      "epoch": 0.14177215189873418,
      "grad_norm": 1.5735470897186044,
      "learning_rate": 5e-06,
      "loss": 0.5859,
      "step": 140
    },
    {
      "epoch": 0.1518987341772152,
      "grad_norm": 1.4396445107803788,
      "learning_rate": 5e-06,
      "loss": 0.5833,
      "step": 150
    },
    {
      "epoch": 0.1620253164556962,
      "grad_norm": 1.6048875360767698,
      "learning_rate": 5e-06,
      "loss": 0.5926,
      "step": 160
    },
    {
      "epoch": 0.17215189873417722,
      "grad_norm": 1.7629611019629856,
      "learning_rate": 5e-06,
      "loss": 0.5878,
      "step": 170
    },
    {
      "epoch": 0.18227848101265823,
      "grad_norm": 1.692513854306383,
      "learning_rate": 5e-06,
      "loss": 0.5875,
      "step": 180
    },
    {
      "epoch": 0.19240506329113924,
      "grad_norm": 2.4645583630800614,
      "learning_rate": 5e-06,
      "loss": 0.5908,
      "step": 190
    },
    {
      "epoch": 0.20253164556962025,
      "grad_norm": 1.6304964629686653,
      "learning_rate": 5e-06,
      "loss": 0.5887,
      "step": 200
    },
    {
      "epoch": 0.21265822784810126,
      "grad_norm": 2.117270092101592,
      "learning_rate": 5e-06,
      "loss": 0.5872,
      "step": 210
    },
    {
      "epoch": 0.22278481012658227,
      "grad_norm": 1.5677168838868496,
      "learning_rate": 5e-06,
      "loss": 0.575,
      "step": 220
    },
    {
      "epoch": 0.23291139240506328,
      "grad_norm": 1.6515647911636342,
      "learning_rate": 5e-06,
      "loss": 0.5747,
      "step": 230
    },
    {
      "epoch": 0.2430379746835443,
      "grad_norm": 1.4834458457301771,
      "learning_rate": 5e-06,
      "loss": 0.5834,
      "step": 240
    },
    {
      "epoch": 0.25316455696202533,
      "grad_norm": 2.090691823913493,
      "learning_rate": 5e-06,
      "loss": 0.5793,
      "step": 250
    },
    {
      "epoch": 0.26329113924050634,
      "grad_norm": 1.4520944372541345,
      "learning_rate": 5e-06,
      "loss": 0.5779,
      "step": 260
    },
    {
      "epoch": 0.27341772151898736,
      "grad_norm": 1.7905780272726646,
      "learning_rate": 5e-06,
      "loss": 0.5637,
      "step": 270
    },
    {
      "epoch": 0.28354430379746837,
      "grad_norm": 1.6753642665165924,
      "learning_rate": 5e-06,
      "loss": 0.5745,
      "step": 280
    },
    {
      "epoch": 0.2936708860759494,
      "grad_norm": 1.8117100564747939,
      "learning_rate": 5e-06,
      "loss": 0.577,
      "step": 290
    },
    {
      "epoch": 0.3037974683544304,
      "grad_norm": 1.5160896257954382,
      "learning_rate": 5e-06,
      "loss": 0.5821,
      "step": 300
    },
    {
      "epoch": 0.3139240506329114,
      "grad_norm": 1.5338602444115943,
      "learning_rate": 5e-06,
      "loss": 0.574,
      "step": 310
    },
    {
      "epoch": 0.3240506329113924,
      "grad_norm": 1.5768816348730115,
      "learning_rate": 5e-06,
      "loss": 0.5831,
      "step": 320
    },
    {
      "epoch": 0.3341772151898734,
      "grad_norm": 1.5251244093370149,
      "learning_rate": 5e-06,
      "loss": 0.5793,
      "step": 330
    },
    {
      "epoch": 0.34430379746835443,
      "grad_norm": 1.5422210280401378,
      "learning_rate": 5e-06,
      "loss": 0.579,
      "step": 340
    },
    {
      "epoch": 0.35443037974683544,
      "grad_norm": 1.4422891794630028,
      "learning_rate": 5e-06,
      "loss": 0.5794,
      "step": 350
    },
    {
      "epoch": 0.36455696202531646,
      "grad_norm": 1.778098375508798,
      "learning_rate": 5e-06,
      "loss": 0.5702,
      "step": 360
    },
    {
      "epoch": 0.37468354430379747,
      "grad_norm": 1.4276847831429058,
      "learning_rate": 5e-06,
      "loss": 0.5762,
      "step": 370
    },
    {
      "epoch": 0.3848101265822785,
      "grad_norm": 1.8350692685441172,
      "learning_rate": 5e-06,
      "loss": 0.5742,
      "step": 380
    },
    {
      "epoch": 0.3949367088607595,
      "grad_norm": 1.6654466906631886,
      "learning_rate": 5e-06,
      "loss": 0.57,
      "step": 390
    },
    {
      "epoch": 0.4050632911392405,
      "grad_norm": 1.5625914451532021,
      "learning_rate": 5e-06,
      "loss": 0.5688,
      "step": 400
    },
    {
      "epoch": 0.4151898734177215,
      "grad_norm": 1.5962831101550994,
      "learning_rate": 5e-06,
      "loss": 0.5567,
      "step": 410
    },
    {
      "epoch": 0.4253164556962025,
      "grad_norm": 1.4805817323596187,
      "learning_rate": 5e-06,
      "loss": 0.5723,
      "step": 420
    },
    {
      "epoch": 0.43544303797468353,
      "grad_norm": 1.4164796466458769,
      "learning_rate": 5e-06,
      "loss": 0.5712,
      "step": 430
    },
    {
      "epoch": 0.44556962025316454,
      "grad_norm": 1.5450127362729664,
      "learning_rate": 5e-06,
      "loss": 0.5704,
      "step": 440
    },
    {
      "epoch": 0.45569620253164556,
      "grad_norm": 1.6570421627781717,
      "learning_rate": 5e-06,
      "loss": 0.5729,
      "step": 450
    },
    {
      "epoch": 0.46582278481012657,
      "grad_norm": 1.623306340780734,
      "learning_rate": 5e-06,
      "loss": 0.5673,
      "step": 460
    },
    {
      "epoch": 0.4759493670886076,
      "grad_norm": 1.747763610168544,
      "learning_rate": 5e-06,
      "loss": 0.5609,
      "step": 470
    },
    {
      "epoch": 0.4860759493670886,
      "grad_norm": 1.295527401866004,
      "learning_rate": 5e-06,
      "loss": 0.5699,
      "step": 480
    },
    {
      "epoch": 0.4962025316455696,
      "grad_norm": 1.3529876566902441,
      "learning_rate": 5e-06,
      "loss": 0.5783,
      "step": 490
    },
    {
      "epoch": 0.5063291139240507,
      "grad_norm": 1.4541590535564763,
      "learning_rate": 5e-06,
      "loss": 0.5606,
      "step": 500
    },
    {
      "epoch": 0.5164556962025316,
      "grad_norm": 1.5391153152763637,
      "learning_rate": 5e-06,
      "loss": 0.5598,
      "step": 510
    },
    {
      "epoch": 0.5265822784810127,
      "grad_norm": 1.3816263219270335,
      "learning_rate": 5e-06,
      "loss": 0.552,
      "step": 520
    },
    {
      "epoch": 0.5367088607594936,
      "grad_norm": 1.407148137331511,
      "learning_rate": 5e-06,
      "loss": 0.561,
      "step": 530
    },
    {
      "epoch": 0.5468354430379747,
      "grad_norm": 1.4939662242734717,
      "learning_rate": 5e-06,
      "loss": 0.5691,
      "step": 540
    },
    {
      "epoch": 0.5569620253164557,
      "grad_norm": 1.4769024813528058,
      "learning_rate": 5e-06,
      "loss": 0.5628,
      "step": 550
    },
    {
      "epoch": 0.5670886075949367,
      "grad_norm": 1.5234796484078978,
      "learning_rate": 5e-06,
      "loss": 0.5571,
      "step": 560
    },
    {
      "epoch": 0.5772151898734177,
      "grad_norm": 1.38748512162588,
      "learning_rate": 5e-06,
      "loss": 0.5588,
      "step": 570
    },
    {
      "epoch": 0.5873417721518988,
      "grad_norm": 1.6616670500045834,
      "learning_rate": 5e-06,
      "loss": 0.5629,
      "step": 580
    },
    {
      "epoch": 0.5974683544303797,
      "grad_norm": 1.3613770956091802,
      "learning_rate": 5e-06,
      "loss": 0.5597,
      "step": 590
    },
    {
      "epoch": 0.6075949367088608,
      "grad_norm": 1.9645875965732293,
      "learning_rate": 5e-06,
      "loss": 0.5614,
      "step": 600
    },
    {
      "epoch": 0.6177215189873417,
      "grad_norm": 1.3122962556953037,
      "learning_rate": 5e-06,
      "loss": 0.57,
      "step": 610
    },
    {
      "epoch": 0.6278481012658228,
      "grad_norm": 1.5024516266743335,
      "learning_rate": 5e-06,
      "loss": 0.5629,
      "step": 620
    },
    {
      "epoch": 0.6379746835443038,
      "grad_norm": 1.3599801460050238,
      "learning_rate": 5e-06,
      "loss": 0.5679,
      "step": 630
    },
    {
      "epoch": 0.6481012658227848,
      "grad_norm": 1.3697118174446543,
      "learning_rate": 5e-06,
      "loss": 0.5567,
      "step": 640
    },
    {
      "epoch": 0.6582278481012658,
      "grad_norm": 1.447467777446668,
      "learning_rate": 5e-06,
      "loss": 0.5614,
      "step": 650
    },
    {
      "epoch": 0.6683544303797468,
      "grad_norm": 1.4332946464750023,
      "learning_rate": 5e-06,
      "loss": 0.5629,
      "step": 660
    },
    {
      "epoch": 0.6784810126582278,
      "grad_norm": 1.4245518863907969,
      "learning_rate": 5e-06,
      "loss": 0.5607,
      "step": 670
    },
    {
      "epoch": 0.6886075949367089,
      "grad_norm": 1.392960913673911,
      "learning_rate": 5e-06,
      "loss": 0.5631,
      "step": 680
    },
    {
      "epoch": 0.6987341772151898,
      "grad_norm": 1.3851040024314174,
      "learning_rate": 5e-06,
      "loss": 0.5547,
      "step": 690
    },
    {
      "epoch": 0.7088607594936709,
      "grad_norm": 1.5665731498555628,
      "learning_rate": 5e-06,
      "loss": 0.5729,
      "step": 700
    },
    {
      "epoch": 0.7189873417721518,
      "grad_norm": 1.3541133459928674,
      "learning_rate": 5e-06,
      "loss": 0.567,
      "step": 710
    },
    {
      "epoch": 0.7291139240506329,
      "grad_norm": 1.3050422174455243,
      "learning_rate": 5e-06,
      "loss": 0.5649,
      "step": 720
    },
    {
      "epoch": 0.739240506329114,
      "grad_norm": 1.4474203168962876,
      "learning_rate": 5e-06,
      "loss": 0.5587,
      "step": 730
    },
    {
      "epoch": 0.7493670886075949,
      "grad_norm": 1.2849691486389958,
      "learning_rate": 5e-06,
      "loss": 0.5584,
      "step": 740
    },
    {
      "epoch": 0.759493670886076,
      "grad_norm": 1.4374182399089128,
      "learning_rate": 5e-06,
      "loss": 0.5485,
      "step": 750
    },
    {
      "epoch": 0.769620253164557,
      "grad_norm": 1.2784166011759992,
      "learning_rate": 5e-06,
      "loss": 0.5619,
      "step": 760
    },
    {
      "epoch": 0.779746835443038,
      "grad_norm": 1.338741085455322,
      "learning_rate": 5e-06,
      "loss": 0.5625,
      "step": 770
    },
    {
      "epoch": 0.789873417721519,
      "grad_norm": 1.3830345955439087,
      "learning_rate": 5e-06,
      "loss": 0.5532,
      "step": 780
    },
    {
      "epoch": 0.8,
      "grad_norm": 1.2560229071250701,
      "learning_rate": 5e-06,
      "loss": 0.557,
      "step": 790
    },
    {
      "epoch": 0.810126582278481,
      "grad_norm": 1.3094948207554686,
      "learning_rate": 5e-06,
      "loss": 0.5606,
      "step": 800
    },
    {
      "epoch": 0.8202531645569621,
      "grad_norm": 1.2980406537156193,
      "learning_rate": 5e-06,
      "loss": 0.5435,
      "step": 810
    },
    {
      "epoch": 0.830379746835443,
      "grad_norm": 1.3336678028790407,
      "learning_rate": 5e-06,
      "loss": 0.5644,
      "step": 820
    },
    {
      "epoch": 0.8405063291139241,
      "grad_norm": 1.4362113888678505,
      "learning_rate": 5e-06,
      "loss": 0.5615,
      "step": 830
    },
    {
      "epoch": 0.850632911392405,
      "grad_norm": 1.5496595209680522,
      "learning_rate": 5e-06,
      "loss": 0.5619,
      "step": 840
    },
    {
      "epoch": 0.8607594936708861,
      "grad_norm": 1.4648845925364151,
      "learning_rate": 5e-06,
      "loss": 0.5556,
      "step": 850
    },
    {
      "epoch": 0.8708860759493671,
      "grad_norm": 1.3830147030582034,
      "learning_rate": 5e-06,
      "loss": 0.5534,
      "step": 860
    },
    {
      "epoch": 0.8810126582278481,
      "grad_norm": 1.3802375047462647,
      "learning_rate": 5e-06,
      "loss": 0.5654,
      "step": 870
    },
    {
      "epoch": 0.8911392405063291,
      "grad_norm": 1.665356350061181,
      "learning_rate": 5e-06,
      "loss": 0.5542,
      "step": 880
    },
    {
      "epoch": 0.9012658227848102,
      "grad_norm": 1.5275969827966436,
      "learning_rate": 5e-06,
      "loss": 0.5492,
      "step": 890
    },
    {
      "epoch": 0.9113924050632911,
      "grad_norm": 1.5257937868842386,
      "learning_rate": 5e-06,
      "loss": 0.5509,
      "step": 900
    },
    {
      "epoch": 0.9215189873417722,
      "grad_norm": 1.3059469433496236,
      "learning_rate": 5e-06,
      "loss": 0.5563,
      "step": 910
    },
    {
      "epoch": 0.9316455696202531,
      "grad_norm": 1.3687394115237168,
      "learning_rate": 5e-06,
      "loss": 0.5527,
      "step": 920
    },
    {
      "epoch": 0.9417721518987342,
      "grad_norm": 1.3468586235595001,
      "learning_rate": 5e-06,
      "loss": 0.5594,
      "step": 930
    },
    {
      "epoch": 0.9518987341772152,
      "grad_norm": 1.4377274430310882,
      "learning_rate": 5e-06,
      "loss": 0.5512,
      "step": 940
    },
    {
      "epoch": 0.9620253164556962,
      "grad_norm": 1.2680685522134791,
      "learning_rate": 5e-06,
      "loss": 0.5486,
      "step": 950
    },
    {
      "epoch": 0.9721518987341772,
      "grad_norm": 1.2807954706357672,
      "learning_rate": 5e-06,
      "loss": 0.5566,
      "step": 960
    },
    {
      "epoch": 0.9822784810126582,
      "grad_norm": 1.3423829666439555,
      "learning_rate": 5e-06,
      "loss": 0.5485,
      "step": 970
    },
    {
      "epoch": 0.9924050632911392,
      "grad_norm": 1.4283880670670543,
      "learning_rate": 5e-06,
      "loss": 0.5515,
      "step": 980
    },
    {
      "epoch": 0.999493670886076,
      "eval_loss": 0.13834980130195618,
      "eval_runtime": 507.9163,
      "eval_samples_per_second": 26.193,
      "eval_steps_per_second": 0.41,
      "step": 987
    },
    {
      "epoch": 1.0025316455696203,
      "grad_norm": 1.6161728864680243,
      "learning_rate": 5e-06,
      "loss": 0.5308,
      "step": 990
    },
    {
      "epoch": 1.0126582278481013,
      "grad_norm": 1.5737881952906483,
      "learning_rate": 5e-06,
      "loss": 0.4633,
      "step": 1000
    },
    {
      "epoch": 1.0227848101265822,
      "grad_norm": 1.5616941170427905,
      "learning_rate": 5e-06,
      "loss": 0.4634,
      "step": 1010
    },
    {
      "epoch": 1.0329113924050632,
      "grad_norm": 1.5346519713859017,
      "learning_rate": 5e-06,
      "loss": 0.4435,
      "step": 1020
    },
    {
      "epoch": 1.0430379746835443,
      "grad_norm": 1.3492138788127321,
      "learning_rate": 5e-06,
      "loss": 0.4503,
      "step": 1030
    },
    {
      "epoch": 1.0531645569620254,
      "grad_norm": 1.4763337790140822,
      "learning_rate": 5e-06,
      "loss": 0.4506,
      "step": 1040
    },
    {
      "epoch": 1.0632911392405062,
      "grad_norm": 1.4225225244349122,
      "learning_rate": 5e-06,
      "loss": 0.4591,
      "step": 1050
    },
    {
      "epoch": 1.0734177215189873,
      "grad_norm": 1.377686472346139,
      "learning_rate": 5e-06,
      "loss": 0.4498,
      "step": 1060
    },
    {
      "epoch": 1.0835443037974684,
      "grad_norm": 1.524899956605274,
      "learning_rate": 5e-06,
      "loss": 0.4521,
      "step": 1070
    },
    {
      "epoch": 1.0936708860759494,
      "grad_norm": 1.4555671547809872,
      "learning_rate": 5e-06,
      "loss": 0.4537,
      "step": 1080
    },
    {
      "epoch": 1.1037974683544305,
      "grad_norm": 1.352652609202294,
      "learning_rate": 5e-06,
      "loss": 0.4591,
      "step": 1090
    },
    {
      "epoch": 1.1139240506329113,
      "grad_norm": 1.389780851883711,
      "learning_rate": 5e-06,
      "loss": 0.451,
      "step": 1100
    },
    {
      "epoch": 1.1240506329113924,
      "grad_norm": 1.5493151434769157,
      "learning_rate": 5e-06,
      "loss": 0.4545,
      "step": 1110
    },
    {
      "epoch": 1.1341772151898735,
      "grad_norm": 1.4921163467682323,
      "learning_rate": 5e-06,
      "loss": 0.4511,
      "step": 1120
    },
    {
      "epoch": 1.1443037974683543,
      "grad_norm": 1.3424106662166821,
      "learning_rate": 5e-06,
      "loss": 0.4597,
      "step": 1130
    },
    {
      "epoch": 1.1544303797468354,
      "grad_norm": 1.4205445966678738,
      "learning_rate": 5e-06,
      "loss": 0.4587,
      "step": 1140
    },
    {
      "epoch": 1.1645569620253164,
      "grad_norm": 1.3812137606947525,
      "learning_rate": 5e-06,
      "loss": 0.457,
      "step": 1150
    },
    {
      "epoch": 1.1746835443037975,
      "grad_norm": 1.504094206656827,
      "learning_rate": 5e-06,
      "loss": 0.4582,
      "step": 1160
    },
    {
      "epoch": 1.1848101265822786,
      "grad_norm": 1.4384269712621547,
      "learning_rate": 5e-06,
      "loss": 0.4478,
      "step": 1170
    },
    {
      "epoch": 1.1949367088607594,
      "grad_norm": 1.4604436825013358,
      "learning_rate": 5e-06,
      "loss": 0.4561,
      "step": 1180
    },
    {
      "epoch": 1.2050632911392405,
      "grad_norm": 1.484357252258588,
      "learning_rate": 5e-06,
      "loss": 0.463,
      "step": 1190
    },
    {
      "epoch": 1.2151898734177216,
      "grad_norm": 1.3507024353159274,
      "learning_rate": 5e-06,
      "loss": 0.4557,
      "step": 1200
    },
    {
      "epoch": 1.2253164556962026,
      "grad_norm": 1.5356768758566504,
      "learning_rate": 5e-06,
      "loss": 0.4616,
      "step": 1210
    },
    {
      "epoch": 1.2354430379746835,
      "grad_norm": 1.507777364560312,
      "learning_rate": 5e-06,
      "loss": 0.4492,
      "step": 1220
    },
    {
      "epoch": 1.2455696202531645,
      "grad_norm": 1.402742621286163,
      "learning_rate": 5e-06,
      "loss": 0.4623,
      "step": 1230
    },
    {
      "epoch": 1.2556962025316456,
      "grad_norm": 1.4914361590080172,
      "learning_rate": 5e-06,
      "loss": 0.4576,
      "step": 1240
    },
    {
      "epoch": 1.2658227848101267,
      "grad_norm": 1.4449809880641054,
      "learning_rate": 5e-06,
      "loss": 0.4581,
      "step": 1250
    },
    {
      "epoch": 1.2759493670886077,
      "grad_norm": 1.401311614162818,
      "learning_rate": 5e-06,
      "loss": 0.4613,
      "step": 1260
    },
    {
      "epoch": 1.2860759493670886,
      "grad_norm": 1.4035116916924748,
      "learning_rate": 5e-06,
      "loss": 0.4546,
      "step": 1270
    },
    {
      "epoch": 1.2962025316455696,
      "grad_norm": 1.5360414127344846,
      "learning_rate": 5e-06,
      "loss": 0.4638,
      "step": 1280
    },
    {
      "epoch": 1.3063291139240507,
      "grad_norm": 1.4721064580495236,
      "learning_rate": 5e-06,
      "loss": 0.4605,
      "step": 1290
    },
    {
      "epoch": 1.3164556962025316,
      "grad_norm": 1.5018662441760837,
      "learning_rate": 5e-06,
      "loss": 0.4656,
      "step": 1300
    },
    {
      "epoch": 1.3265822784810126,
      "grad_norm": 1.4057750381381058,
      "learning_rate": 5e-06,
      "loss": 0.4546,
      "step": 1310
    },
    {
      "epoch": 1.3367088607594937,
      "grad_norm": 1.472547127671551,
      "learning_rate": 5e-06,
      "loss": 0.4509,
      "step": 1320
    },
    {
      "epoch": 1.3468354430379748,
      "grad_norm": 1.378222631334251,
      "learning_rate": 5e-06,
      "loss": 0.4592,
      "step": 1330
    },
    {
      "epoch": 1.3569620253164558,
      "grad_norm": 1.477645235480499,
      "learning_rate": 5e-06,
      "loss": 0.4731,
      "step": 1340
    },
    {
      "epoch": 1.3670886075949367,
      "grad_norm": 1.4190131074856087,
      "learning_rate": 5e-06,
      "loss": 0.4614,
      "step": 1350
    },
    {
      "epoch": 1.3772151898734177,
      "grad_norm": 1.598773468818255,
      "learning_rate": 5e-06,
      "loss": 0.4642,
      "step": 1360
    },
    {
      "epoch": 1.3873417721518988,
      "grad_norm": 1.401701470107724,
      "learning_rate": 5e-06,
      "loss": 0.4599,
      "step": 1370
    },
    {
      "epoch": 1.3974683544303796,
      "grad_norm": 1.5834771319486265,
      "learning_rate": 5e-06,
      "loss": 0.4635,
      "step": 1380
    },
    {
      "epoch": 1.4075949367088607,
      "grad_norm": 1.398082340799174,
      "learning_rate": 5e-06,
      "loss": 0.4581,
      "step": 1390
    },
    {
      "epoch": 1.4177215189873418,
      "grad_norm": 1.4006340071029044,
      "learning_rate": 5e-06,
      "loss": 0.4602,
      "step": 1400
    },
    {
      "epoch": 1.4278481012658228,
      "grad_norm": 1.3336328229398797,
      "learning_rate": 5e-06,
      "loss": 0.4602,
      "step": 1410
    },
    {
      "epoch": 1.437974683544304,
      "grad_norm": 1.3492817883075563,
      "learning_rate": 5e-06,
      "loss": 0.4687,
      "step": 1420
    },
    {
      "epoch": 1.4481012658227848,
      "grad_norm": 1.342687219512621,
      "learning_rate": 5e-06,
      "loss": 0.4651,
      "step": 1430
    },
    {
      "epoch": 1.4582278481012658,
      "grad_norm": 1.4950318280118493,
      "learning_rate": 5e-06,
      "loss": 0.4643,
      "step": 1440
    },
    {
      "epoch": 1.4683544303797469,
      "grad_norm": 1.5341419482481558,
      "learning_rate": 5e-06,
      "loss": 0.467,
      "step": 1450
    },
    {
      "epoch": 1.4784810126582277,
      "grad_norm": 1.3873503711966886,
      "learning_rate": 5e-06,
      "loss": 0.4692,
      "step": 1460
    },
    {
      "epoch": 1.4886075949367088,
      "grad_norm": 1.3838670814377412,
      "learning_rate": 5e-06,
      "loss": 0.4606,
      "step": 1470
    },
    {
      "epoch": 1.4987341772151899,
      "grad_norm": 1.435159074036311,
      "learning_rate": 5e-06,
      "loss": 0.4607,
      "step": 1480
    },
    {
      "epoch": 1.508860759493671,
      "grad_norm": 1.416449038632833,
      "learning_rate": 5e-06,
      "loss": 0.4603,
      "step": 1490
    },
    {
      "epoch": 1.518987341772152,
      "grad_norm": 1.349053423409347,
      "learning_rate": 5e-06,
      "loss": 0.4671,
      "step": 1500
    },
    {
      "epoch": 1.529113924050633,
      "grad_norm": 1.5418750945784405,
      "learning_rate": 5e-06,
      "loss": 0.4642,
      "step": 1510
    },
    {
      "epoch": 1.539240506329114,
      "grad_norm": 1.5350888472278188,
      "learning_rate": 5e-06,
      "loss": 0.4654,
      "step": 1520
    },
    {
      "epoch": 1.549367088607595,
      "grad_norm": 1.5918459039624637,
      "learning_rate": 5e-06,
      "loss": 0.4623,
      "step": 1530
    },
    {
      "epoch": 1.5594936708860758,
      "grad_norm": 1.4044783253886717,
      "learning_rate": 5e-06,
      "loss": 0.4644,
      "step": 1540
    },
    {
      "epoch": 1.5696202531645569,
      "grad_norm": 1.4733622287724428,
      "learning_rate": 5e-06,
      "loss": 0.4699,
      "step": 1550
    },
    {
      "epoch": 1.579746835443038,
      "grad_norm": 1.4190534275279456,
      "learning_rate": 5e-06,
      "loss": 0.4585,
      "step": 1560
    },
    {
      "epoch": 1.589873417721519,
      "grad_norm": 1.588541120324257,
      "learning_rate": 5e-06,
      "loss": 0.4674,
      "step": 1570
    },
    {
      "epoch": 1.6,
      "grad_norm": 1.4489487698353771,
      "learning_rate": 5e-06,
      "loss": 0.4659,
      "step": 1580
    },
    {
      "epoch": 1.6101265822784812,
      "grad_norm": 1.3499923545579962,
      "learning_rate": 5e-06,
      "loss": 0.4582,
      "step": 1590
    },
    {
      "epoch": 1.620253164556962,
      "grad_norm": 1.3640430679010667,
      "learning_rate": 5e-06,
      "loss": 0.4676,
      "step": 1600
    },
    {
      "epoch": 1.630379746835443,
      "grad_norm": 1.3690059243296218,
      "learning_rate": 5e-06,
      "loss": 0.4612,
      "step": 1610
    },
    {
      "epoch": 1.640506329113924,
      "grad_norm": 1.2447374210211029,
      "learning_rate": 5e-06,
      "loss": 0.4506,
      "step": 1620
    },
    {
      "epoch": 1.650632911392405,
      "grad_norm": 1.351219366349358,
      "learning_rate": 5e-06,
      "loss": 0.4611,
      "step": 1630
    },
    {
      "epoch": 1.660759493670886,
      "grad_norm": 1.335122455907824,
      "learning_rate": 5e-06,
      "loss": 0.4699,
      "step": 1640
    },
    {
      "epoch": 1.6708860759493671,
      "grad_norm": 1.5625860035007904,
      "learning_rate": 5e-06,
      "loss": 0.4612,
      "step": 1650
    },
    {
      "epoch": 1.6810126582278482,
      "grad_norm": 1.6296205380869357,
      "learning_rate": 5e-06,
      "loss": 0.4623,
      "step": 1660
    },
    {
      "epoch": 1.6911392405063292,
      "grad_norm": 1.4330065379895607,
      "learning_rate": 5e-06,
      "loss": 0.4625,
      "step": 1670
    },
    {
      "epoch": 1.70126582278481,
      "grad_norm": 1.3278761181861125,
      "learning_rate": 5e-06,
      "loss": 0.4627,
      "step": 1680
    },
    {
      "epoch": 1.7113924050632912,
      "grad_norm": 1.4304366765861536,
      "learning_rate": 5e-06,
      "loss": 0.4646,
      "step": 1690
    },
    {
      "epoch": 1.721518987341772,
      "grad_norm": 1.3460103891901807,
      "learning_rate": 5e-06,
      "loss": 0.4648,
      "step": 1700
    },
    {
      "epoch": 1.731645569620253,
      "grad_norm": 1.4336438757742112,
      "learning_rate": 5e-06,
      "loss": 0.4708,
      "step": 1710
    },
    {
      "epoch": 1.7417721518987341,
      "grad_norm": 1.333163056745564,
      "learning_rate": 5e-06,
      "loss": 0.4591,
      "step": 1720
    },
    {
      "epoch": 1.7518987341772152,
      "grad_norm": 1.3819280069426765,
      "learning_rate": 5e-06,
      "loss": 0.4651,
      "step": 1730
    },
    {
      "epoch": 1.7620253164556963,
      "grad_norm": 1.413028622360716,
      "learning_rate": 5e-06,
      "loss": 0.4714,
      "step": 1740
    },
    {
      "epoch": 1.7721518987341773,
      "grad_norm": 1.3989242299354576,
      "learning_rate": 5e-06,
      "loss": 0.466,
      "step": 1750
    },
    {
      "epoch": 1.7822784810126582,
      "grad_norm": 1.3712554248183217,
      "learning_rate": 5e-06,
      "loss": 0.4727,
      "step": 1760
    },
    {
      "epoch": 1.7924050632911392,
      "grad_norm": 1.4780769754046013,
      "learning_rate": 5e-06,
      "loss": 0.4679,
      "step": 1770
    },
    {
      "epoch": 1.80253164556962,
      "grad_norm": 1.4002831190634755,
      "learning_rate": 5e-06,
      "loss": 0.4647,
      "step": 1780
    },
    {
      "epoch": 1.8126582278481012,
      "grad_norm": 1.3944675393169206,
      "learning_rate": 5e-06,
      "loss": 0.4644,
      "step": 1790
    },
    {
      "epoch": 1.8227848101265822,
      "grad_norm": 1.4867966880006709,
      "learning_rate": 5e-06,
      "loss": 0.4675,
      "step": 1800
    },
    {
      "epoch": 1.8329113924050633,
      "grad_norm": 1.404496416486814,
      "learning_rate": 5e-06,
      "loss": 0.4624,
      "step": 1810
    },
    {
      "epoch": 1.8430379746835444,
      "grad_norm": 1.5305845929183253,
      "learning_rate": 5e-06,
      "loss": 0.4739,
      "step": 1820
    },
    {
      "epoch": 1.8531645569620254,
      "grad_norm": 1.3084464311737185,
      "learning_rate": 5e-06,
      "loss": 0.4655,
      "step": 1830
    },
    {
      "epoch": 1.8632911392405065,
      "grad_norm": 1.3059438675535409,
      "learning_rate": 5e-06,
      "loss": 0.4678,
      "step": 1840
    },
    {
      "epoch": 1.8734177215189873,
      "grad_norm": 1.3867484618996055,
      "learning_rate": 5e-06,
      "loss": 0.4609,
      "step": 1850
    },
    {
      "epoch": 1.8835443037974684,
      "grad_norm": 1.3420942943215763,
      "learning_rate": 5e-06,
      "loss": 0.4663,
      "step": 1860
    },
    {
      "epoch": 1.8936708860759492,
      "grad_norm": 1.421266278043212,
      "learning_rate": 5e-06,
      "loss": 0.4665,
      "step": 1870
    },
    {
      "epoch": 1.9037974683544303,
      "grad_norm": 1.36634296470369,
      "learning_rate": 5e-06,
      "loss": 0.4676,
      "step": 1880
    },
    {
      "epoch": 1.9139240506329114,
      "grad_norm": 1.2659671091408768,
      "learning_rate": 5e-06,
      "loss": 0.4669,
      "step": 1890
    },
    {
      "epoch": 1.9240506329113924,
      "grad_norm": 1.373264332933901,
      "learning_rate": 5e-06,
      "loss": 0.4701,
      "step": 1900
    },
    {
      "epoch": 1.9341772151898735,
      "grad_norm": 1.3568414058185374,
      "learning_rate": 5e-06,
      "loss": 0.4693,
      "step": 1910
    },
    {
      "epoch": 1.9443037974683546,
      "grad_norm": 1.3940261082697252,
      "learning_rate": 5e-06,
      "loss": 0.4763,
      "step": 1920
    },
    {
      "epoch": 1.9544303797468354,
      "grad_norm": 1.3879188735035681,
      "learning_rate": 5e-06,
      "loss": 0.4756,
      "step": 1930
    },
    {
      "epoch": 1.9645569620253165,
      "grad_norm": 1.363003506285662,
      "learning_rate": 5e-06,
      "loss": 0.4667,
      "step": 1940
    },
    {
      "epoch": 1.9746835443037973,
      "grad_norm": 1.4533412320271968,
      "learning_rate": 5e-06,
      "loss": 0.469,
      "step": 1950
    },
    {
      "epoch": 1.9848101265822784,
      "grad_norm": 1.4479310447515883,
      "learning_rate": 5e-06,
      "loss": 0.4748,
      "step": 1960
    },
    {
      "epoch": 1.9949367088607595,
      "grad_norm": 1.2966648783230335,
      "learning_rate": 5e-06,
      "loss": 0.468,
      "step": 1970
    },
    {
      "epoch": 2.0,
      "eval_loss": 0.1398227959871292,
      "eval_runtime": 506.8788,
      "eval_samples_per_second": 26.247,
      "eval_steps_per_second": 0.41,
      "step": 1975
    },
    {
      "epoch": 2.0050632911392405,
      "grad_norm": 3.06600016953371,
      "learning_rate": 5e-06,
      "loss": 0.4085,
      "step": 1980
    },
    {
      "epoch": 2.0151898734177216,
      "grad_norm": 2.0901467389116335,
      "learning_rate": 5e-06,
      "loss": 0.3545,
      "step": 1990
    },
    {
      "epoch": 2.0253164556962027,
      "grad_norm": 1.7198026240231687,
      "learning_rate": 5e-06,
      "loss": 0.3486,
      "step": 2000
    },
    {
      "epoch": 2.0354430379746837,
      "grad_norm": 1.6596559930998485,
      "learning_rate": 5e-06,
      "loss": 0.3485,
      "step": 2010
    },
    {
      "epoch": 2.0455696202531644,
      "grad_norm": 1.6010412767976676,
      "learning_rate": 5e-06,
      "loss": 0.3469,
      "step": 2020
    },
    {
      "epoch": 2.0556962025316454,
      "grad_norm": 1.619456570416184,
      "learning_rate": 5e-06,
      "loss": 0.3412,
      "step": 2030
    },
    {
      "epoch": 2.0658227848101265,
      "grad_norm": 1.5477510460994646,
      "learning_rate": 5e-06,
      "loss": 0.3499,
      "step": 2040
    },
    {
      "epoch": 2.0759493670886076,
      "grad_norm": 1.6995317518377548,
      "learning_rate": 5e-06,
      "loss": 0.3431,
      "step": 2050
    },
    {
      "epoch": 2.0860759493670886,
      "grad_norm": 1.6672377260042517,
      "learning_rate": 5e-06,
      "loss": 0.3509,
      "step": 2060
    },
    {
      "epoch": 2.0962025316455697,
      "grad_norm": 1.5137523020681678,
      "learning_rate": 5e-06,
      "loss": 0.3485,
      "step": 2070
    },
    {
      "epoch": 2.1063291139240508,
      "grad_norm": 1.5728338684227074,
      "learning_rate": 5e-06,
      "loss": 0.3414,
      "step": 2080
    },
    {
      "epoch": 2.116455696202532,
      "grad_norm": 1.7215854826795536,
      "learning_rate": 5e-06,
      "loss": 0.356,
      "step": 2090
    },
    {
      "epoch": 2.1265822784810124,
      "grad_norm": 1.547269577634038,
      "learning_rate": 5e-06,
      "loss": 0.351,
      "step": 2100
    },
    {
      "epoch": 2.1367088607594935,
      "grad_norm": 1.7846477440752144,
      "learning_rate": 5e-06,
      "loss": 0.3475,
      "step": 2110
    },
    {
      "epoch": 2.1468354430379746,
      "grad_norm": 1.6766174751768645,
      "learning_rate": 5e-06,
      "loss": 0.3507,
      "step": 2120
    },
    {
      "epoch": 2.1569620253164556,
      "grad_norm": 1.6129490070505184,
      "learning_rate": 5e-06,
      "loss": 0.3544,
      "step": 2130
    },
    {
      "epoch": 2.1670886075949367,
      "grad_norm": 1.6006266055236678,
      "learning_rate": 5e-06,
      "loss": 0.3514,
      "step": 2140
    },
    {
      "epoch": 2.1772151898734178,
      "grad_norm": 1.5978325737155568,
      "learning_rate": 5e-06,
      "loss": 0.3525,
      "step": 2150
    },
    {
      "epoch": 2.187341772151899,
      "grad_norm": 1.5951449058616392,
      "learning_rate": 5e-06,
      "loss": 0.3507,
      "step": 2160
    },
    {
      "epoch": 2.19746835443038,
      "grad_norm": 1.5513789073675182,
      "learning_rate": 5e-06,
      "loss": 0.3496,
      "step": 2170
    },
    {
      "epoch": 2.207594936708861,
      "grad_norm": 1.5229143655144641,
      "learning_rate": 5e-06,
      "loss": 0.3494,
      "step": 2180
    },
    {
      "epoch": 2.2177215189873416,
      "grad_norm": 1.660242717310412,
      "learning_rate": 5e-06,
      "loss": 0.3553,
      "step": 2190
    },
    {
      "epoch": 2.2278481012658227,
      "grad_norm": 1.6739776038113652,
      "learning_rate": 5e-06,
      "loss": 0.3607,
      "step": 2200
    },
    {
      "epoch": 2.2379746835443037,
      "grad_norm": 1.7005249152138244,
      "learning_rate": 5e-06,
      "loss": 0.3486,
      "step": 2210
    },
    {
      "epoch": 2.248101265822785,
      "grad_norm": 1.6489218772469403,
      "learning_rate": 5e-06,
      "loss": 0.3592,
      "step": 2220
    },
    {
      "epoch": 2.258227848101266,
      "grad_norm": 1.6209760997530658,
      "learning_rate": 5e-06,
      "loss": 0.3584,
      "step": 2230
    },
    {
      "epoch": 2.268354430379747,
      "grad_norm": 1.6153685359109242,
      "learning_rate": 5e-06,
      "loss": 0.3554,
      "step": 2240
    },
    {
      "epoch": 2.278481012658228,
      "grad_norm": 1.6268217139529644,
      "learning_rate": 5e-06,
      "loss": 0.3542,
      "step": 2250
    },
    {
      "epoch": 2.2886075949367086,
      "grad_norm": 1.7752820541706038,
      "learning_rate": 5e-06,
      "loss": 0.3621,
      "step": 2260
    },
    {
      "epoch": 2.2987341772151897,
      "grad_norm": 1.5709969247849427,
      "learning_rate": 5e-06,
      "loss": 0.3524,
      "step": 2270
    },
    {
      "epoch": 2.3088607594936708,
      "grad_norm": 1.5534897904208944,
      "learning_rate": 5e-06,
      "loss": 0.3563,
      "step": 2280
    },
    {
      "epoch": 2.318987341772152,
      "grad_norm": 1.6608945730503153,
      "learning_rate": 5e-06,
      "loss": 0.3565,
      "step": 2290
    },
    {
      "epoch": 2.329113924050633,
      "grad_norm": 1.5801622846665264,
      "learning_rate": 5e-06,
      "loss": 0.3622,
      "step": 2300
    },
    {
      "epoch": 2.339240506329114,
      "grad_norm": 1.5320500459364543,
      "learning_rate": 5e-06,
      "loss": 0.3589,
      "step": 2310
    },
    {
      "epoch": 2.349367088607595,
      "grad_norm": 1.581287461651533,
      "learning_rate": 5e-06,
      "loss": 0.3613,
      "step": 2320
    },
    {
      "epoch": 2.359493670886076,
      "grad_norm": 1.6038211850984352,
      "learning_rate": 5e-06,
      "loss": 0.3617,
      "step": 2330
    },
    {
      "epoch": 2.369620253164557,
      "grad_norm": 1.625966530104118,
      "learning_rate": 5e-06,
      "loss": 0.3598,
      "step": 2340
    },
    {
      "epoch": 2.379746835443038,
      "grad_norm": 1.7645641137037487,
      "learning_rate": 5e-06,
      "loss": 0.362,
      "step": 2350
    },
    {
      "epoch": 2.389873417721519,
      "grad_norm": 1.6940524616595496,
      "learning_rate": 5e-06,
      "loss": 0.361,
      "step": 2360
    },
    {
      "epoch": 2.4,
      "grad_norm": 1.5384052283713547,
      "learning_rate": 5e-06,
      "loss": 0.3639,
      "step": 2370
    },
    {
      "epoch": 2.410126582278481,
      "grad_norm": 1.6483060393926068,
      "learning_rate": 5e-06,
      "loss": 0.3586,
      "step": 2380
    },
    {
      "epoch": 2.420253164556962,
      "grad_norm": 1.6074760148392138,
      "learning_rate": 5e-06,
      "loss": 0.3648,
      "step": 2390
    },
    {
      "epoch": 2.430379746835443,
      "grad_norm": 1.7080201956192906,
      "learning_rate": 5e-06,
      "loss": 0.3554,
      "step": 2400
    },
    {
      "epoch": 2.440506329113924,
      "grad_norm": 1.625757072296565,
      "learning_rate": 5e-06,
      "loss": 0.3531,
      "step": 2410
    },
    {
      "epoch": 2.4506329113924052,
      "grad_norm": 1.6544192748138193,
      "learning_rate": 5e-06,
      "loss": 0.3659,
      "step": 2420
    },
    {
      "epoch": 2.460759493670886,
      "grad_norm": 1.6016056620993897,
      "learning_rate": 5e-06,
      "loss": 0.3618,
      "step": 2430
    },
    {
      "epoch": 2.470886075949367,
      "grad_norm": 1.6466444516635712,
      "learning_rate": 5e-06,
      "loss": 0.3651,
      "step": 2440
    },
    {
      "epoch": 2.481012658227848,
      "grad_norm": 1.6213376274576505,
      "learning_rate": 5e-06,
      "loss": 0.3611,
      "step": 2450
    },
    {
      "epoch": 2.491139240506329,
      "grad_norm": 1.61939669911579,
      "learning_rate": 5e-06,
      "loss": 0.3655,
      "step": 2460
    },
    {
      "epoch": 2.50126582278481,
      "grad_norm": 1.7185252999562315,
      "learning_rate": 5e-06,
      "loss": 0.3717,
      "step": 2470
    },
    {
      "epoch": 2.511392405063291,
      "grad_norm": 1.5701430968600476,
      "learning_rate": 5e-06,
      "loss": 0.3616,
      "step": 2480
    },
    {
      "epoch": 2.5215189873417723,
      "grad_norm": 1.6525891145092144,
      "learning_rate": 5e-06,
      "loss": 0.3618,
      "step": 2490
    },
    {
      "epoch": 2.5316455696202533,
      "grad_norm": 1.6331648861432553,
      "learning_rate": 5e-06,
      "loss": 0.3608,
      "step": 2500
    },
    {
      "epoch": 2.5417721518987344,
      "grad_norm": 1.5954836812756767,
      "learning_rate": 5e-06,
      "loss": 0.3564,
      "step": 2510
    },
    {
      "epoch": 2.5518987341772155,
      "grad_norm": 1.6221389574344411,
      "learning_rate": 5e-06,
      "loss": 0.3603,
      "step": 2520
    },
    {
      "epoch": 2.562025316455696,
      "grad_norm": 1.645557544125129,
      "learning_rate": 5e-06,
      "loss": 0.3633,
      "step": 2530
    },
    {
      "epoch": 2.572151898734177,
      "grad_norm": 1.7704063332654312,
      "learning_rate": 5e-06,
      "loss": 0.3658,
      "step": 2540
    },
    {
      "epoch": 2.5822784810126582,
      "grad_norm": 1.6779345444764124,
      "learning_rate": 5e-06,
      "loss": 0.3706,
      "step": 2550
    },
    {
      "epoch": 2.5924050632911393,
      "grad_norm": 1.6024752118154975,
      "learning_rate": 5e-06,
      "loss": 0.3618,
      "step": 2560
    },
    {
      "epoch": 2.6025316455696204,
      "grad_norm": 1.5688766989229237,
      "learning_rate": 5e-06,
      "loss": 0.3719,
      "step": 2570
    },
    {
      "epoch": 2.6126582278481014,
      "grad_norm": 1.5972321010318007,
      "learning_rate": 5e-06,
      "loss": 0.3695,
      "step": 2580
    },
    {
      "epoch": 2.622784810126582,
      "grad_norm": 1.7926779657765413,
      "learning_rate": 5e-06,
      "loss": 0.3671,
      "step": 2590
    },
    {
      "epoch": 2.632911392405063,
      "grad_norm": 1.6497318559260037,
      "learning_rate": 5e-06,
      "loss": 0.3648,
      "step": 2600
    },
    {
      "epoch": 2.643037974683544,
      "grad_norm": 1.825449432151336,
      "learning_rate": 5e-06,
      "loss": 0.3675,
      "step": 2610
    },
    {
      "epoch": 2.6531645569620252,
      "grad_norm": 1.6048797855449353,
      "learning_rate": 5e-06,
      "loss": 0.3638,
      "step": 2620
    },
    {
      "epoch": 2.6632911392405063,
      "grad_norm": 1.6207591062208428,
      "learning_rate": 5e-06,
      "loss": 0.3683,
      "step": 2630
    },
    {
      "epoch": 2.6734177215189874,
      "grad_norm": 1.7272301549499494,
      "learning_rate": 5e-06,
      "loss": 0.3685,
      "step": 2640
    },
    {
      "epoch": 2.6835443037974684,
      "grad_norm": 1.6364841258418936,
      "learning_rate": 5e-06,
      "loss": 0.3684,
      "step": 2650
    },
    {
      "epoch": 2.6936708860759495,
      "grad_norm": 1.6937643730038003,
      "learning_rate": 5e-06,
      "loss": 0.3723,
      "step": 2660
    },
    {
      "epoch": 2.7037974683544306,
      "grad_norm": 1.5774730979594618,
      "learning_rate": 5e-06,
      "loss": 0.3671,
      "step": 2670
    },
    {
      "epoch": 2.7139240506329116,
      "grad_norm": 1.6768910733845062,
      "learning_rate": 5e-06,
      "loss": 0.3715,
      "step": 2680
    },
    {
      "epoch": 2.7240506329113923,
      "grad_norm": 1.6389744367173145,
      "learning_rate": 5e-06,
      "loss": 0.3674,
      "step": 2690
    },
    {
      "epoch": 2.7341772151898733,
      "grad_norm": 1.7387883655123013,
      "learning_rate": 5e-06,
      "loss": 0.3701,
      "step": 2700
    },
    {
      "epoch": 2.7443037974683544,
      "grad_norm": 1.622855925296899,
      "learning_rate": 5e-06,
      "loss": 0.3692,
      "step": 2710
    },
    {
      "epoch": 2.7544303797468355,
      "grad_norm": 1.6049744247340423,
      "learning_rate": 5e-06,
      "loss": 0.3605,
      "step": 2720
    },
    {
      "epoch": 2.7645569620253165,
      "grad_norm": 1.674299981616671,
      "learning_rate": 5e-06,
      "loss": 0.3681,
      "step": 2730
    },
    {
      "epoch": 2.7746835443037976,
      "grad_norm": 1.6563350827590924,
      "learning_rate": 5e-06,
      "loss": 0.3732,
      "step": 2740
    },
    {
      "epoch": 2.7848101265822782,
      "grad_norm": 1.6562436415268564,
      "learning_rate": 5e-06,
      "loss": 0.3666,
      "step": 2750
    },
    {
      "epoch": 2.7949367088607593,
      "grad_norm": 1.5565903819361853,
      "learning_rate": 5e-06,
      "loss": 0.3706,
      "step": 2760
    },
    {
      "epoch": 2.8050632911392404,
      "grad_norm": 1.7142515863322454,
      "learning_rate": 5e-06,
      "loss": 0.3679,
      "step": 2770
    },
    {
      "epoch": 2.8151898734177214,
      "grad_norm": 1.5712425643932983,
      "learning_rate": 5e-06,
      "loss": 0.3761,
      "step": 2780
    },
    {
      "epoch": 2.8253164556962025,
      "grad_norm": 1.597329501161643,
      "learning_rate": 5e-06,
      "loss": 0.3697,
      "step": 2790
    },
    {
      "epoch": 2.8354430379746836,
      "grad_norm": 1.6642023307988005,
      "learning_rate": 5e-06,
      "loss": 0.3613,
      "step": 2800
    },
    {
      "epoch": 2.8455696202531646,
      "grad_norm": 1.6302362931142675,
      "learning_rate": 5e-06,
      "loss": 0.3773,
      "step": 2810
    },
    {
      "epoch": 2.8556962025316457,
      "grad_norm": 1.594346958058013,
      "learning_rate": 5e-06,
      "loss": 0.3699,
      "step": 2820
    },
    {
      "epoch": 2.8658227848101268,
      "grad_norm": 1.5803480355180004,
      "learning_rate": 5e-06,
      "loss": 0.3711,
      "step": 2830
    },
    {
      "epoch": 2.875949367088608,
      "grad_norm": 1.4800494871531567,
      "learning_rate": 5e-06,
      "loss": 0.3691,
      "step": 2840
    },
    {
      "epoch": 2.8860759493670884,
      "grad_norm": 1.6631915526393122,
      "learning_rate": 5e-06,
      "loss": 0.3747,
      "step": 2850
    },
    {
      "epoch": 2.8962025316455695,
      "grad_norm": 1.630862108030936,
      "learning_rate": 5e-06,
      "loss": 0.3674,
      "step": 2860
    },
    {
      "epoch": 2.9063291139240506,
      "grad_norm": 1.6893754841333202,
      "learning_rate": 5e-06,
      "loss": 0.373,
      "step": 2870
    },
    {
      "epoch": 2.9164556962025316,
      "grad_norm": 1.705060151220597,
      "learning_rate": 5e-06,
      "loss": 0.3723,
      "step": 2880
    },
    {
      "epoch": 2.9265822784810127,
      "grad_norm": 1.6188859279801948,
      "learning_rate": 5e-06,
      "loss": 0.3796,
      "step": 2890
    },
    {
      "epoch": 2.9367088607594938,
      "grad_norm": 1.629353248790061,
      "learning_rate": 5e-06,
      "loss": 0.3828,
      "step": 2900
    },
    {
      "epoch": 2.946835443037975,
      "grad_norm": 1.7509445495897462,
      "learning_rate": 5e-06,
      "loss": 0.3713,
      "step": 2910
    },
    {
      "epoch": 2.9569620253164555,
      "grad_norm": 1.6650857655447995,
      "learning_rate": 5e-06,
      "loss": 0.369,
      "step": 2920
    },
    {
      "epoch": 2.9670886075949365,
      "grad_norm": 1.8285248313648312,
      "learning_rate": 5e-06,
      "loss": 0.3721,
      "step": 2930
    },
    {
      "epoch": 2.9772151898734176,
      "grad_norm": 1.6243458834916522,
      "learning_rate": 5e-06,
      "loss": 0.3759,
      "step": 2940
    },
    {
      "epoch": 2.9873417721518987,
      "grad_norm": 1.6911778320262214,
      "learning_rate": 5e-06,
      "loss": 0.3746,
      "step": 2950
    },
    {
      "epoch": 2.9974683544303797,
      "grad_norm": 1.7509767039817499,
      "learning_rate": 5e-06,
      "loss": 0.3711,
      "step": 2960
    },
    {
      "epoch": 2.9984810126582278,
      "eval_loss": 0.15120381116867065,
      "eval_runtime": 508.8119,
      "eval_samples_per_second": 26.147,
      "eval_steps_per_second": 0.409,
      "step": 2961
    },
    {
      "epoch": 2.9984810126582278,
      "step": 2961,
      "total_flos": 2479683262218240.0,
      "train_loss": 0.46617310421968466,
      "train_runtime": 84499.4991,
      "train_samples_per_second": 8.974,
      "train_steps_per_second": 0.035
    }
  ],
  "logging_steps": 10,
  "max_steps": 2961,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2479683262218240.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}