{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 820,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.006097560975609756,
      "grad_norm": 470.0,
      "learning_rate": 2.4390243902439027e-06,
      "loss": 50.3127,
      "step": 1
    },
    {
      "epoch": 0.03048780487804878,
      "grad_norm": 362.0,
      "learning_rate": 1.2195121951219513e-05,
      "loss": 41.9975,
      "step": 5
    },
    {
      "epoch": 0.06097560975609756,
      "grad_norm": 199.0,
      "learning_rate": 2.4390243902439026e-05,
      "loss": 35.2694,
      "step": 10
    },
    {
      "epoch": 0.09146341463414634,
      "grad_norm": 27.875,
      "learning_rate": 3.6585365853658535e-05,
      "loss": 22.3723,
      "step": 15
    },
    {
      "epoch": 0.12195121951219512,
      "grad_norm": 21.375,
      "learning_rate": 4.878048780487805e-05,
      "loss": 18.6519,
      "step": 20
    },
    {
      "epoch": 0.1524390243902439,
      "grad_norm": 5.8125,
      "learning_rate": 6.097560975609756e-05,
      "loss": 16.0212,
      "step": 25
    },
    {
      "epoch": 0.18292682926829268,
      "grad_norm": 4.5625,
      "learning_rate": 7.317073170731707e-05,
      "loss": 15.6762,
      "step": 30
    },
    {
      "epoch": 0.21341463414634146,
      "grad_norm": 5.9375,
      "learning_rate": 8.53658536585366e-05,
      "loss": 13.4942,
      "step": 35
    },
    {
      "epoch": 0.24390243902439024,
      "grad_norm": 13.875,
      "learning_rate": 9.75609756097561e-05,
      "loss": 13.2206,
      "step": 40
    },
    {
      "epoch": 0.27439024390243905,
      "grad_norm": 40.0,
      "learning_rate": 0.00010975609756097563,
      "loss": 9.5294,
      "step": 45
    },
    {
      "epoch": 0.3048780487804878,
      "grad_norm": 8.0625,
      "learning_rate": 0.00012195121951219512,
      "loss": 2.8066,
      "step": 50
    },
    {
      "epoch": 0.3353658536585366,
      "grad_norm": 2.421875,
      "learning_rate": 0.00013414634146341464,
      "loss": 1.5733,
      "step": 55
    },
    {
      "epoch": 0.36585365853658536,
      "grad_norm": 2.0625,
      "learning_rate": 0.00014634146341463414,
      "loss": 1.3089,
      "step": 60
    },
    {
      "epoch": 0.39634146341463417,
      "grad_norm": 3.15625,
      "learning_rate": 0.00015853658536585366,
      "loss": 1.1767,
      "step": 65
    },
    {
      "epoch": 0.4268292682926829,
      "grad_norm": 7.0625,
      "learning_rate": 0.0001707317073170732,
      "loss": 1.1124,
      "step": 70
    },
    {
      "epoch": 0.4573170731707317,
      "grad_norm": 3.265625,
      "learning_rate": 0.0001829268292682927,
      "loss": 1.0577,
      "step": 75
    },
    {
      "epoch": 0.4878048780487805,
      "grad_norm": 6.59375,
      "learning_rate": 0.0001951219512195122,
      "loss": 0.9769,
      "step": 80
    },
    {
      "epoch": 0.5182926829268293,
      "grad_norm": 10.6875,
      "learning_rate": 0.00019999184556954776,
      "loss": 0.9411,
      "step": 85
    },
    {
      "epoch": 0.5487804878048781,
      "grad_norm": 1.171875,
      "learning_rate": 0.0001999420177550043,
      "loss": 0.939,
      "step": 90
    },
    {
      "epoch": 0.5792682926829268,
      "grad_norm": 2.25,
      "learning_rate": 0.00019984691491033906,
      "loss": 0.9085,
      "step": 95
    },
    {
      "epoch": 0.6097560975609756,
      "grad_norm": 0.97265625,
      "learning_rate": 0.00019970658011837404,
      "loss": 0.8808,
      "step": 100
    },
    {
      "epoch": 0.6402439024390244,
      "grad_norm": 2.046875,
      "learning_rate": 0.00019952107695258992,
      "loss": 0.8201,
      "step": 105
    },
    {
      "epoch": 0.6707317073170732,
      "grad_norm": 0.828125,
      "learning_rate": 0.00019929048944832638,
      "loss": 0.7852,
      "step": 110
    },
    {
      "epoch": 0.7012195121951219,
      "grad_norm": 2.3125,
      "learning_rate": 0.00019901492206471325,
      "loss": 0.7695,
      "step": 115
    },
    {
      "epoch": 0.7317073170731707,
      "grad_norm": 14.3125,
      "learning_rate": 0.00019869449963734893,
      "loss": 0.7996,
      "step": 120
    },
    {
      "epoch": 0.7621951219512195,
      "grad_norm": 1.0859375,
      "learning_rate": 0.00019832936732174834,
      "loss": 0.7611,
      "step": 125
    },
    {
      "epoch": 0.7926829268292683,
      "grad_norm": 29.0,
      "learning_rate": 0.00019791969052758562,
      "loss": 0.7835,
      "step": 130
    },
    {
      "epoch": 0.823170731707317,
      "grad_norm": 0.6015625,
      "learning_rate": 0.00019746565484376132,
      "loss": 0.759,
      "step": 135
    },
    {
      "epoch": 0.8536585365853658,
      "grad_norm": 0.57421875,
      "learning_rate": 0.00019696746595432828,
      "loss": 0.7598,
      "step": 140
    },
    {
      "epoch": 0.8841463414634146,
      "grad_norm": 0.9140625,
      "learning_rate": 0.0001964253495453141,
      "loss": 0.7279,
      "step": 145
    },
    {
      "epoch": 0.9146341463414634,
      "grad_norm": 1.484375,
      "learning_rate": 0.00019583955120248237,
      "loss": 0.7304,
      "step": 150
    },
    {
      "epoch": 0.9451219512195121,
      "grad_norm": 0.7890625,
      "learning_rate": 0.00019521033630007928,
      "loss": 0.7659,
      "step": 155
    },
    {
      "epoch": 0.975609756097561,
      "grad_norm": 3.03125,
      "learning_rate": 0.00019453798988061535,
      "loss": 0.7425,
      "step": 160
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.182375431060791,
      "eval_runtime": 1.0783,
      "eval_samples_per_second": 1.855,
      "eval_steps_per_second": 0.927,
      "step": 164
    },
    {
      "epoch": 1.0060975609756098,
      "grad_norm": 0.8515625,
      "learning_rate": 0.00019382281652573785,
      "loss": 0.6928,
      "step": 165
    },
    {
      "epoch": 1.0365853658536586,
      "grad_norm": 0.9375,
      "learning_rate": 0.00019306514021825118,
      "loss": 0.6592,
      "step": 170
    },
    {
      "epoch": 1.0670731707317074,
      "grad_norm": 0.8125,
      "learning_rate": 0.00019226530419534833,
      "loss": 0.6442,
      "step": 175
    },
    {
      "epoch": 1.0975609756097562,
      "grad_norm": 0.72265625,
      "learning_rate": 0.00019142367079312021,
      "loss": 0.659,
      "step": 180
    },
    {
      "epoch": 1.1280487804878048,
      "grad_norm": 0.462890625,
      "learning_rate": 0.00019054062128241264,
      "loss": 0.6355,
      "step": 185
    },
    {
      "epoch": 1.1585365853658536,
      "grad_norm": 1.1015625,
      "learning_rate": 0.00018961655569610557,
      "loss": 0.6304,
      "step": 190
    },
    {
      "epoch": 1.1890243902439024,
      "grad_norm": 0.5625,
      "learning_rate": 0.0001886518926478932,
      "loss": 0.6414,
      "step": 195
    },
    {
      "epoch": 1.2195121951219512,
      "grad_norm": 0.5,
      "learning_rate": 0.00018764706914264635,
      "loss": 0.6429,
      "step": 200
    },
    {
      "epoch": 1.25,
      "grad_norm": 1.046875,
      "learning_rate": 0.00018660254037844388,
      "loss": 0.6442,
      "step": 205
    },
    {
      "epoch": 1.2804878048780488,
      "grad_norm": 0.73046875,
      "learning_rate": 0.00018551877954036162,
      "loss": 0.6394,
      "step": 210
    },
    {
      "epoch": 1.3109756097560976,
      "grad_norm": 0.87109375,
      "learning_rate": 0.00018439627758611385,
      "loss": 0.6256,
      "step": 215
    },
    {
      "epoch": 1.3414634146341464,
      "grad_norm": 0.890625,
      "learning_rate": 0.00018323554302364272,
      "loss": 0.6397,
      "step": 220
    },
    {
      "epoch": 1.3719512195121952,
      "grad_norm": 0.96875,
      "learning_rate": 0.00018203710168075788,
      "loss": 0.6138,
      "step": 225
    },
    {
      "epoch": 1.4024390243902438,
      "grad_norm": 1.0625,
      "learning_rate": 0.0001808014964669293,
      "loss": 0.6305,
      "step": 230
    },
    {
      "epoch": 1.4329268292682926,
      "grad_norm": 0.51953125,
      "learning_rate": 0.00017952928712734268,
      "loss": 0.6213,
      "step": 235
    },
    {
      "epoch": 1.4634146341463414,
      "grad_norm": 1.78125,
      "learning_rate": 0.00017822104998932713,
      "loss": 0.6233,
      "step": 240
    },
    {
      "epoch": 1.4939024390243902,
      "grad_norm": 0.7265625,
      "learning_rate": 0.00017687737770127185,
      "loss": 0.6489,
      "step": 245
    },
    {
      "epoch": 1.524390243902439,
      "grad_norm": 0.8984375,
      "learning_rate": 0.00017549887896414851,
      "loss": 0.6058,
      "step": 250
    },
    {
      "epoch": 1.5548780487804879,
      "grad_norm": 1.015625,
      "learning_rate": 0.0001740861782557618,
      "loss": 0.6164,
      "step": 255
    },
    {
      "epoch": 1.5853658536585367,
      "grad_norm": 0.73046875,
      "learning_rate": 0.0001726399155478529,
      "loss": 0.606,
      "step": 260
    },
    {
      "epoch": 1.6158536585365852,
      "grad_norm": 0.5703125,
      "learning_rate": 0.00017116074601618417,
      "loss": 0.6051,
      "step": 265
    },
    {
      "epoch": 1.6463414634146343,
      "grad_norm": 0.7109375,
      "learning_rate": 0.0001696493397437357,
      "loss": 0.6798,
      "step": 270
    },
    {
      "epoch": 1.6768292682926829,
      "grad_norm": 1.6953125,
      "learning_rate": 0.00016810638141714934,
      "loss": 0.6205,
      "step": 275
    },
    {
      "epoch": 1.7073170731707317,
      "grad_norm": 1.015625,
      "learning_rate": 0.00016653257001655652,
      "loss": 0.6627,
      "step": 280
    },
    {
      "epoch": 1.7378048780487805,
      "grad_norm": 0.84765625,
      "learning_rate": 0.0001649286184989315,
      "loss": 0.6152,
      "step": 285
    },
    {
      "epoch": 1.7682926829268293,
      "grad_norm": 1.21875,
      "learning_rate": 0.0001632952534751122,
      "loss": 0.5972,
      "step": 290
    },
    {
      "epoch": 1.798780487804878,
      "grad_norm": 0.4609375,
      "learning_rate": 0.00016163321488063637,
      "loss": 0.6053,
      "step": 295
    },
    {
      "epoch": 1.8292682926829267,
      "grad_norm": 0.423828125,
      "learning_rate": 0.00015994325564054122,
      "loss": 0.5965,
      "step": 300
    },
    {
      "epoch": 1.8597560975609757,
      "grad_norm": 0.9296875,
      "learning_rate": 0.00015822614132827837,
      "loss": 0.6023,
      "step": 305
    },
    {
      "epoch": 1.8902439024390243,
      "grad_norm": 0.671875,
      "learning_rate": 0.00015648264981889934,
      "loss": 0.5916,
      "step": 310
    },
    {
      "epoch": 1.9207317073170733,
      "grad_norm": 0.68359375,
      "learning_rate": 0.00015471357093666804,
      "loss": 0.6082,
      "step": 315
    },
    {
      "epoch": 1.951219512195122,
      "grad_norm": 0.578125,
      "learning_rate": 0.00015291970609726007,
      "loss": 0.6176,
      "step": 320
    },
    {
      "epoch": 1.9817073170731707,
      "grad_norm": 0.55859375,
      "learning_rate": 0.00015110186794471103,
      "loss": 0.6196,
      "step": 325
    },
    {
      "epoch": 2.0,
      "eval_loss": 1.1435465812683105,
      "eval_runtime": 1.0793,
      "eval_samples_per_second": 1.853,
      "eval_steps_per_second": 0.927,
      "step": 328
    },
    {
      "epoch": 2.0121951219512195,
      "grad_norm": 0.5703125,
      "learning_rate": 0.00014926087998327837,
      "loss": 0.5623,
      "step": 330
    },
    {
      "epoch": 2.042682926829268,
      "grad_norm": 0.82421875,
      "learning_rate": 0.00014739757620438307,
      "loss": 0.5231,
      "step": 335
    },
    {
      "epoch": 2.073170731707317,
      "grad_norm": 1.25,
      "learning_rate": 0.0001455128007088009,
      "loss": 0.5611,
      "step": 340
    },
    {
      "epoch": 2.1036585365853657,
      "grad_norm": 0.5,
      "learning_rate": 0.00014360740732427367,
      "loss": 0.5497,
      "step": 345
    },
    {
      "epoch": 2.1341463414634148,
      "grad_norm": 0.443359375,
      "learning_rate": 0.00014168225921871433,
      "loss": 0.5154,
      "step": 350
    },
    {
      "epoch": 2.1646341463414633,
      "grad_norm": 0.498046875,
      "learning_rate": 0.00013973822850918055,
      "loss": 0.5307,
      "step": 355
    },
    {
      "epoch": 2.1951219512195124,
      "grad_norm": 0.79296875,
      "learning_rate": 0.0001377761958667946,
      "loss": 0.5085,
      "step": 360
    },
    {
      "epoch": 2.225609756097561,
      "grad_norm": 0.8046875,
      "learning_rate": 0.00013579705011778766,
      "loss": 0.5339,
      "step": 365
    },
    {
      "epoch": 2.2560975609756095,
      "grad_norm": 0.7890625,
      "learning_rate": 0.00013380168784085027,
      "loss": 0.5263,
      "step": 370
    },
    {
      "epoch": 2.2865853658536586,
      "grad_norm": 0.5234375,
      "learning_rate": 0.00013179101296097035,
      "loss": 0.5106,
      "step": 375
    },
    {
      "epoch": 2.317073170731707,
      "grad_norm": 0.62890625,
      "learning_rate": 0.00012976593633994346,
      "loss": 0.5371,
      "step": 380
    },
    {
      "epoch": 2.347560975609756,
      "grad_norm": 0.4140625,
      "learning_rate": 0.0001277273753637408,
      "loss": 0.5188,
      "step": 385
    },
    {
      "epoch": 2.3780487804878048,
      "grad_norm": 0.66015625,
      "learning_rate": 0.00012567625352692127,
      "loss": 0.5309,
      "step": 390
    },
    {
      "epoch": 2.408536585365854,
      "grad_norm": 1.21875,
      "learning_rate": 0.0001236135000142765,
      "loss": 0.5024,
      "step": 395
    },
    {
      "epoch": 2.4390243902439024,
      "grad_norm": 0.58203125,
      "learning_rate": 0.00012154004927989815,
      "loss": 0.5291,
      "step": 400
    },
    {
      "epoch": 2.4695121951219514,
      "grad_norm": 0.478515625,
      "learning_rate": 0.00011945684062385803,
      "loss": 0.5261,
      "step": 405
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.431640625,
      "learning_rate": 0.00011736481776669306,
      "loss": 0.5218,
      "step": 410
    },
    {
      "epoch": 2.5304878048780486,
      "grad_norm": 0.447265625,
      "learning_rate": 0.00011526492842188745,
      "loss": 0.5259,
      "step": 415
    },
    {
      "epoch": 2.5609756097560976,
      "grad_norm": 0.41796875,
      "learning_rate": 0.0001131581238665465,
      "loss": 0.5077,
      "step": 420
    },
    {
      "epoch": 2.591463414634146,
      "grad_norm": 0.42578125,
      "learning_rate": 0.00011104535851045539,
      "loss": 0.5356,
      "step": 425
    },
    {
      "epoch": 2.6219512195121952,
      "grad_norm": 0.62890625,
      "learning_rate": 0.00010892758946371944,
      "loss": 0.5158,
      "step": 430
    },
    {
      "epoch": 2.652439024390244,
      "grad_norm": 0.46484375,
      "learning_rate": 0.00010680577610318072,
      "loss": 0.5297,
      "step": 435
    },
    {
      "epoch": 2.682926829268293,
      "grad_norm": 0.4609375,
      "learning_rate": 0.00010468087963780789,
      "loss": 0.5015,
      "step": 440
    },
    {
      "epoch": 2.7134146341463414,
      "grad_norm": 0.447265625,
      "learning_rate": 0.00010255386267325602,
      "loss": 0.5169,
      "step": 445
    },
    {
      "epoch": 2.7439024390243905,
      "grad_norm": 0.6171875,
      "learning_rate": 0.00010042568877579388,
      "loss": 0.5034,
      "step": 450
    },
    {
      "epoch": 2.774390243902439,
      "grad_norm": 0.419921875,
      "learning_rate": 9.829732203579584e-05,
      "loss": 0.5113,
      "step": 455
    },
    {
      "epoch": 2.8048780487804876,
      "grad_norm": 0.396484375,
      "learning_rate": 9.616972663099647e-05,
      "loss": 0.5106,
      "step": 460
    },
    {
      "epoch": 2.8353658536585367,
      "grad_norm": 0.4765625,
      "learning_rate": 9.404386638970542e-05,
      "loss": 0.5135,
      "step": 465
    },
    {
      "epoch": 2.8658536585365852,
      "grad_norm": 0.390625,
      "learning_rate": 9.192070435418079e-05,
      "loss": 0.5222,
      "step": 470
    },
    {
      "epoch": 2.8963414634146343,
      "grad_norm": 0.55859375,
      "learning_rate": 8.980120234435849e-05,
      "loss": 0.5214,
      "step": 475
    },
    {
      "epoch": 2.926829268292683,
      "grad_norm": 0.6328125,
      "learning_rate": 8.768632052213531e-05,
      "loss": 0.5408,
      "step": 480
    },
    {
      "epoch": 2.9573170731707314,
      "grad_norm": 0.490234375,
      "learning_rate": 8.557701695640321e-05,
      "loss": 0.5012,
      "step": 485
    },
    {
      "epoch": 2.9878048780487805,
      "grad_norm": 0.41796875,
      "learning_rate": 8.347424718903151e-05,
      "loss": 0.5124,
      "step": 490
    },
    {
      "epoch": 3.0,
      "eval_loss": 1.1460928916931152,
      "eval_runtime": 1.0779,
      "eval_samples_per_second": 1.855,
      "eval_steps_per_second": 0.928,
      "step": 492
    },
    {
      "epoch": 3.018292682926829,
      "grad_norm": 0.71484375,
      "learning_rate": 8.13789638019942e-05,
      "loss": 0.4767,
      "step": 495
    },
    {
      "epoch": 3.048780487804878,
      "grad_norm": 0.53515625,
      "learning_rate": 7.929211598583794e-05,
      "loss": 0.4404,
      "step": 500
    },
    {
      "epoch": 3.0792682926829267,
      "grad_norm": 0.546875,
      "learning_rate": 7.721464910968627e-05,
      "loss": 0.447,
      "step": 505
    },
    {
      "epoch": 3.1097560975609757,
      "grad_norm": 0.40234375,
      "learning_rate": 7.514750429297528e-05,
      "loss": 0.4379,
      "step": 510
    },
    {
      "epoch": 3.1402439024390243,
      "grad_norm": 0.59765625,
      "learning_rate": 7.309161797911441e-05,
      "loss": 0.4287,
      "step": 515
    },
    {
      "epoch": 3.1707317073170733,
      "grad_norm": 0.4375,
      "learning_rate": 7.104792151126515e-05,
      "loss": 0.428,
      "step": 520
    },
    {
      "epoch": 3.201219512195122,
      "grad_norm": 0.4296875,
      "learning_rate": 6.901734071043071e-05,
      "loss": 0.4448,
      "step": 525
    },
    {
      "epoch": 3.231707317073171,
      "grad_norm": 0.4453125,
      "learning_rate": 6.700079545604708e-05,
      "loss": 0.4297,
      "step": 530
    },
    {
      "epoch": 3.2621951219512195,
      "grad_norm": 0.435546875,
      "learning_rate": 6.499919926926566e-05,
      "loss": 0.423,
      "step": 535
    },
    {
      "epoch": 3.292682926829268,
      "grad_norm": 0.44921875,
      "learning_rate": 6.301345889911637e-05,
      "loss": 0.4218,
      "step": 540
    },
    {
      "epoch": 3.323170731707317,
      "grad_norm": 0.56640625,
      "learning_rate": 6.104447391173858e-05,
      "loss": 0.4354,
      "step": 545
    },
    {
      "epoch": 3.3536585365853657,
      "grad_norm": 0.47265625,
      "learning_rate": 5.909313628286601e-05,
      "loss": 0.4233,
      "step": 550
    },
    {
      "epoch": 3.3841463414634148,
      "grad_norm": 0.42578125,
      "learning_rate": 5.716032999375006e-05,
      "loss": 0.4634,
      "step": 555
    },
    {
      "epoch": 3.4146341463414633,
      "grad_norm": 0.419921875,
      "learning_rate": 5.524693063070492e-05,
      "loss": 0.4268,
      "step": 560
    },
    {
      "epoch": 3.4451219512195124,
      "grad_norm": 0.494140625,
      "learning_rate": 5.335380498845559e-05,
      "loss": 0.4333,
      "step": 565
    },
    {
      "epoch": 3.475609756097561,
      "grad_norm": 0.46875,
      "learning_rate": 5.148181067746862e-05,
      "loss": 0.471,
      "step": 570
    },
    {
      "epoch": 3.5060975609756095,
      "grad_norm": 0.44921875,
      "learning_rate": 4.963179573544357e-05,
      "loss": 0.4116,
      "step": 575
    },
    {
      "epoch": 3.5365853658536586,
      "grad_norm": 0.458984375,
      "learning_rate": 4.7804598243140666e-05,
      "loss": 0.4301,
      "step": 580
    },
    {
      "epoch": 3.567073170731707,
      "grad_norm": 0.431640625,
      "learning_rate": 4.60010459447196e-05,
      "loss": 0.4359,
      "step": 585
    },
    {
      "epoch": 3.597560975609756,
      "grad_norm": 0.44921875,
      "learning_rate": 4.422195587276058e-05,
      "loss": 0.4322,
      "step": 590
    },
    {
      "epoch": 3.6280487804878048,
      "grad_norm": 0.52734375,
      "learning_rate": 4.2468133978137945e-05,
      "loss": 0.4283,
      "step": 595
    },
    {
      "epoch": 3.658536585365854,
      "grad_norm": 0.51171875,
      "learning_rate": 4.0740374764914136e-05,
      "loss": 0.4297,
      "step": 600
    },
    {
      "epoch": 3.6890243902439024,
      "grad_norm": 0.52734375,
      "learning_rate": 3.903946093041877e-05,
      "loss": 0.4386,
      "step": 605
    },
    {
      "epoch": 3.7195121951219514,
      "grad_norm": 0.4453125,
      "learning_rate": 3.736616301067694e-05,
      "loss": 0.4279,
      "step": 610
    },
    {
      "epoch": 3.75,
      "grad_norm": 0.53125,
      "learning_rate": 3.5721239031346066e-05,
      "loss": 0.454,
      "step": 615
    },
    {
      "epoch": 3.7804878048780486,
      "grad_norm": 0.447265625,
      "learning_rate": 3.410543416432069e-05,
      "loss": 0.4497,
      "step": 620
    },
    {
      "epoch": 3.8109756097560976,
      "grad_norm": 0.48828125,
      "learning_rate": 3.2519480390159806e-05,
      "loss": 0.4405,
      "step": 625
    },
    {
      "epoch": 3.841463414634146,
      "grad_norm": 0.55078125,
      "learning_rate": 3.096409616649023e-05,
      "loss": 0.4354,
      "step": 630
    },
    {
      "epoch": 3.8719512195121952,
      "grad_norm": 0.443359375,
      "learning_rate": 2.9439986102536043e-05,
      "loss": 0.4382,
      "step": 635
    },
    {
      "epoch": 3.902439024390244,
      "grad_norm": 0.416015625,
      "learning_rate": 2.794784063992131e-05,
      "loss": 0.4204,
      "step": 640
    },
    {
      "epoch": 3.932926829268293,
      "grad_norm": 0.515625,
      "learning_rate": 2.6488335739891178e-05,
      "loss": 0.4199,
      "step": 645
    },
    {
      "epoch": 3.9634146341463414,
      "grad_norm": 0.5234375,
      "learning_rate": 2.50621325770927e-05,
      "loss": 0.4275,
      "step": 650
    },
    {
      "epoch": 3.9939024390243905,
      "grad_norm": 0.4765625,
      "learning_rate": 2.366987724005404e-05,
      "loss": 0.4403,
      "step": 655
    },
    {
      "epoch": 4.0,
      "eval_loss": 1.206821322441101,
      "eval_runtime": 1.0795,
      "eval_samples_per_second": 1.853,
      "eval_steps_per_second": 0.926,
      "step": 656
    },
    {
      "epoch": 4.024390243902439,
      "grad_norm": 0.408203125,
      "learning_rate": 2.2312200438498043e-05,
      "loss": 0.3802,
      "step": 660
    },
    {
      "epoch": 4.054878048780488,
      "grad_norm": 0.455078125,
      "learning_rate": 2.0989717217622652e-05,
      "loss": 0.3864,
      "step": 665
    },
    {
      "epoch": 4.085365853658536,
      "grad_norm": 0.443359375,
      "learning_rate": 1.9703026679477256e-05,
      "loss": 0.3842,
      "step": 670
    },
    {
      "epoch": 4.115853658536586,
      "grad_norm": 0.5546875,
      "learning_rate": 1.8452711711561842e-05,
      "loss": 0.3832,
      "step": 675
    },
    {
      "epoch": 4.146341463414634,
      "grad_norm": 0.447265625,
      "learning_rate": 1.7239338722771327e-05,
      "loss": 0.3686,
      "step": 680
    },
    {
      "epoch": 4.176829268292683,
      "grad_norm": 0.466796875,
      "learning_rate": 1.6063457386805004e-05,
      "loss": 0.3955,
      "step": 685
    },
    {
      "epoch": 4.2073170731707314,
      "grad_norm": 0.51953125,
      "learning_rate": 1.4925600393157324e-05,
      "loss": 0.3847,
      "step": 690
    },
    {
      "epoch": 4.237804878048781,
      "grad_norm": 0.435546875,
      "learning_rate": 1.3826283205802427e-05,
      "loss": 0.3829,
      "step": 695
    },
    {
      "epoch": 4.2682926829268295,
      "grad_norm": 0.47265625,
      "learning_rate": 1.2766003829682505e-05,
      "loss": 0.3862,
      "step": 700
    },
    {
      "epoch": 4.298780487804878,
      "grad_norm": 0.4609375,
      "learning_rate": 1.1745242585104955e-05,
      "loss": 0.3965,
      "step": 705
    },
    {
      "epoch": 4.329268292682927,
      "grad_norm": 0.494140625,
      "learning_rate": 1.0764461890151112e-05,
      "loss": 0.3807,
      "step": 710
    },
    {
      "epoch": 4.359756097560975,
      "grad_norm": 0.466796875,
      "learning_rate": 9.824106051194859e-06,
      "loss": 0.3722,
      "step": 715
    },
    {
      "epoch": 4.390243902439025,
      "grad_norm": 0.451171875,
      "learning_rate": 8.924601061626048e-06,
      "loss": 0.3865,
      "step": 720
    },
    {
      "epoch": 4.420731707317073,
      "grad_norm": 0.455078125,
      "learning_rate": 8.066354408870048e-06,
      "loss": 0.3827,
      "step": 725
    },
    {
      "epoch": 4.451219512195122,
      "grad_norm": 0.470703125,
      "learning_rate": 7.249754889790539e-06,
      "loss": 0.3932,
      "step": 730
    },
    {
      "epoch": 4.4817073170731705,
      "grad_norm": 0.44140625,
      "learning_rate": 6.475172434559573e-06,
      "loss": 0.3762,
      "step": 735
    },
    {
      "epoch": 4.512195121951219,
      "grad_norm": 0.482421875,
      "learning_rate": 5.742957939074412e-06,
      "loss": 0.3825,
      "step": 740
    },
    {
      "epoch": 4.5426829268292686,
      "grad_norm": 0.435546875,
      "learning_rate": 5.0534431059970685e-06,
      "loss": 0.3846,
      "step": 745
    },
    {
      "epoch": 4.573170731707317,
      "grad_norm": 0.48828125,
      "learning_rate": 4.40694029448877e-06,
      "loss": 0.3748,
      "step": 750
    },
    {
      "epoch": 4.603658536585366,
      "grad_norm": 0.453125,
      "learning_rate": 3.803742378707198e-06,
      "loss": 0.4205,
      "step": 755
    },
    {
      "epoch": 4.634146341463414,
      "grad_norm": 0.47265625,
      "learning_rate": 3.2441226151306404e-06,
      "loss": 0.3816,
      "step": 760
    },
    {
      "epoch": 4.664634146341464,
      "grad_norm": 0.443359375,
      "learning_rate": 2.7283345187693264e-06,
      "loss": 0.3782,
      "step": 765
    },
    {
      "epoch": 4.695121951219512,
      "grad_norm": 0.52734375,
      "learning_rate": 2.256611748319792e-06,
      "loss": 0.3842,
      "step": 770
    },
    {
      "epoch": 4.725609756097561,
      "grad_norm": 0.453125,
      "learning_rate": 1.8291680003145073e-06,
      "loss": 0.4031,
      "step": 775
    },
    {
      "epoch": 4.7560975609756095,
      "grad_norm": 0.4453125,
      "learning_rate": 1.4461969123145457e-06,
      "loss": 0.3891,
      "step": 780
    },
    {
      "epoch": 4.786585365853659,
      "grad_norm": 0.443359375,
      "learning_rate": 1.107871975189234e-06,
      "loss": 0.365,
      "step": 785
    },
    {
      "epoch": 4.817073170731708,
      "grad_norm": 0.54296875,
      "learning_rate": 8.143464545226298e-07,
      "loss": 0.3849,
      "step": 790
    },
    {
      "epoch": 4.847560975609756,
      "grad_norm": 0.578125,
      "learning_rate": 5.657533211820942e-07,
      "loss": 0.3848,
      "step": 795
    },
    {
      "epoch": 4.878048780487805,
      "grad_norm": 0.50390625,
      "learning_rate": 3.622051910808666e-07,
      "loss": 0.3739,
      "step": 800
    },
    {
      "epoch": 4.908536585365853,
      "grad_norm": 0.47265625,
      "learning_rate": 2.037942741615617e-07,
      "loss": 0.392,
      "step": 805
    },
    {
      "epoch": 4.939024390243903,
      "grad_norm": 0.46484375,
      "learning_rate": 9.059233262386225e-08,
      "loss": 0.391,
      "step": 810
    },
    {
      "epoch": 4.969512195121951,
      "grad_norm": 0.419921875,
      "learning_rate": 2.2650648415334376e-08,
      "loss": 0.3862,
      "step": 815
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.486328125,
      "learning_rate": 0.0,
      "loss": 0.4072,
      "step": 820
    },
    {
      "epoch": 5.0,
      "eval_loss": 1.248147964477539,
      "eval_runtime": 1.0787,
      "eval_samples_per_second": 1.854,
      "eval_steps_per_second": 0.927,
      "step": 820
    },
    {
      "epoch": 5.0,
      "step": 820,
      "total_flos": 6.28853392705323e+17,
      "train_loss": 1.6822980254161648,
      "train_runtime": 5646.8844,
      "train_samples_per_second": 1.159,
      "train_steps_per_second": 0.145
    }
  ],
  "logging_steps": 5,
  "max_steps": 820,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 100,
  "total_flos": 6.28853392705323e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}