{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 9.989373007438894,
  "eval_steps": 500,
  "global_step": 4700,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0021253985122210413, "grad_norm": 512.0, "learning_rate": 4.2553191489361704e-07, "loss": 23.7791, "step": 1},
    {"epoch": 0.010626992561105207, "grad_norm": 488.0, "learning_rate": 2.1276595744680853e-06, "loss": 23.5159, "step": 5},
    {"epoch": 0.021253985122210415, "grad_norm": 244.0, "learning_rate": 4.255319148936171e-06, "loss": 20.5987, "step": 10},
    {"epoch": 0.031880977683315624, "grad_norm": 83.5, "learning_rate": 6.3829787234042555e-06, "loss": 17.7875, "step": 15},
    {"epoch": 0.04250797024442083, "grad_norm": 45.25, "learning_rate": 8.510638297872341e-06, "loss": 15.0704, "step": 20},
    {"epoch": 0.053134962805526036, "grad_norm": 19.125, "learning_rate": 1.0638297872340426e-05, "loss": 14.1038, "step": 25},
    {"epoch": 0.06376195536663125, "grad_norm": 9.5, "learning_rate": 1.2765957446808511e-05, "loss": 13.5893, "step": 30},
    {"epoch": 0.07438894792773645, "grad_norm": 5.96875, "learning_rate": 1.4893617021276596e-05, "loss": 12.8509, "step": 35},
    {"epoch": 0.08501594048884166, "grad_norm": 6.34375, "learning_rate": 1.7021276595744682e-05, "loss": 12.4395, "step": 40},
    {"epoch": 0.09564293304994687, "grad_norm": 8.875, "learning_rate": 1.9148936170212766e-05, "loss": 12.1862, "step": 45},
    {"epoch": 0.10626992561105207, "grad_norm": 14.5625, "learning_rate": 2.1276595744680852e-05, "loss": 11.1895, "step": 50},
    {"epoch": 0.11689691817215728, "grad_norm": 29.5, "learning_rate": 2.340425531914894e-05, "loss": 9.9832, "step": 55},
    {"epoch": 0.1275239107332625, "grad_norm": 46.75, "learning_rate": 2.5531914893617022e-05, "loss": 6.8574, "step": 60},
    {"epoch": 0.1381509032943677, "grad_norm": 9.8125, "learning_rate": 2.765957446808511e-05, "loss": 2.6513, "step": 65},
    {"epoch": 0.1487778958554729, "grad_norm": 2.9375, "learning_rate": 2.9787234042553192e-05, "loss": 1.4605, "step": 70},
    {"epoch": 0.1594048884165781, "grad_norm": 3.453125, "learning_rate": 3.191489361702128e-05, "loss": 1.1919, "step": 75},
    {"epoch": 0.17003188097768332, "grad_norm": 2.015625, "learning_rate": 3.4042553191489365e-05, "loss": 1.0938, "step": 80},
    {"epoch": 0.1806588735387885, "grad_norm": 2.265625, "learning_rate": 3.617021276595745e-05, "loss": 0.9763, "step": 85},
    {"epoch": 0.19128586609989373, "grad_norm": 4.96875, "learning_rate": 3.829787234042553e-05, "loss": 0.9307, "step": 90},
    {"epoch": 0.20191285866099895, "grad_norm": 1.5390625, "learning_rate": 4.0425531914893614e-05, "loss": 0.866, "step": 95},
    {"epoch": 0.21253985122210414, "grad_norm": 4.40625, "learning_rate": 4.2553191489361704e-05, "loss": 0.8456, "step": 100},
    {"epoch": 0.22316684378320936, "grad_norm": 2.6875, "learning_rate": 4.468085106382979e-05, "loss": 0.821, "step": 105},
    {"epoch": 0.23379383634431455, "grad_norm": 2.75, "learning_rate": 4.680851063829788e-05, "loss": 0.7823, "step": 110},
    {"epoch": 0.24442082890541977, "grad_norm": 2.953125, "learning_rate": 4.893617021276596e-05, "loss": 0.7831, "step": 115},
    {"epoch": 0.255047821466525, "grad_norm": 3.90625, "learning_rate": 5.1063829787234044e-05, "loss": 0.7535, "step": 120},
    {"epoch": 0.26567481402763016, "grad_norm": 4.3125, "learning_rate": 5.319148936170213e-05, "loss": 0.714, "step": 125},
    {"epoch": 0.2763018065887354, "grad_norm": 7.09375, "learning_rate": 5.531914893617022e-05, "loss": 0.7084, "step": 130},
    {"epoch": 0.2869287991498406, "grad_norm": 4.15625, "learning_rate": 5.744680851063831e-05, "loss": 0.7074, "step": 135},
    {"epoch": 0.2975557917109458, "grad_norm": 3.203125, "learning_rate": 5.9574468085106384e-05, "loss": 0.6913, "step": 140},
    {"epoch": 0.30818278427205104, "grad_norm": 1.9296875, "learning_rate": 6.170212765957447e-05, "loss": 0.6714, "step": 145},
    {"epoch": 0.3188097768331562, "grad_norm": 29.25, "learning_rate": 6.382978723404256e-05, "loss": 0.6761, "step": 150},
    {"epoch": 0.3294367693942614, "grad_norm": 2.921875, "learning_rate": 6.595744680851063e-05, "loss": 0.6739, "step": 155},
    {"epoch": 0.34006376195536664, "grad_norm": 3.921875, "learning_rate": 6.808510638297873e-05, "loss": 0.6572, "step": 160},
    {"epoch": 0.35069075451647186, "grad_norm": 8.25, "learning_rate": 7.021276595744681e-05, "loss": 0.644, "step": 165},
    {"epoch": 0.361317747077577, "grad_norm": 11.125, "learning_rate": 7.23404255319149e-05, "loss": 0.6699, "step": 170},
    {"epoch": 0.37194473963868224, "grad_norm": 2.828125, "learning_rate": 7.446808510638298e-05, "loss": 0.6613, "step": 175},
    {"epoch": 0.38257173219978746, "grad_norm": 4.84375, "learning_rate": 7.659574468085106e-05, "loss": 0.6376, "step": 180},
    {"epoch": 0.3931987247608927, "grad_norm": 3.796875, "learning_rate": 7.872340425531916e-05, "loss": 0.6444, "step": 185},
    {"epoch": 0.4038257173219979, "grad_norm": 2.078125, "learning_rate": 8.085106382978723e-05, "loss": 0.6349, "step": 190},
    {"epoch": 0.41445270988310307, "grad_norm": 3.53125, "learning_rate": 8.297872340425533e-05, "loss": 0.644, "step": 195},
    {"epoch": 0.4250797024442083, "grad_norm": 1.9921875, "learning_rate": 8.510638297872341e-05, "loss": 0.6224, "step": 200},
    {"epoch": 0.4357066950053135, "grad_norm": 3.1875, "learning_rate": 8.723404255319149e-05, "loss": 0.6213, "step": 205},
    {"epoch": 0.4463336875664187, "grad_norm": 6.375, "learning_rate": 8.936170212765958e-05, "loss": 0.6095, "step": 210},
    {"epoch": 0.4569606801275239, "grad_norm": 4.28125, "learning_rate": 9.148936170212766e-05, "loss": 0.6226, "step": 215},
    {"epoch": 0.4675876726886291, "grad_norm": 2.671875, "learning_rate": 9.361702127659576e-05, "loss": 0.6042, "step": 220},
    {"epoch": 0.4782146652497343, "grad_norm": 0.88671875, "learning_rate": 9.574468085106384e-05, "loss": 0.6009, "step": 225},
    {"epoch": 0.48884165781083955, "grad_norm": 1.2265625, "learning_rate": 9.787234042553192e-05, "loss": 0.5855, "step": 230},
    {"epoch": 0.49946865037194477, "grad_norm": 1.8671875, "learning_rate": 0.0001, "loss": 0.5729, "step": 235},
    {"epoch": 0.51009564293305, "grad_norm": 2.171875, "learning_rate": 0.00010212765957446809, "loss": 0.5751, "step": 240},
    {"epoch": 0.5207226354941552, "grad_norm": 13.5625, "learning_rate": 0.00010425531914893618, "loss": 0.5832, "step": 245},
    {"epoch": 0.5313496280552603, "grad_norm": 1.0859375, "learning_rate": 0.00010638297872340425, "loss": 0.5858, "step": 250},
    {"epoch": 0.5419766206163655, "grad_norm": 14.5, "learning_rate": 0.00010851063829787234, "loss": 0.5908, "step": 255},
    {"epoch": 0.5526036131774708, "grad_norm": 1.5546875, "learning_rate": 0.00011063829787234043, "loss": 0.5924, "step": 260},
    {"epoch": 0.563230605738576, "grad_norm": 1.7578125, "learning_rate": 0.00011276595744680852, "loss": 0.5704, "step": 265},
    {"epoch": 0.5738575982996812, "grad_norm": 0.7734375, "learning_rate": 0.00011489361702127661, "loss": 0.5695, "step": 270},
    {"epoch": 0.5844845908607864, "grad_norm": 2.25, "learning_rate": 0.00011702127659574468, "loss": 0.5654, "step": 275},
    {"epoch": 0.5951115834218916, "grad_norm": 2.703125, "learning_rate": 0.00011914893617021277, "loss": 0.5816, "step": 280},
    {"epoch": 0.6057385759829969, "grad_norm": 0.95703125, "learning_rate": 0.00012127659574468086, "loss": 0.5627, "step": 285},
    {"epoch": 0.6163655685441021, "grad_norm": 2.703125, "learning_rate": 0.00012340425531914893, "loss": 0.5557, "step": 290},
    {"epoch": 0.6269925611052072, "grad_norm": 1.8046875, "learning_rate": 0.00012553191489361702, "loss": 0.5571, "step": 295},
    {"epoch": 0.6376195536663124, "grad_norm": 4.46875, "learning_rate": 0.00012765957446808513, "loss": 0.544, "step": 300},
    {"epoch": 0.6482465462274176, "grad_norm": 3.421875, "learning_rate": 0.00012978723404255318, "loss": 0.5593, "step": 305},
    {"epoch": 0.6588735387885228, "grad_norm": 3.5625, "learning_rate": 0.00013191489361702127, "loss": 0.5513, "step": 310},
    {"epoch": 0.6695005313496281, "grad_norm": 9.0, "learning_rate": 0.00013404255319148938, "loss": 0.5681, "step": 315},
    {"epoch": 0.6801275239107333, "grad_norm": 1.03125, "learning_rate": 0.00013617021276595746, "loss": 0.554, "step": 320},
    {"epoch": 0.6907545164718385, "grad_norm": 1.484375, "learning_rate": 0.00013829787234042554, "loss": 0.5383, "step": 325},
    {"epoch": 0.7013815090329437, "grad_norm": 1.5234375, "learning_rate": 0.00014042553191489363, "loss": 0.5381, "step": 330},
    {"epoch": 0.7120085015940489, "grad_norm": 1.125, "learning_rate": 0.0001425531914893617, "loss": 0.5544, "step": 335},
    {"epoch": 0.722635494155154, "grad_norm": 0.96484375, "learning_rate": 0.0001446808510638298, "loss": 0.539, "step": 340},
    {"epoch": 0.7332624867162593, "grad_norm": 2.890625, "learning_rate": 0.00014680851063829788, "loss": 0.542, "step": 345},
    {"epoch": 0.7438894792773645, "grad_norm": 0.83984375, "learning_rate": 0.00014893617021276596, "loss": 0.531, "step": 350},
    {"epoch": 0.7545164718384697, "grad_norm": 3.015625, "learning_rate": 0.00015106382978723407, "loss": 0.5358, "step": 355},
    {"epoch": 0.7651434643995749, "grad_norm": 0.9296875, "learning_rate": 0.00015319148936170213, "loss": 0.5366, "step": 360},
    {"epoch": 0.7757704569606801, "grad_norm": 2.71875, "learning_rate": 0.0001553191489361702, "loss": 0.5359, "step": 365},
    {"epoch": 0.7863974495217854, "grad_norm": 1.8984375, "learning_rate": 0.00015744680851063832, "loss": 0.5506, "step": 370},
    {"epoch": 0.7970244420828906, "grad_norm": 2.484375, "learning_rate": 0.00015957446808510637, "loss": 0.5286, "step": 375},
    {"epoch": 0.8076514346439958, "grad_norm": 0.72265625, "learning_rate": 0.00016170212765957446, "loss": 0.5319, "step": 380},
    {"epoch": 0.8182784272051009, "grad_norm": 0.5859375, "learning_rate": 0.00016382978723404257, "loss": 0.5312, "step": 385},
    {"epoch": 0.8289054197662061, "grad_norm": 1.234375, "learning_rate": 0.00016595744680851065, "loss": 0.5284, "step": 390},
    {"epoch": 0.8395324123273114, "grad_norm": 2.859375, "learning_rate": 0.00016808510638297873, "loss": 0.5448, "step": 395},
    {"epoch": 0.8501594048884166, "grad_norm": 1.90625, "learning_rate": 0.00017021276595744682, "loss": 0.5292, "step": 400},
    {"epoch": 0.8607863974495218, "grad_norm": 1.0234375, "learning_rate": 0.0001723404255319149, "loss": 0.5246, "step": 405},
    {"epoch": 0.871413390010627, "grad_norm": 0.87109375, "learning_rate": 0.00017446808510638298, "loss": 0.5353, "step": 410},
    {"epoch": 0.8820403825717322, "grad_norm": 21.0, "learning_rate": 0.00017659574468085107, "loss": 0.5399, "step": 415},
    {"epoch": 0.8926673751328374, "grad_norm": 1.296875, "learning_rate": 0.00017872340425531915, "loss": 0.5405, "step": 420},
    {"epoch": 0.9032943676939427, "grad_norm": 0.58984375, "learning_rate": 0.00018085106382978726, "loss": 0.5233, "step": 425},
    {"epoch": 0.9139213602550478, "grad_norm": 2.21875, "learning_rate": 0.00018297872340425532, "loss": 0.5225, "step": 430},
    {"epoch": 0.924548352816153, "grad_norm": 0.6328125, "learning_rate": 0.0001851063829787234, "loss": 0.532, "step": 435},
    {"epoch": 0.9351753453772582, "grad_norm": 0.87109375, "learning_rate": 0.0001872340425531915, "loss": 0.5353, "step": 440},
    {"epoch": 0.9458023379383634, "grad_norm": 1.8671875, "learning_rate": 0.00018936170212765957, "loss": 0.5365, "step": 445},
    {"epoch": 0.9564293304994687, "grad_norm": 0.74609375, "learning_rate": 0.00019148936170212768, "loss": 0.5395, "step": 450},
    {"epoch": 0.9670563230605739, "grad_norm": 2.421875, "learning_rate": 0.00019361702127659576, "loss": 0.5309, "step": 455},
    {"epoch": 0.9776833156216791, "grad_norm": 4.96875, "learning_rate": 0.00019574468085106384, "loss": 0.5281, "step": 460},
    {"epoch": 0.9883103081827843, "grad_norm": 1.0234375, "learning_rate": 0.00019787234042553193, "loss": 0.5182, "step": 465},
    {"epoch": 0.9989373007438895, "grad_norm": 1.7109375, "learning_rate": 0.0002, "loss": 0.5262, "step": 470},
    {"epoch": 0.9989373007438895, "eval_loss": 1.3223822116851807, "eval_runtime": 0.8107, "eval_samples_per_second": 6.167, "eval_steps_per_second": 1.233, "step": 470},
    {"epoch": 1.0095642933049946, "grad_norm": 1.484375, "learning_rate": 0.00019999931050912237, "loss": 0.5078, "step": 475},
    {"epoch": 1.0201912858661, "grad_norm": 1.125, "learning_rate": 0.00019999724204599747, "loss": 0.4809, "step": 480},
    {"epoch": 1.030818278427205, "grad_norm": 2.703125, "learning_rate": 0.00019999379463914898, "loss": 0.4983, "step": 485},
    {"epoch": 1.0414452709883104, "grad_norm": 1.90625, "learning_rate": 0.00019998896833611603, "loss": 0.4957, "step": 490},
    {"epoch": 1.0520722635494155, "grad_norm": 0.6640625, "learning_rate": 0.00019998276320345247, "loss": 0.4901, "step": 495},
    {"epoch": 1.0626992561105206, "grad_norm": 2.171875, "learning_rate": 0.0001999751793267259, "loss": 0.4931, "step": 500},
    {"epoch": 1.073326248671626, "grad_norm": 1.40625, "learning_rate": 0.00019996621681051669, "loss": 0.4916, "step": 505},
    {"epoch": 1.083953241232731, "grad_norm": 2.75, "learning_rate": 0.0001999558757784162, "loss": 0.501, "step": 510},
    {"epoch": 1.0945802337938364, "grad_norm": 1.390625, "learning_rate": 0.00019994415637302547, "loss": 0.5011, "step": 515},
    {"epoch": 1.1052072263549415, "grad_norm": 2.625, "learning_rate": 0.0001999310587559529, "loss": 0.4966, "step": 520},
    {"epoch": 1.1158342189160468, "grad_norm": 2.703125, "learning_rate": 0.00019991658310781224, "loss": 0.5037, "step": 525},
    {"epoch": 1.126461211477152, "grad_norm": 1.0078125, "learning_rate": 0.00019990072962822007, "loss": 0.5134, "step": 530},
    {"epoch": 1.1370882040382573, "grad_norm": 0.5703125, "learning_rate": 0.00019988349853579295, "loss": 0.4997, "step": 535},
    {"epoch": 1.1477151965993624, "grad_norm": 1.0390625, "learning_rate": 0.00019986489006814452, "loss": 0.4965, "step": 540},
    {"epoch": 1.1583421891604675, "grad_norm": 7.1875, "learning_rate": 0.00019984490448188218, "loss": 0.4843, "step": 545},
    {"epoch": 1.1689691817215728, "grad_norm": 2.546875, "learning_rate": 0.00019982354205260347, "loss": 0.5033, "step": 550},
    {"epoch": 1.179596174282678, "grad_norm": 0.443359375, "learning_rate": 0.0001998008030748924, "loss": 0.4976, "step": 555},
    {"epoch": 1.1902231668437833, "grad_norm": 0.58984375, "learning_rate": 0.00019977668786231534, "loss": 0.497, "step": 560},
    {"epoch": 1.2008501594048884, "grad_norm": 0.79296875, "learning_rate": 0.00019975119674741664, "loss": 0.4973, "step": 565},
    {"epoch": 1.2114771519659937, "grad_norm": 1.171875, "learning_rate": 0.00019972433008171416, "loss": 0.5, "step": 570},
    {"epoch": 1.2221041445270988, "grad_norm": 1.609375, "learning_rate": 0.00019969608823569433, "loss": 0.4925, "step": 575},
    {"epoch": 1.2327311370882041, "grad_norm": 1.6875, "learning_rate": 0.00019966647159880703, "loss": 0.4848, "step": 580},
    {"epoch": 1.2433581296493093, "grad_norm": 2.734375, "learning_rate": 0.00019963548057946024, "loss": 0.5033, "step": 585},
    {"epoch": 1.2539851222104144, "grad_norm": 0.5390625, "learning_rate": 0.00019960311560501454, "loss": 0.4925, "step": 590},
    {"epoch": 1.2646121147715197, "grad_norm": 1.3125, "learning_rate": 0.000199569377121777, "loss": 0.487, "step": 595},
    {"epoch": 1.2752391073326248, "grad_norm": 0.44921875, "learning_rate": 0.0001995342655949951, "loss": 0.497, "step": 600},
    {"epoch": 1.2858660998937301, "grad_norm": 0.57421875, "learning_rate": 0.00019949778150885042, "loss": 0.4877, "step": 605},
    {"epoch": 1.2964930924548352, "grad_norm": 0.56640625, "learning_rate": 0.00019945992536645187, "loss": 0.4942, "step": 610},
    {"epoch": 1.3071200850159406, "grad_norm": 0.55859375, "learning_rate": 0.00019942069768982872, "loss": 0.4881, "step": 615},
    {"epoch": 1.3177470775770457, "grad_norm": 0.5390625, "learning_rate": 0.0001993800990199235, "loss": 0.4959, "step": 620},
    {"epoch": 1.328374070138151, "grad_norm": 0.61328125, "learning_rate": 0.0001993381299165844, "loss": 0.4883, "step": 625},
    {"epoch": 1.3390010626992561, "grad_norm": 0.546875, "learning_rate": 0.0001992947909585578, "loss": 0.4861, "step": 630},
    {"epoch": 1.3496280552603612, "grad_norm": 0.74609375, "learning_rate": 0.00019925008274347995, "loss": 0.4868, "step": 635},
    {"epoch": 1.3602550478214666, "grad_norm": 0.66796875, "learning_rate": 0.000199204005887869, "loss": 0.4979, "step": 640},
    {"epoch": 1.3708820403825717, "grad_norm": 1.0703125, "learning_rate": 0.00019915656102711634, "loss": 0.4885, "step": 645},
    {"epoch": 1.381509032943677, "grad_norm": 0.55078125, "learning_rate": 0.000199107748815478, "loss": 0.4928, "step": 650},
    {"epoch": 1.392136025504782, "grad_norm": 1.46875, "learning_rate": 0.00019905756992606548, "loss": 0.4879, "step": 655},
    {"epoch": 1.4027630180658874, "grad_norm": 0.70703125, "learning_rate": 0.00019900602505083648, "loss": 0.4854, "step": 660},
    {"epoch": 1.4133900106269925, "grad_norm": 5.0, "learning_rate": 0.00019895311490058542, "loss": 0.4932, "step": 665},
    {"epoch": 1.4240170031880979, "grad_norm": 0.5390625, "learning_rate": 0.0001988988402049336, "loss": 0.4911, "step": 670},
    {"epoch": 1.434643995749203, "grad_norm": 0.490234375, "learning_rate": 0.00019884320171231925, "loss": 0.4967, "step": 675},
    {"epoch": 1.445270988310308, "grad_norm": 0.859375, "learning_rate": 0.00019878620018998696, "loss": 0.4893, "step": 680},
    {"epoch": 1.4558979808714134, "grad_norm": 1.09375, "learning_rate": 0.00019872783642397733, "loss": 0.4849, "step": 685},
    {"epoch": 1.4665249734325185, "grad_norm": 0.431640625, "learning_rate": 0.00019866811121911607, "loss": 0.485, "step": 690},
    {"epoch": 1.4771519659936239, "grad_norm": 0.984375, "learning_rate": 0.00019860702539900287, "loss": 0.4993, "step": 695},
    {"epoch": 1.487778958554729, "grad_norm": 3.546875, "learning_rate": 0.000198544579806, "loss": 0.4858, "step": 700},
    {"epoch": 1.4984059511158343, "grad_norm": 0.8359375, "learning_rate": 0.00019848077530122083, "loss": 0.4781, "step": 705},
    {"epoch": 1.5090329436769394, "grad_norm": 1.109375, "learning_rate": 0.0001984156127645178, "loss": 0.4771, "step": 710},
    {"epoch": 1.5196599362380447, "grad_norm": 1.734375, "learning_rate": 0.00019834909309447045, "loss": 0.4913, "step": 715},
    {"epoch": 1.5302869287991498, "grad_norm": 0.62890625, "learning_rate": 0.00019828121720837286, "loss": 0.4957, "step": 720},
    {"epoch": 1.540913921360255, "grad_norm": 0.63671875, "learning_rate": 0.00019821198604222113, "loss": 0.4829, "step": 725},
    {"epoch": 1.5515409139213603, "grad_norm": 1.9140625, "learning_rate": 0.00019814140055070042, "loss": 0.4901, "step": 730},
    {"epoch": 1.5621679064824656, "grad_norm": 0.6640625, "learning_rate": 0.00019806946170717175, "loss": 0.4813, "step": 735},
    {"epoch": 1.5727948990435707, "grad_norm": 0.408203125, "learning_rate": 0.0001979961705036587, "loss": 0.472, "step": 740},
    {"epoch": 1.5834218916046758, "grad_norm": 0.484375, "learning_rate": 0.00019792152795083351, "loss": 0.4895, "step": 745},
    {"epoch": 1.594048884165781, "grad_norm": 1.21875, "learning_rate": 0.00019784553507800349, "loss": 0.4911, "step": 750},
    {"epoch": 1.6046758767268863, "grad_norm": 0.7421875, "learning_rate": 0.00019776819293309633, "loss": 0.4801, "step": 755},
    {"epoch": 1.6153028692879916, "grad_norm": 0.98046875, "learning_rate": 0.00019768950258264623, "loss": 0.488, "step": 760},
    {"epoch": 1.6259298618490967, "grad_norm": 0.63671875, "learning_rate": 0.00019760946511177872, "loss": 0.4795, "step": 765},
    {"epoch": 1.6365568544102018, "grad_norm": 0.94140625, "learning_rate": 0.0001975280816241959, "loss": 0.4797, "step": 770},
    {"epoch": 1.6471838469713072, "grad_norm": 5.125, "learning_rate": 0.00019744535324216127, "loss": 0.4913, "step": 775},
    {"epoch": 1.6578108395324125, "grad_norm": 0.7109375, "learning_rate": 0.00019736128110648407, "loss": 0.4751, "step": 780},
    {"epoch": 1.6684378320935176, "grad_norm": 6.5, "learning_rate": 0.00019727586637650373, "loss": 0.4956, "step": 785},
    {"epoch": 1.6790648246546227, "grad_norm": 1.1640625, "learning_rate": 0.0001971891102300738, "loss": 0.4918, "step": 790},
    {"epoch": 1.6896918172157278, "grad_norm": 0.53125, "learning_rate": 0.0001971010138635457, "loss": 0.4869, "step": 795},
    {"epoch": 1.7003188097768331, "grad_norm": 0.451171875, "learning_rate": 0.00019701157849175228, "loss": 0.4823, "step": 800},
    {"epoch": 1.7109458023379385, "grad_norm": 0.6484375, "learning_rate": 0.00019692080534799096, "loss": 0.4922, "step": 805},
    {"epoch": 1.7215727948990436, "grad_norm": 0.80078125, "learning_rate": 0.00019682869568400684, "loss": 0.4918, "step": 810},
    {"epoch": 1.7321997874601487, "grad_norm": 0.5, "learning_rate": 0.0001967352507699754, "loss": 0.4834, "step": 815},
    {"epoch": 1.742826780021254, "grad_norm": 6.21875, "learning_rate": 0.00019664047189448493, "loss": 0.4807, "step": 820},
    {"epoch": 1.7534537725823593, "grad_norm": 0.78515625, "learning_rate": 0.0001965443603645189, "loss": 0.486, "step": 825},
    {"epoch": 1.7640807651434645, "grad_norm": 1.3046875, "learning_rate": 0.00019644691750543767, "loss": 0.5035, "step": 830},
    {"epoch": 1.7747077577045696, "grad_norm": 1.40625, "learning_rate": 0.00019634814466096056, "loss": 0.4976, "step": 835},
    {"epoch": 1.7853347502656747, "grad_norm": 0.78515625, "learning_rate": 0.00019624804319314705, "loss": 0.5016, "step": 840},
    {"epoch": 1.79596174282678, "grad_norm": 1.125, "learning_rate": 0.0001961466144823781, "loss": 0.4952, "step": 845},
    {"epoch": 1.8065887353878853, "grad_norm": 0.5546875, "learning_rate": 0.00019604385992733715, "loss": 0.4849, "step": 850},
    {"epoch": 1.8172157279489904, "grad_norm": 0.61328125, "learning_rate": 0.00019593978094499076, "loss": 0.4792, "step": 855},
    {"epoch": 1.8278427205100956, "grad_norm": 0.494140625, "learning_rate": 0.00019583437897056915, "loss": 0.4858, "step": 860},
    {"epoch": 1.8384697130712009, "grad_norm": 0.85546875, "learning_rate": 0.00019572765545754626, "loss": 0.4905, "step": 865},
    {"epoch": 1.8490967056323062, "grad_norm": 1.3359375, "learning_rate": 0.00019561961187761985, "loss": 0.4786, "step": 870},
    {"epoch": 1.8597236981934113, "grad_norm": 0.5078125, "learning_rate": 0.00019551024972069126, "loss": 0.4694, "step": 875},
    {"epoch": 1.8703506907545164, "grad_norm": 0.91796875, "learning_rate": 0.00019539957049484458, "loss": 0.474, "step": 880},
    {"epoch": 1.8809776833156215, "grad_norm": 1.5, "learning_rate": 0.00019528757572632622, "loss": 0.4803, "step": 885},
    {"epoch": 1.8916046758767269, "grad_norm": 0.890625, "learning_rate": 0.00019517426695952358, "loss": 0.4798, "step": 890},
    {"epoch": 1.9022316684378322, "grad_norm": 0.57421875, "learning_rate": 0.00019505964575694385, "loss": 0.4779, "step": 895},
    {"epoch": 1.9128586609989373, "grad_norm": 0.9296875, "learning_rate": 0.0001949437136991925, "loss": 0.4796, "step": 900},
    {"epoch": 1.9234856535600424, "grad_norm": 0.6484375, "learning_rate": 0.00019482647238495152, "loss": 0.4808, "step": 905},
    {"epoch": 1.9341126461211477, "grad_norm": 0.69140625, "learning_rate": 0.00019470792343095718, "loss": 0.4837, "step": 910},
    {"epoch": 1.944739638682253, "grad_norm": 0.4921875, "learning_rate": 0.000194588068471978, "loss": 0.4861, "step": 915},
    {"epoch": 1.9553666312433582, "grad_norm": 0.5859375, "learning_rate": 0.0001944669091607919, "loss": 0.4632, "step": 920},
    {"epoch": 1.9659936238044633, "grad_norm": 0.76171875, "learning_rate": 0.00019434444716816374, "loss": 0.4767, "step": 925},
    {"epoch": 1.9766206163655684, "grad_norm": 0.47265625, "learning_rate": 0.00019422068418282202, "loss": 0.465, "step": 930},
    {"epoch": 1.9872476089266737, "grad_norm": 0.63671875, "learning_rate": 0.00019409562191143577, "loss": 0.4613, "step": 935},
    {"epoch": 1.997874601487779, "grad_norm": 0.484375, "learning_rate": 0.00019396926207859084, "loss": 0.4826, "step": 940},
    {"epoch": 2.0, "eval_loss": 1.3435465097427368, "eval_runtime": 0.7971, "eval_samples_per_second": 6.273, "eval_steps_per_second": 1.255, "step": 941},
    {"epoch": 2.008501594048884, "grad_norm": 0.734375, "learning_rate": 0.00019384160642676635, "loss": 0.4356, "step": 945},
    {"epoch": 2.0191285866099893, "grad_norm": 0.5625, "learning_rate": 0.00019371265671631037, "loss": 0.4104, "step": 950},
    {"epoch": 2.0297555791710944, "grad_norm": 0.484375, "learning_rate": 0.0001935824147254159, "loss": 0.4108, "step": 955},
    {"epoch": 2.0403825717322, "grad_norm": 0.59765625, "learning_rate": 0.00019345088225009626, "loss": 0.4187, "step": 960},
    {"epoch": 2.051009564293305, "grad_norm": 0.470703125, "learning_rate": 0.00019331806110416027, "loss": 0.4235, "step": 965},
    {"epoch": 2.06163655685441, "grad_norm": 0.42578125, "learning_rate": 0.0001931839531191873, "loss": 0.4266, "step": 970},
    {"epoch": 2.0722635494155153, "grad_norm": 0.7421875, "learning_rate": 0.000193048560144502, "loss": 0.4157, "step": 975},
    {"epoch": 2.082890541976621, "grad_norm": 0.84765625, "learning_rate": 0.00019291188404714878, "loss": 0.4255, "step": 980},
    {"epoch": 2.093517534537726, "grad_norm": 0.7421875, "learning_rate": 0.00019277392671186608, "loss": 0.4309, "step": 985},
    {"epoch": 2.104144527098831, "grad_norm": 1.0234375, "learning_rate": 0.0001926346900410604, "loss": 0.4288, "step": 990},
    {"epoch": 2.114771519659936, "grad_norm": 1.8359375, "learning_rate": 0.00019249417595478002, "loss": 0.4208, "step": 995},
    {"epoch": 2.1253985122210413, "grad_norm": 0.427734375, "learning_rate": 0.00019235238639068856, "loss": 0.4249, "step": 1000},
    {"epoch": 2.136025504782147, "grad_norm": 0.61328125, "learning_rate": 0.00019220932330403823, "loss": 0.427, "step": 1005},
    {"epoch": 2.146652497343252, "grad_norm": 0.6875, "learning_rate": 0.00019206498866764288, "loss": 0.4276, "step": 1010},
    {"epoch": 2.157279489904357, "grad_norm": 0.50390625, "learning_rate": 0.00019191938447185084, "loss": 0.4201, "step": 1015},
    {"epoch": 2.167906482465462, "grad_norm": 1.6328125, "learning_rate": 0.0001917725127245174, "loss": 0.4299, "step": 1020},
    {"epoch": 2.1785334750265677, "grad_norm": 0.5078125, "learning_rate": 0.00019162437545097719, "loss": 0.4268, "step": 1025},
    {"epoch": 2.189160467587673, "grad_norm": 1.2578125, "learning_rate": 0.0001914749746940161, "loss": 0.4241, "step": 1030},
    {"epoch": 2.199787460148778, "grad_norm": 0.69140625, "learning_rate": 0.00019132431251384335, "loss": 0.4297, "step": 1035},
    {"epoch": 2.210414452709883, "grad_norm": 0.423828125, "learning_rate": 0.00019117239098806295, "loss": 0.4275, "step": 1040},
    {"epoch": 2.221041445270988, "grad_norm": 0.55859375, "learning_rate": 0.000191019212211645, "loss": 0.4293, "step": 1045},
    {"epoch": 2.2316684378320937, "grad_norm": 0.427734375, "learning_rate": 0.00019086477829689685, "loss": 0.421, "step": 1050},
    {"epoch": 2.242295430393199, "grad_norm": 0.419921875, "learning_rate": 0.00019070909137343408, "loss": 0.4312, "step": 1055},
    {"epoch": 2.252922422954304, "grad_norm": 0.42578125, "learning_rate": 0.0001905521535881509, "loss": 0.4299, "step": 1060},
    {"epoch": 2.263549415515409, "grad_norm": 0.5859375, "learning_rate": 0.00019039396710519076, "loss": 0.4291, "step": 1065},
    {"epoch": 2.2741764080765146, "grad_norm": 0.404296875, "learning_rate": 0.00019023453410591635, "loss": 0.4236, "step": 1070},
    {"epoch": 2.2848034006376197, "grad_norm": 0.51171875, "learning_rate": 0.00019007385678887975, "loss": 0.4201, "step": 1075},
    {"epoch": 2.2954303931987248, "grad_norm": 0.47265625, "learning_rate": 0.00018991193736979175, "loss": 0.4257, "step": 1080},
    {"epoch": 2.30605738575983, "grad_norm": 0.439453125, "learning_rate": 0.0001897487780814916, "loss": 0.4219, "step": 1085},
    {"epoch": 2.316684378320935, "grad_norm": 0.50390625, "learning_rate": 0.00018958438117391618, "loss": 0.4212, "step": 1090},
    {"epoch": 2.3273113708820405, "grad_norm": 0.4765625, "learning_rate": 0.00018941874891406882, "loss": 0.4351, "step": 1095},
    {"epoch": 2.3379383634431457, "grad_norm": 0.474609375, "learning_rate": 0.00018925188358598813, "loss": 0.4141, "step": 1100},
    {"epoch": 2.3485653560042508, "grad_norm": 1.8515625, "learning_rate": 0.0001890837874907166, "loss": 0.4243, "step": 1105},
    {"epoch": 2.359192348565356, "grad_norm": 0.66015625, "learning_rate": 0.00018891446294626866, "loss": 0.4408, "step": 1110},
    {"epoch": 2.369819341126461, "grad_norm": 0.53515625, "learning_rate": 0.00018874391228759893, "loss": 0.4277, "step": 1115},
    {"epoch": 2.3804463336875665, "grad_norm": 0.546875, "learning_rate": 0.00018857213786656985, "loss": 0.4279, "step": 1120},
    {"epoch": 2.3910733262486716, "grad_norm": 0.77734375, "learning_rate": 0.00018839914205191936, "loss": 0.4259, "step": 1125},
    {"epoch": 2.4017003188097767, "grad_norm": 0.79296875, "learning_rate": 0.0001882249272292282, "loss": 0.4287, "step": 1130},
    {"epoch": 2.412327311370882, "grad_norm": 0.76953125, "learning_rate": 0.00018804949580088692, "loss": 0.4394, "step": 1135},
    {"epoch": 2.4229543039319874, "grad_norm": 0.435546875, "learning_rate": 0.00018787285018606297, "loss": 0.4268, "step": 1140},
    {"epoch": 2.4335812964930925, "grad_norm": 0.462890625, "learning_rate": 0.00018769499282066717, "loss": 0.4271, "step": 1145},
    {"epoch": 2.4442082890541976, "grad_norm": 0.6484375, "learning_rate": 0.00018751592615732005, "loss": 0.4338, "step": 1150},
    {"epoch": 2.4548352816153027, "grad_norm": 0.89453125, "learning_rate": 0.0001873356526653183, "loss": 0.4306, "step": 1155},
    {"epoch": 2.4654622741764083, "grad_norm": 1.09375, "learning_rate": 0.0001871541748306005, "loss": 0.4374, "step": 1160},
    {"epoch": 2.4760892667375134, "grad_norm": 0.44921875, "learning_rate": 0.00018697149515571284, "loss": 0.429, "step": 1165},
    {"epoch": 2.4867162592986185, "grad_norm": 1.3203125, "learning_rate": 0.00018678761615977468, "loss": 0.4351, "step": 1170},
    {"epoch": 2.4973432518597236, "grad_norm": 0.47265625, "learning_rate": 0.00018660254037844388, "loss": 0.4264, "step": 1175},
    {"epoch": 2.5079702444208287, "grad_norm": 0.58203125, "learning_rate": 0.00018641627036388169, "loss": 0.431, "step": 1180},
    {"epoch": 2.5185972369819343, "grad_norm": 0.41015625, "learning_rate": 0.00018622880868471756, "loss": 0.4265, "step": 1185},
    {"epoch": 2.5292242295430394, "grad_norm": 0.400390625, "learning_rate": 0.00018604015792601396, "loss": 0.4259, "step": 1190},
    {"epoch": 2.5398512221041445, "grad_norm": 7.6875, "learning_rate": 0.00018585032068923032, "loss": 0.4309, "step": 1195},
    {"epoch": 2.5504782146652496, "grad_norm": 0.4296875, "learning_rate": 0.00018565929959218758, "loss": 0.4254, "step": 1200},
    {"epoch": 2.5611052072263547, "grad_norm": 0.42578125, "learning_rate": 0.00018546709726903178, "loss": 0.4253, "step": 1205},
    {"epoch": 2.5717321997874603, "grad_norm": 1.0625, "learning_rate": 0.0001852737163701979, "loss": 0.4403, "step": 1210},
    {"epoch": 2.5823591923485654, "grad_norm": 0.640625, "learning_rate": 0.00018507915956237326, "loss": 0.4326, "step": 1215},
    {"epoch": 2.5929861849096705, "grad_norm": 2.25, "learning_rate": 0.00018488342952846073, "loss": 0.437, "step": 1220},
    {"epoch": 2.603613177470776, "grad_norm": 0.90625, "learning_rate": 0.00018468652896754177, "loss": 0.4344, "step": 1225},
    {"epoch": 2.614240170031881, "grad_norm": 0.87890625, "learning_rate": 0.0001844884605948392, "loss": 0.4387, "step": 1230},
    {"epoch": 2.6248671625929862, "grad_norm": 0.66796875, "learning_rate": 0.0001842892271416797, "loss": 0.4371, "step": 1235},
    {"epoch": 2.6354941551540914, "grad_norm": 0.515625, "learning_rate": 0.00018408883135545632, "loss": 0.4345, "step": 1240},
    {"epoch": 2.6461211477151965, "grad_norm": 0.8515625, "learning_rate": 0.00018388727599959033, "loss": 0.4405, "step": 1245},
    {"epoch": 2.656748140276302, "grad_norm": 1.078125, "learning_rate": 0.00018368456385349334, "loss": 0.4369, "step": 1250},
    {"epoch": 2.667375132837407, "grad_norm": 0.50390625, "learning_rate": 0.0001834806977125288, "loss": 0.4354, "step": 1255},
    {"epoch": 2.6780021253985122, "grad_norm": 0.41796875, "learning_rate": 0.0001832756803879737, "loss": 0.4297, "step": 1260},
    {"epoch": 2.6886291179596173, "grad_norm": 0.578125, "learning_rate": 0.00018306951470697946, "loss": 0.4268, "step": 1265},
    {"epoch": 2.6992561105207225, "grad_norm": 0.44140625, "learning_rate": 0.0001828622035125332, "loss": 0.4299, "step": 1270},
    {"epoch": 2.709883103081828, "grad_norm": 0.8046875, "learning_rate": 0.0001826537496634186, "loss": 0.4362, "step": 1275},
    {"epoch": 2.720510095642933, "grad_norm": 1.484375, "learning_rate": 0.00018244415603417603, "loss": 0.4364, "step": 1280},
    {"epoch": 2.731137088204038, "grad_norm": 0.953125, "learning_rate": 0.0001822334255150635, "loss": 0.439, "step": 1285},
    {"epoch": 2.7417640807651433, "grad_norm": 0.921875, "learning_rate": 0.00018202156101201645, "loss": 0.4342, "step": 1290},
    {"epoch": 2.7523910733262484, "grad_norm": 0.55859375, "learning_rate": 0.0001818085654466076, "loss": 0.4308, "step": 1295},
    {"epoch": 2.763018065887354, "grad_norm": 0.494140625, "learning_rate": 0.00018159444175600703, "loss": 0.4306, "step": 1300},
    {"epoch": 2.773645058448459, "grad_norm": 1.125, "learning_rate": 0.00018137919289294135, "loss": 0.4342, "step": 1305},
    {"epoch": 2.784272051009564, "grad_norm": 0.53125, "learning_rate": 0.00018116282182565311, "loss": 0.428, "step": 1310},
    {"epoch": 2.7948990435706698, "grad_norm": 0.50390625, "learning_rate": 0.00018094533153785984, "loss": 0.4221, "step": 1315},
    {"epoch": 2.805526036131775, "grad_norm": 1.5859375, "learning_rate": 0.00018072672502871296, "loss": 0.433, "step": 1320},
    {"epoch": 2.81615302869288, "grad_norm": 0.44140625, "learning_rate": 0.0001805070053127563, "loss": 0.4306, "step": 1325},
    {"epoch": 2.826780021253985, "grad_norm": 0.6171875, "learning_rate": 0.00018028617541988472, "loss": 0.4217, "step": 1330},
    {"epoch": 2.83740701381509, "grad_norm": 0.49609375, "learning_rate": 0.00018006423839530205, "loss": 0.426, "step": 1335},
    {"epoch": 2.8480340063761957, "grad_norm": 0.6484375, "learning_rate": 0.00017984119729947944, "loss": 0.4402, "step": 1340},
    {"epoch": 2.858660998937301, "grad_norm": 0.4609375, "learning_rate": 0.00017961705520811293, "loss": 0.4306, "step": 1345},
    {"epoch": 2.869287991498406, "grad_norm": 0.375, "learning_rate": 0.000179391815212081, "loss": 0.434, "step": 1350},
    {"epoch": 2.879914984059511, "grad_norm": 0.70703125, "learning_rate": 0.00017916548041740213, "loss": 0.4295, "step": 1355},
    {"epoch": 2.890541976620616, "grad_norm": 0.72265625, "learning_rate": 0.0001789380539451919, "loss": 0.4373, "step": 1360},
    {"epoch": 2.9011689691817217, "grad_norm": 0.4296875, "learning_rate": 0.00017870953893161975, "loss": 0.4287, "step": 1365},
    {"epoch": 2.911795961742827, "grad_norm": 0.5625, "learning_rate": 0.0001784799385278661, "loss": 0.4358, "step": 1370},
    {"epoch": 2.922422954303932, "grad_norm": 0.419921875, "learning_rate": 0.00017824925590007864, "loss": 0.4304, "step": 1375},
    {"epoch": 2.933049946865037, "grad_norm": 0.98046875, "learning_rate": 0.0001780174942293287, "loss": 0.4296, "step": 1380},
    {"epoch": 2.943676939426142, "grad_norm": 0.6796875, "learning_rate": 0.00017778465671156743, "loss": 0.4326, "step": 1385},
    {"epoch": 2.9543039319872477, "grad_norm": 0.486328125, "learning_rate": 0.00017755074655758174, "loss": 0.4348, "step": 1390},
    {"epoch": 2.964930924548353, "grad_norm": 0.61328125, "learning_rate": 0.0001773157669929499, "loss": 0.4345, "step": 1395},
    {"epoch": 2.975557917109458, "grad_norm": 0.51171875, "learning_rate": 0.00017707972125799735, "loss": 0.4259, "step": 1400},
    {"epoch": 2.9861849096705635, "grad_norm": 0.5859375, "learning_rate": 0.00017684261260775163, "loss": 0.4306, "step": 1405},
    {"epoch": 2.9968119022316686, "grad_norm": 0.60546875, "learning_rate": 0.0001766044443118978, "loss": 0.4369, "step": 1410},
    {"epoch": 2.9989373007438895, "eval_loss": 1.4787013530731201, "eval_runtime": 0.8123, "eval_samples_per_second": 6.155, "eval_steps_per_second": 1.231, "step": 1411},
    {"epoch": 3.0074388947927737, "grad_norm": 0.54296875, "learning_rate": 0.00017636521965473323, "loss": 0.3835, "step": 1415},
    {"epoch": 3.018065887353879, "grad_norm": 0.455078125, "learning_rate": 0.0001761249419351222, "loss": 0.3594, "step": 1420},
    {"epoch": 3.028692879914984, "grad_norm": 0.478515625, "learning_rate": 0.00017588361446645073, "loss": 0.3596, "step": 1425},
    {"epoch": 3.0393198724760895, "grad_norm": 0.51171875, "learning_rate": 0.00017564124057658056, "loss": 0.3504, "step": 1430},
    {"epoch": 3.0499468650371946, "grad_norm": 0.4375, "learning_rate": 0.00017539782360780334, "loss": 0.3575, "step": 1435},
    {"epoch": 3.0605738575982997, "grad_norm": 0.419921875, "learning_rate": 0.00017515336691679477, "loss": 0.3619, "step": 1440},
    {"epoch": 3.071200850159405, "grad_norm": 0.41015625, "learning_rate": 0.0001749078738745679, "loss": 0.3678, "step": 1445},
    {"epoch": 3.08182784272051, "grad_norm": 0.57421875, "learning_rate": 0.0001746613478664271, "loss": 0.3551, "step": 1450},
    {"epoch": 3.0924548352816155, "grad_norm": 0.75, "learning_rate": 0.00017441379229192098, "loss": 0.3668, "step": 1455},
    {"epoch": 3.1030818278427206, "grad_norm": 0.498046875, "learning_rate": 0.00017416521056479577, "loss": 0.3664, "step": 1460},
    {"epoch": 3.1137088204038257, "grad_norm": 1.6640625, "learning_rate": 0.0001739156061129481, "loss": 0.3709, "step": 1465},
    {"epoch": 3.124335812964931, "grad_norm": 0.8125, "learning_rate": 0.0001736649823783779, "loss": 0.3612, "step": 1470},
    {"epoch": 3.134962805526036, "grad_norm": 0.64453125, "learning_rate": 0.00017341334281714064, "loss": 0.3619, "step": 1475},
    {"epoch": 3.1455897980871415, "grad_norm": 1.0859375, "learning_rate": 0.00017316069089930007, "loss": 0.37, "step": 1480},
    {"epoch": 3.1562167906482466, "grad_norm": 0.5859375, "learning_rate": 0.00017290703010887994, "loss": 0.3717, "step": 1485},
    {"epoch": 3.1668437832093517, "grad_norm": 0.53515625, "learning_rate": 0.00017265236394381633, "loss": 0.3749, "step": 1490},
    {"epoch": 3.177470775770457, "grad_norm": 0.5, "learning_rate": 0.00017239669591590916, "loss": 0.3754, "step": 1495},
    {"epoch": 3.1880977683315623, "grad_norm": 0.55859375, "learning_rate": 0.00017214002955077393, "loss": 0.3726, "step": 1500},
    {"epoch": 3.1987247608926674, "grad_norm": 0.53125, "learning_rate": 0.00017188236838779295, "loss": 0.3726, "step": 1505},
    {"epoch": 3.2093517534537725, "grad_norm": 0.7265625, "learning_rate": 0.00017162371598006666, "loss": 0.3798, "step": 1510},
    {"epoch": 3.2199787460148777, "grad_norm": 0.94140625, "learning_rate": 0.00017136407589436457, "loss": 0.3767, "step": 1515},
    {"epoch": 3.230605738575983, "grad_norm": 0.5703125, "learning_rate": 0.0001711034517110761, "loss": 0.3763, "step": 1520},
    {"epoch": 3.2412327311370883, "grad_norm": 0.53125, "learning_rate": 0.00017084184702416115, "loss": 0.3703, "step": 1525},
    {"epoch": 3.2518597236981934, "grad_norm": 1.0234375, "learning_rate": 0.0001705792654411007, "loss": 0.378, "step": 1530},
    {"epoch": 3.2624867162592985, "grad_norm": 0.5234375, "learning_rate": 0.00017031571058284678, "loss": 0.3818, "step": 1535},
    {"epoch": 3.2731137088204036, "grad_norm": 0.55859375, "learning_rate": 0.00017005118608377288, "loss": 0.3709, "step": 1540},
    {"epoch": 3.283740701381509, "grad_norm": 0.59375, "learning_rate": 0.00016978569559162357, "loss": 0.3761, "step": 1545},
    {"epoch": 3.2943676939426143, "grad_norm": 0.44921875, "learning_rate": 0.00016951924276746425, "loss": 0.3786, "step": 1550},
    {"epoch": 3.3049946865037194, "grad_norm": 0.451171875, "learning_rate": 0.00016925183128563078, "loss": 0.3661, "step": 1555},
    {"epoch": 3.3156216790648245, "grad_norm": 0.490234375, "learning_rate": 0.00016898346483367867, "loss": 0.3721, "step": 1560},
    {"epoch": 3.3262486716259296, "grad_norm": 0.4375, "learning_rate": 0.0001687141471123324, "loss": 0.3752, "step": 1565},
    {"epoch": 3.336875664187035, "grad_norm": 0.4921875, "learning_rate": 0.00016844388183543418, "loss": 0.3784, "step": 1570},
    {"epoch": 3.3475026567481403, "grad_norm": 0.44921875, "learning_rate": 0.00016817267272989286, "loss": 0.3807, "step": 1575},
    {"epoch": 3.3581296493092454, "grad_norm": 0.55859375, "learning_rate": 0.00016790052353563253, "loss": 0.3887, "step": 1580},
    {"epoch": 3.3687566418703505, "grad_norm": 0.6484375, "learning_rate": 0.0001676274380055409, "loss": 0.3831, "step": 1585},
    {"epoch": 3.379383634431456, "grad_norm": 0.45703125, "learning_rate": 0.00016735341990541764, "loss": 0.3772, "step": 1590},
    {"epoch": 3.390010626992561, "grad_norm": 0.56640625, "learning_rate": 0.00016707847301392236, "loss": 0.3799, "step": 1595},
    {"epoch": 3.4006376195536663, "grad_norm": 0.431640625, "learning_rate": 0.0001668026011225225, "loss": 0.3854, "step": 1600},
    {"epoch": 3.4112646121147714, "grad_norm": 0.439453125, "learning_rate": 0.00016652580803544112, "loss": 0.3725, "step": 1605},
    {"epoch": 3.421891604675877, "grad_norm": 0.439453125, "learning_rate": 0.00016624809756960444, "loss": 0.3785, "step": 1610},
    {"epoch": 3.432518597236982, "grad_norm": 0.490234375, "learning_rate": 0.00016596947355458904, "loss": 0.3803, "step": 1615},
    {"epoch": 3.443145589798087, "grad_norm": 0.640625, "learning_rate": 0.0001656899398325693, "loss": 0.3893, "step": 1620},
    {"epoch": 3.4537725823591923, "grad_norm": 0.53515625, "learning_rate": 0.00016540950025826422, "loss": 0.3776, "step": 1625},
    {"epoch": 3.4643995749202974, "grad_norm": 0.494140625, "learning_rate": 0.0001651281586988844, "loss": 0.3802, "step": 1630},
    {"epoch": 3.475026567481403, "grad_norm": 0.73046875, "learning_rate": 0.00016484591903407857, "loss": 0.3765, "step": 1635},
    {"epoch": 3.485653560042508, "grad_norm": 0.57421875, "learning_rate": 0.00016456278515588024, "loss": 0.3843, "step": 1640},
    {"epoch": 3.496280552603613, "grad_norm": 0.59375, "learning_rate": 0.00016427876096865394, "loss": 0.3748, "step": 1645},
    {"epoch": 3.5069075451647183, "grad_norm": 0.4609375, "learning_rate": 0.00016399385038904138, "loss": 0.3785, "step": 1650},
    {"epoch": 3.5175345377258234, "grad_norm": 0.466796875, "learning_rate": 0.00016370805734590747, "loss": 0.3879, "step": 1655},
    {"epoch": 3.528161530286929, "grad_norm": 0.4609375, "learning_rate": 0.00016342138578028613, "loss": 0.382, "step": 1660},
    {"epoch": 3.538788522848034, "grad_norm": 0.423828125, "learning_rate": 0.00016313383964532596, "loss": 0.3915, "step": 1665},
    {"epoch": 3.549415515409139, "grad_norm": 0.42578125, "learning_rate": 0.00016284542290623567, "loss": 0.3767, "step": 1670},
    {"epoch": 3.5600425079702447, "grad_norm": 0.443359375, "learning_rate": 0.0001625561395402295, "loss": 0.373, "step": 1675},
    {"epoch": 3.57066950053135, "grad_norm": 0.4453125, "learning_rate": 0.00016226599353647228, "loss": 0.3811, "step": 1680},
    {"epoch": 3.581296493092455, "grad_norm": 0.43359375, "learning_rate": 0.00016197498889602448, "loss": 0.3842, "step": 1685},
    {"epoch": 3.59192348565356, "grad_norm": 0.59765625, "learning_rate": 0.00016168312963178697, "loss": 0.3864, "step": 1690},
    {"epoch": 3.602550478214665, "grad_norm": 1.2578125, "learning_rate": 0.00016139041976844583, "loss": 0.3835, "step": 1695},
    {"epoch": 3.6131774707757707, "grad_norm": 0.578125, "learning_rate": 0.00016109686334241655, "loss": 0.3801, "step": 1700},
    {"epoch": 3.623804463336876, "grad_norm": 0.5078125, "learning_rate": 0.00016080246440178874, "loss": 0.382, "step": 1705},
    {"epoch": 3.634431455897981, "grad_norm": 0.66796875, "learning_rate": 0.00016050722700627012, "loss": 0.3923, "step": 1710},
    {"epoch": 3.645058448459086, "grad_norm": 0.75390625, "learning_rate": 0.00016021115522713047, "loss": 0.3836, "step": 1715},
    {"epoch": 3.655685441020191, "grad_norm": 0.77734375, "learning_rate": 0.0001599142531471456, "loss": 0.3801, "step": 1720},
    {"epoch": 3.6663124335812967, "grad_norm": 0.61328125, "learning_rate": 0.00015961652486054103, "loss": 0.3871, "step": 1725},
    {"epoch": 3.6769394261424018, "grad_norm": 0.51171875, "learning_rate": 0.00015931797447293552, "loss": 0.3843, "step": 1730},
    {"epoch": 3.687566418703507, "grad_norm": 0.4765625, "learning_rate": 0.00015901860610128448, "loss": 0.3846, "step": 1735},
    {"epoch": 3.698193411264612, "grad_norm": 0.50390625, "learning_rate": 0.00015871842387382305, "loss": 0.3788, "step": 1740},
    {"epoch": 3.708820403825717, "grad_norm": 0.66015625, "learning_rate": 0.00015841743193000944, "loss": 0.3899, "step": 1745},
    {"epoch": 3.7194473963868226, "grad_norm": 0.50390625, "learning_rate": 0.00015811563442046767, "loss": 0.3863, "step": 1750},
    {"epoch": 3.7300743889479278, "grad_norm": 0.396484375, "learning_rate": 0.00015781303550693024, "loss": 0.3822, "step": 1755},
    {"epoch": 3.740701381509033, "grad_norm": 0.5859375, "learning_rate": 0.00015750963936218105, "loss": 0.3821, "step": 1760},
    {"epoch": 3.7513283740701384, "grad_norm": 0.70703125, "learning_rate": 0.00015720545016999752, "loss": 0.3809, "step": 1765},
    {"epoch": 3.761955366631243, "grad_norm": 0.515625, "learning_rate": 0.00015690047212509316, "loss": 0.3809, "step": 1770},
    {"epoch": 3.7725823591923486, "grad_norm": 0.5859375, "learning_rate": 0.00015659470943305955, "loss": 0.3815, "step": 1775},
    {"epoch": 3.7832093517534537, "grad_norm": 0.443359375, "learning_rate": 0.00015628816631030836, "loss": 0.3808, "step": 1780},
    {"epoch": 3.793836344314559, "grad_norm": 0.44140625, "learning_rate": 0.00015598084698401342, "loss": 0.38, "step": 1785},
    {"epoch": 3.8044633368756644, "grad_norm": 0.53125, "learning_rate": 0.00015567275569205218, "loss": 0.3851, "step": 1790},
    {"epoch": 3.8150903294367695, "grad_norm": 0.44140625, "learning_rate": 0.00015536389668294724, "loss": 0.3917, "step": 1795},
    {"epoch": 3.8257173219978746, "grad_norm": 0.40625, "learning_rate": 0.00015505427421580808, "loss": 0.3799, "step": 1800},
    {"epoch": 3.8363443145589797, "grad_norm": 0.45703125, "learning_rate": 0.000154743892560272, "loss": 0.3846, "step": 1805},
    {"epoch": 3.846971307120085, "grad_norm": 0.46484375, "learning_rate": 0.00015443275599644538, "loss": 0.3865, "step": 1810},
    {"epoch": 3.8575982996811904, "grad_norm": 0.486328125, "learning_rate": 0.0001541208688148447, "loss": 0.3802, "step": 1815},
    {"epoch": 3.8682252922422955, "grad_norm": 0.462890625, "learning_rate": 0.00015380823531633729, "loss": 0.3823, "step": 1820},
    {"epoch": 3.8788522848034006, "grad_norm": 0.61328125, "learning_rate": 0.00015349485981208202, "loss": 0.3866, "step": 1825},
    {"epoch": 3.8894792773645057, "grad_norm": 0.490234375, "learning_rate": 0.00015318074662346994, "loss": 0.3758, "step": 1830},
    {"epoch": 3.900106269925611, "grad_norm": 0.5625, "learning_rate": 0.00015286590008206465, "loss": 0.3858, "step": 1835},
    {"epoch": 3.9107332624867164, "grad_norm": 0.65625, "learning_rate": 0.00015255032452954245, "loss": 0.3867, "step": 1840},
    {"epoch": 3.9213602550478215, "grad_norm": 0.42578125, "learning_rate": 0.00015223402431763269, "loss": 0.3893, "step": 1845},
    {"epoch": 3.9319872476089266, "grad_norm": 0.58984375, "learning_rate": 0.00015191700380805752, "loss": 0.3791, "step": 1850},
    {"epoch": 3.942614240170032, "grad_norm": 0.51953125, "learning_rate": 0.00015159926737247202, "loss": 0.3932, "step": 1855},
    {"epoch": 3.953241232731137, "grad_norm": 0.69921875, "learning_rate": 0.00015128081939240357, "loss": 0.3794, "step": 1860},
    {"epoch": 3.9638682252922424, "grad_norm": 0.54296875, "learning_rate": 0.00015096166425919175, "loss": 0.3857, "step": 1865},
    {"epoch": 3.9744952178533475, "grad_norm": 0.412109375, "learning_rate": 0.00015064180637392764, "loss": 0.3887, "step": 1870},
    {"epoch": 3.9851222104144526, "grad_norm": 0.51171875, "learning_rate": 0.0001503212501473931, "loss": 0.3886, "step": 1875},
    {"epoch": 3.995749202975558, "grad_norm": 0.5078125, "learning_rate": 0.00015000000000000001, "loss": 0.3819, "step": 1880},
    {"epoch": 4.0, "eval_loss": 1.7431671619415283, "eval_runtime": 0.7941, "eval_samples_per_second": 6.296, "eval_steps_per_second": 1.259, "step": 1882},
    {"epoch": 4.006376195536663, "grad_norm": 0.47265625, "learning_rate": 0.0001496780603617293, "loss": 0.3464, "step": 1885},
    {"epoch": 4.017003188097768, "grad_norm": 0.46484375, "learning_rate": 0.00014935543567206984, "loss": 0.3125, "step": 1890},
    {"epoch": 4.027630180658874, "grad_norm": 0.5546875, "learning_rate": 0.00014903213037995724, "loss": 0.3159, "step": 1895},
    {"epoch": 4.038257173219979, "grad_norm": 0.478515625, "learning_rate": 0.00014870814894371245, "loss": 0.3227, "step": 1900},
    {"epoch": 4.048884165781084, "grad_norm": 0.470703125, "learning_rate": 0.00014838349583098045, "loss": 0.3122, "step": 1905},
    {"epoch": 4.059511158342189, "grad_norm": 0.43359375, "learning_rate": 0.00014805817551866838, "loss": 0.3194, "step": 1910},
    {"epoch": 4.070138150903294, "grad_norm": 0.458984375, "learning_rate": 0.00014773219249288402, "loss": 0.3123, "step": 1915},
    {"epoch": 4.0807651434644, "grad_norm": 0.4765625, "learning_rate": 0.00014740555124887375, "loss": 0.3254, "step": 1920},
    {
|
"epoch": 4.0913921360255046, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 0.00014707825629096084, |
|
"loss": 0.3178, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 4.10201912858661, |
|
"grad_norm": 0.609375, |
|
"learning_rate": 0.00014675031213248296, |
|
"loss": 0.3214, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 4.112646121147715, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 0.00014642172329573026, |
|
"loss": 0.321, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 4.12327311370882, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 0.00014609249431188278, |
|
"loss": 0.3227, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 4.133900106269926, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 0.00014576262972094828, |
|
"loss": 0.3277, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 4.1445270988310305, |
|
"grad_norm": 0.578125, |
|
"learning_rate": 0.0001454321340716992, |
|
"loss": 0.3216, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 4.155154091392136, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 0.00014510101192161018, |
|
"loss": 0.3196, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 4.165781083953242, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 0.00014476926783679538, |
|
"loss": 0.3284, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 4.176408076514346, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.00014443690639194515, |
|
"loss": 0.3244, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 4.187035069075452, |
|
"grad_norm": 0.80078125, |
|
"learning_rate": 0.00014410393217026318, |
|
"loss": 0.3205, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 4.1976620616365565, |
|
"grad_norm": 0.71875, |
|
"learning_rate": 0.0001437703497634032, |
|
"loss": 0.3216, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 4.208289054197662, |
|
"grad_norm": 0.76171875, |
|
"learning_rate": 0.00014343616377140582, |
|
"loss": 0.3178, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 4.218916046758768, |
|
"grad_norm": 0.86328125, |
|
"learning_rate": 0.00014310137880263482, |
|
"loss": 0.3219, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 4.229543039319872, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 0.00014276599947371388, |
|
"loss": 0.3262, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 4.240170031880978, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 0.00014243003040946274, |
|
"loss": 0.3287, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 4.2507970244420825, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 0.0001420934762428335, |
|
"loss": 0.328, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 4.261424017003188, |
|
"grad_norm": 0.64453125, |
|
"learning_rate": 0.00014175634161484675, |
|
"loss": 0.3334, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 4.272051009564294, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.00014141863117452745, |
|
"loss": 0.3255, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 4.282678002125398, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 0.00014108034957884094, |
|
"loss": 0.3226, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 4.293304994686504, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 0.0001407415014926288, |
|
"loss": 0.3314, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 4.3039319872476085, |
|
"grad_norm": 0.67578125, |
|
"learning_rate": 0.00014040209158854426, |
|
"loss": 0.3334, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 4.314558979808714, |
|
"grad_norm": 0.48828125, |
|
"learning_rate": 0.00014006212454698797, |
|
"loss": 0.3349, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 4.32518597236982, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 0.00013972160505604342, |
|
"loss": 0.3301, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 4.335812964930924, |
|
"grad_norm": 0.6171875, |
|
"learning_rate": 0.00013938053781141222, |
|
"loss": 0.325, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 4.34643995749203, |
|
"grad_norm": 0.5, |
|
"learning_rate": 0.00013903892751634947, |
|
"loss": 0.3293, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 4.357066950053135, |
|
"grad_norm": 0.6484375, |
|
"learning_rate": 0.00013869677888159887, |
|
"loss": 0.3285, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 4.36769394261424, |
|
"grad_norm": 0.71875, |
|
"learning_rate": 0.00013835409662532762, |
|
"loss": 0.3271, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 4.378320935175346, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.00013801088547306148, |
|
"loss": 0.3296, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 4.38894792773645, |
|
"grad_norm": 0.6015625, |
|
"learning_rate": 0.0001376671501576197, |
|
"loss": 0.3292, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 4.399574920297556, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 0.00013732289541904948, |
|
"loss": 0.3324, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 4.410201912858661, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 0.00013697812600456093, |
|
"loss": 0.3326, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 4.420828905419766, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 0.00013663284666846134, |
|
"loss": 0.3329, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 4.431455897980872, |
|
"grad_norm": 0.65234375, |
|
"learning_rate": 0.00013628706217208976, |
|
"loss": 0.3305, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 4.442082890541976, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 0.00013594077728375128, |
|
"loss": 0.3305, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 4.452709883103082, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 0.0001355939967786514, |
|
"loss": 0.3327, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 4.463336875664187, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 0.00013524672543882996, |
|
"loss": 0.3329, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 4.473963868225292, |
|
"grad_norm": 0.63671875, |
|
"learning_rate": 0.00013489896805309542, |
|
"loss": 0.3376, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 4.484590860786398, |
|
"grad_norm": 0.63671875, |
|
"learning_rate": 0.00013455072941695863, |
|
"loss": 0.329, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 4.495217853347503, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 0.00013420201433256689, |
|
"loss": 0.333, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 4.505844845908608, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 0.00013385282760863758, |
|
"loss": 0.328, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 4.516471838469713, |
|
"grad_norm": 0.439453125, |
|
"learning_rate": 0.00013350317406039187, |
|
"loss": 0.3321, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 4.527098831030818, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 0.00013315305850948846, |
|
"loss": 0.3325, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 4.537725823591924, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 0.0001328024857839569, |
|
"loss": 0.3324, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 4.548352816153029, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 0.00013245146071813114, |
|
"loss": 0.3351, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 4.558979808714134, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 0.00013209998815258273, |
|
"loss": 0.3264, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 4.569606801275239, |
|
"grad_norm": 0.6640625, |
|
"learning_rate": 0.00013174807293405428, |
|
"loss": 0.3303, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 4.580233793836344, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 0.00013139571991539238, |
|
"loss": 0.3308, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 4.5908607863974495, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 0.00013104293395548098, |
|
"loss": 0.3314, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 4.601487778958555, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 0.000130689719919174, |
|
"loss": 0.3328, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 4.61211477151966, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.00013033608267722858, |
|
"loss": 0.3337, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 4.622741764080765, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 0.0001299820271062378, |
|
"loss": 0.3343, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 4.63336875664187, |
|
"grad_norm": 0.63671875, |
|
"learning_rate": 0.00012962755808856342, |
|
"loss": 0.3317, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 4.6439957492029755, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 0.00012927268051226844, |
|
"loss": 0.3311, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 4.654622741764081, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 0.0001289173992710499, |
|
"loss": 0.3347, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 4.665249734325186, |
|
"grad_norm": 0.5859375, |
|
"learning_rate": 0.00012856171926417133, |
|
"loss": 0.3353, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 4.675876726886291, |
|
"grad_norm": 0.59375, |
|
"learning_rate": 0.00012820564539639512, |
|
"loss": 0.3393, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 4.686503719447396, |
|
"grad_norm": 0.73046875, |
|
"learning_rate": 0.00012784918257791495, |
|
"loss": 0.3373, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 4.6971307120085015, |
|
"grad_norm": 0.7265625, |
|
"learning_rate": 0.00012749233572428804, |
|
"loss": 0.3271, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 4.707757704569607, |
|
"grad_norm": 0.77734375, |
|
"learning_rate": 0.00012713510975636741, |
|
"loss": 0.3363, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 4.718384697130712, |
|
"grad_norm": 0.67578125, |
|
"learning_rate": 0.00012677750960023396, |
|
"loss": 0.3355, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 4.729011689691817, |
|
"grad_norm": 0.5703125, |
|
"learning_rate": 0.00012641954018712863, |
|
"loss": 0.3378, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 4.739638682252922, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 0.0001260612064533843, |
|
"loss": 0.3354, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 4.7502656748140275, |
|
"grad_norm": 0.5, |
|
"learning_rate": 0.0001257025133403577, |
|
"loss": 0.3363, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 4.760892667375133, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 0.0001253434657943616, |
|
"loss": 0.3296, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 4.771519659936238, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 0.00012498406876659598, |
|
"loss": 0.3381, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 4.782146652497343, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.0001246243272130804, |
|
"loss": 0.3222, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 4.792773645058449, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 0.00012426424609458518, |
|
"loss": 0.3348, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 4.8034006376195535, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 0.00012390383037656327, |
|
"loss": 0.3321, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 4.814027630180659, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 0.00012354308502908164, |
|
"loss": 0.3349, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 4.824654622741764, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.00012318201502675285, |
|
"loss": 0.3269, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 4.835281615302869, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 0.00012282062534866632, |
|
"loss": 0.3379, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 4.845908607863975, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.00012245892097831982, |
|
"loss": 0.3316, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 4.8565356004250795, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.0001220969069035506, |
|
"loss": 0.3411, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 4.867162592986185, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 0.0001217345881164667, |
|
"loss": 0.3362, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 4.877789585547291, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 0.00012137196961337811, |
|
"loss": 0.3316, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 4.888416578108395, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 0.00012100905639472779, |
|
"loss": 0.3309, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 4.899043570669501, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 0.00012064585346502286, |
|
"loss": 0.3341, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 4.9096705632306055, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 0.00012028236583276542, |
|
"loss": 0.3362, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 4.920297555791711, |
|
"grad_norm": 0.734375, |
|
"learning_rate": 0.0001199185985103836, |
|
"loss": 0.3397, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 4.930924548352817, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 0.00011955455651416246, |
|
"loss": 0.3363, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 4.941551540913921, |
|
"grad_norm": 0.48828125, |
|
"learning_rate": 0.00011919024486417463, |
|
"loss": 0.3335, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 4.952178533475027, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.00011882566858421135, |
|
"loss": 0.3311, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 4.9628055260361315, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 0.000118460832701713, |
|
"loss": 0.3346, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 4.973432518597237, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 0.00011809574224769981, |
|
"loss": 0.3334, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 4.984059511158343, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 0.00011773040225670256, |
|
"loss": 0.3309, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 4.994686503719447, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 0.00011736481776669306, |
|
"loss": 0.3345, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 4.99893730074389, |
|
"eval_loss": 2.1233744621276855, |
|
"eval_runtime": 0.8188, |
|
"eval_samples_per_second": 6.106, |
|
"eval_steps_per_second": 1.221, |
|
"step": 2352 |
|
}, |
|
{ |
|
"epoch": 5.005313496280553, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 0.0001169989938190147, |
|
"loss": 0.3151, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 5.015940488841657, |
|
"grad_norm": 0.55859375, |
|
"learning_rate": 0.00011663293545831302, |
|
"loss": 0.2706, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 5.026567481402763, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 0.000116266647732466, |
|
"loss": 0.276, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 5.0371944739638685, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 0.00011590013569251457, |
|
"loss": 0.2787, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 5.047821466524973, |
|
"grad_norm": 0.56640625, |
|
"learning_rate": 0.00011553340439259286, |
|
"loss": 0.2753, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 5.058448459086079, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.0001151664588898586, |
|
"loss": 0.2707, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 5.069075451647183, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.00011479930424442335, |
|
"loss": 0.2703, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 5.079702444208289, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 0.00011443194551928266, |
|
"loss": 0.2715, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 5.0903294367693945, |
|
"grad_norm": 0.6328125, |
|
"learning_rate": 0.00011406438778024635, |
|
"loss": 0.2739, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 5.100956429330499, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 0.00011369663609586854, |
|
"loss": 0.278, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 5.111583421891605, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.0001133286955373779, |
|
"loss": 0.2809, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 5.12221041445271, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 0.00011296057117860759, |
|
"loss": 0.2706, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 5.132837407013815, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 0.00011259226809592534, |
|
"loss": 0.2765, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 5.1434643995749205, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 0.00011222379136816345, |
|
"loss": 0.2717, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 5.154091392136025, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.00011185514607654881, |
|
"loss": 0.2796, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 5.164718384697131, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 0.00011148633730463273, |
|
"loss": 0.2801, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 5.175345377258236, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.00011111737013822088, |
|
"loss": 0.2745, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 5.185972369819341, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 0.00011074824966530312, |
|
"loss": 0.2786, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 5.1965993623804465, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 0.00011037898097598352, |
|
"loss": 0.2728, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 5.207226354941551, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 0.00011000956916240985, |
|
"loss": 0.2827, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 5.217853347502657, |
|
"grad_norm": 0.59765625, |
|
"learning_rate": 0.00010964001931870365, |
|
"loss": 0.2783, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 5.228480340063762, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 0.00010927033654088983, |
|
"loss": 0.2847, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 5.239107332624867, |
|
"grad_norm": 0.78125, |
|
"learning_rate": 0.0001089005259268265, |
|
"loss": 0.2833, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 5.2497343251859725, |
|
"grad_norm": 0.6953125, |
|
"learning_rate": 0.00010853059257613448, |
|
"loss": 0.2819, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 5.260361317747078, |
|
"grad_norm": 0.5859375, |
|
"learning_rate": 0.00010816054159012723, |
|
"loss": 0.2766, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 5.270988310308183, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 0.00010779037807174033, |
|
"loss": 0.2767, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 5.281615302869288, |
|
"grad_norm": 0.48828125, |
|
"learning_rate": 0.00010742010712546116, |
|
"loss": 0.2737, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 5.292242295430393, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 0.00010704973385725851, |
|
"loss": 0.2799, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 5.3028692879914985, |
|
"grad_norm": 0.58984375, |
|
"learning_rate": 0.00010667926337451217, |
|
"loss": 0.2815, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 5.313496280552604, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 0.00010630870078594249, |
|
"loss": 0.2796, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 5.324123273113709, |
|
"grad_norm": 0.5703125, |
|
"learning_rate": 0.00010593805120154001, |
|
"loss": 0.2764, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 5.334750265674814, |
|
"grad_norm": 0.48828125, |
|
"learning_rate": 0.00010556731973249485, |
|
"loss": 0.279, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 5.345377258235919, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 0.00010519651149112631, |
|
"loss": 0.2817, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 5.3560042507970245, |
|
"grad_norm": 0.578125, |
|
"learning_rate": 0.00010482563159081238, |
|
"loss": 0.283, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 5.36663124335813, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 0.00010445468514591925, |
|
"loss": 0.2815, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 5.377258235919235, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 0.00010408367727173067, |
|
"loss": 0.2793, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 5.38788522848034, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 0.0001037126130843776, |
|
"loss": 0.2839, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 5.398512221041445, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 0.00010334149770076747, |
|
"loss": 0.2843, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 5.4091392136025505, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 0.00010297033623851369, |
|
"loss": 0.2864, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 5.419766206163656, |
|
"grad_norm": 0.609375, |
|
"learning_rate": 0.0001025991338158651, |
|
"loss": 0.2883, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 5.430393198724761, |
|
"grad_norm": 0.609375, |
|
"learning_rate": 0.0001022278955516354, |
|
"loss": 0.2919, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 5.441020191285866, |
|
"grad_norm": 0.55859375, |
|
"learning_rate": 0.00010185662656513251, |
|
"loss": 0.28, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 5.451647183846971, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 0.00010148533197608803, |
|
"loss": 0.2815, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 5.462274176408076, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 0.00010111401690458654, |
|
"loss": 0.2858, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 5.472901168969182, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 0.00010074268647099525, |
|
"loss": 0.2824, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 5.483528161530287, |
|
"grad_norm": 0.58203125, |
|
"learning_rate": 0.00010037134579589302, |
|
"loss": 0.2863, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 5.494155154091392, |
|
"grad_norm": 0.58203125, |
|
"learning_rate": 0.0001, |
|
"loss": 0.2755, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 5.504782146652497, |
|
"grad_norm": 0.59375, |
|
"learning_rate": 9.962865420410701e-05, |
|
"loss": 0.2812, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 5.515409139213602, |
|
"grad_norm": 0.72265625, |
|
"learning_rate": 9.925731352900478e-05, |
|
"loss": 0.2781, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 5.526036131774708, |
|
"grad_norm": 0.73828125, |
|
"learning_rate": 9.888598309541347e-05, |
|
"loss": 0.2864, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 5.536663124335813, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 9.851466802391201e-05, |
|
"loss": 0.2813, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 5.547290116896918, |
|
"grad_norm": 0.60546875, |
|
"learning_rate": 9.814337343486754e-05, |
|
"loss": 0.2801, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 5.557917109458024, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 9.777210444836463e-05, |
|
"loss": 0.281, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 5.568544102019128, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 9.740086618413495e-05, |
|
"loss": 0.2817, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 5.579171094580234, |
|
"grad_norm": 0.578125, |
|
"learning_rate": 9.702966376148635e-05, |
|
"loss": 0.2859, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 5.589798087141339, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 9.665850229923258e-05, |
|
"loss": 0.2814, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 5.600425079702444, |
|
"grad_norm": 0.609375, |
|
"learning_rate": 9.62873869156224e-05, |
|
"loss": 0.2827, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 5.61105207226355, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 9.591632272826934e-05, |
|
"loss": 0.2831, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 5.621679064824654, |
|
"grad_norm": 0.609375, |
|
"learning_rate": 9.554531485408078e-05, |
|
"loss": 0.2791, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 5.63230605738576, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 9.517436840918766e-05, |
|
"loss": 0.2829, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 5.6429330499468655, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 9.480348850887373e-05, |
|
"loss": 0.2826, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 5.65356004250797, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 9.44326802675052e-05, |
|
"loss": 0.2827, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 5.664187035069076, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 9.406194879846e-05, |
|
"loss": 0.2857, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 5.67481402763018, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 9.369129921405754e-05, |
|
"loss": 0.2818, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 5.685441020191286, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 9.332073662548784e-05, |
|
"loss": 0.278, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 5.6960680127523915, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 9.295026614274152e-05, |
|
"loss": 0.2815, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 5.706695005313496, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 9.257989287453883e-05, |
|
"loss": 0.2832, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 5.717321997874602, |
|
"grad_norm": 0.59375, |
|
"learning_rate": 9.220962192825968e-05, |
|
"loss": 0.285, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 5.727948990435706, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 9.183945840987276e-05, |
|
"loss": 0.283, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 5.738575982996812, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 9.146940742386553e-05, |
|
"loss": 0.2869, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 5.7492029755579175, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 9.109947407317352e-05, |
|
"loss": 0.2876, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 5.759829968119022, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 9.072966345911019e-05, |
|
"loss": 0.2879, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 5.770456960680128, |
|
"grad_norm": 0.5, |
|
"learning_rate": 9.035998068129636e-05, |
|
"loss": 0.2773, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 5.781083953241232, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 8.999043083759017e-05, |
|
"loss": 0.2863, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 5.791710945802338, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 8.962101902401648e-05, |
|
"loss": 0.2813, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 5.8023379383634435, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 8.925175033469688e-05, |
|
"loss": 0.2772, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 5.812964930924548, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 8.888262986177913e-05, |
|
"loss": 0.2842, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 5.823591923485654, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 8.851366269536729e-05, |
|
"loss": 0.2806, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 5.834218916046758, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 8.814485392345118e-05, |
|
"loss": 0.2754, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 5.844845908607864, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 8.777620863183657e-05, |
|
"loss": 0.2852, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 5.8554729011689695, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 8.740773190407471e-05, |
|
"loss": 0.2782, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 5.866099893730074, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 8.703942882139245e-05, |
|
"loss": 0.2887, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 5.87672688629118, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 8.667130446262214e-05, |
|
"loss": 0.2812, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 5.887353878852284, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 8.630336390413147e-05, |
|
"loss": 0.2809, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 5.89798087141339, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 8.59356122197537e-05, |
|
"loss": 0.2816, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 5.9086078639744954, |
|
"grad_norm": 0.59375, |
|
"learning_rate": 8.556805448071735e-05, |
|
"loss": 0.2788, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 5.9192348565356, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 8.520069575557667e-05, |
|
"loss": 0.2773, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 5.929861849096706, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 8.483354111014141e-05, |
|
"loss": 0.2882, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 5.940488841657811, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 8.446659560740717e-05, |
|
"loss": 0.285, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 5.951115834218916, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 8.409986430748545e-05, |
|
"loss": 0.2788, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 5.961742826780021, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 8.373335226753404e-05, |
|
"loss": 0.2799, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 5.972369819341126, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 8.336706454168701e-05, |
|
"loss": 0.2835, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 5.982996811902232, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 8.300100618098534e-05, |
|
"loss": 0.2865, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 5.993623804463337, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 8.263518223330697e-05, |
|
"loss": 0.2875, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_loss": 2.5845518112182617, |
|
"eval_runtime": 0.7933, |
|
"eval_samples_per_second": 6.303, |
|
"eval_steps_per_second": 1.261, |
|
"step": 2823 |
|
}, |
|
{ |
|
"epoch": 6.004250797024442, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 8.226959774329747e-05, |
|
"loss": 0.2603, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 6.014877789585547, |
|
"grad_norm": 0.578125, |
|
"learning_rate": 8.190425775230021e-05, |
|
"loss": 0.2316, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 6.025504782146652, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 8.153916729828703e-05, |
|
"loss": 0.2244, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 6.036131774707758, |
|
"grad_norm": 0.470703125, |
|
"learning_rate": 8.117433141578866e-05, |
|
"loss": 0.2276, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 6.046758767268863, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 8.080975513582539e-05, |
|
"loss": 0.2338, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 6.057385759829968, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 8.044544348583755e-05, |
|
"loss": 0.2334, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 6.068012752391073, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 8.008140148961641e-05, |
|
"loss": 0.2319, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 6.078639744952179, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 7.971763416723459e-05, |
|
"loss": 0.2326, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 6.089266737513284, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 7.935414653497715e-05, |
|
"loss": 0.2305, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 6.099893730074389, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 7.89909436052722e-05, |
|
"loss": 0.2297, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 6.110520722635494, |
|
"grad_norm": 0.482421875, |
|
"learning_rate": 7.862803038662191e-05, |
|
"loss": 0.2251, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 6.121147715196599, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 7.826541188353329e-05, |
|
"loss": 0.2321, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 6.131774707757705, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 7.790309309644942e-05, |
|
"loss": 0.2324, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 6.14240170031881, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 7.754107902168019e-05, |
|
"loss": 0.2305, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 6.153028692879915, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 7.717937465133371e-05, |
|
"loss": 0.2258, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 6.16365568544102, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 7.681798497324716e-05, |
|
"loss": 0.2375, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 6.174282678002125, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 7.645691497091838e-05, |
|
"loss": 0.2351, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 6.184909670563231, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 7.609616962343675e-05, |
|
"loss": 0.2359, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 6.195536663124336, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 7.573575390541485e-05, |
|
"loss": 0.2396, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 6.206163655685441, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 7.537567278691964e-05, |
|
"loss": 0.2316, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 6.216790648246546, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 7.501593123340403e-05, |
|
"loss": 0.2356, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 6.227417640807651, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 7.465653420563845e-05, |
|
"loss": 0.2373, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 6.238044633368757, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 7.42974866596423e-05, |
|
"loss": 0.2349, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 6.248671625929862, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 7.393879354661577e-05, |
|
"loss": 0.2326, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 6.259298618490967, |
|
"grad_norm": 0.60546875, |
|
"learning_rate": 7.358045981287141e-05, |
|
"loss": 0.2366, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 6.269925611052072, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 7.322249039976608e-05, |
|
"loss": 0.2372, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 6.280552603613177, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 7.286489024363261e-05, |
|
"loss": 0.2319, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 6.291179596174283, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 7.2507664275712e-05, |
|
"loss": 0.2341, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 6.301806588735388, |
|
"grad_norm": 0.5703125, |
|
"learning_rate": 7.215081742208508e-05, |
|
"loss": 0.2334, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 6.312433581296493, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 7.179435460360491e-05, |
|
"loss": 0.2305, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 6.323060573857599, |
|
"grad_norm": 0.61328125, |
|
"learning_rate": 7.14382807358287e-05, |
|
"loss": 0.2351, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 6.333687566418703, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 7.108260072895013e-05, |
|
"loss": 0.2312, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 6.344314558979809, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 7.072731948773159e-05, |
|
"loss": 0.2309, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 6.354941551540914, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 7.037244191143661e-05, |
|
"loss": 0.2392, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 6.365568544102019, |
|
"grad_norm": 0.56640625, |
|
"learning_rate": 7.001797289376218e-05, |
|
"loss": 0.2363, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 6.376195536663125, |
|
"grad_norm": 0.55859375, |
|
"learning_rate": 6.966391732277143e-05, |
|
"loss": 0.2342, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 6.386822529224229, |
|
"grad_norm": 0.55859375, |
|
"learning_rate": 6.931028008082602e-05, |
|
"loss": 0.2329, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 6.397449521785335, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 6.895706604451905e-05, |
|
"loss": 0.2377, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 6.4080765143464395, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 6.860428008460762e-05, |
|
"loss": 0.2402, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 6.418703506907545, |
|
"grad_norm": 0.5703125, |
|
"learning_rate": 6.825192706594575e-05, |
|
"loss": 0.2366, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 6.429330499468651, |
|
"grad_norm": 0.6015625, |
|
"learning_rate": 6.790001184741728e-05, |
|
"loss": 0.2331, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 6.439957492029755, |
|
"grad_norm": 0.56640625, |
|
"learning_rate": 6.75485392818689e-05, |
|
"loss": 0.2337, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 6.450584484590861, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 6.719751421604309e-05, |
|
"loss": 0.2357, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 6.461211477151966, |
|
"grad_norm": 0.5, |
|
"learning_rate": 6.684694149051156e-05, |
|
"loss": 0.2395, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 6.471838469713071, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 6.649682593960814e-05, |
|
"loss": 0.2258, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 6.482465462274177, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 6.614717239136246e-05, |
|
"loss": 0.234, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 6.493092454835281, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 6.579798566743314e-05, |
|
"loss": 0.2417, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 6.503719447396387, |
|
"grad_norm": 0.66796875, |
|
"learning_rate": 6.54492705830414e-05, |
|
"loss": 0.2319, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 6.514346439957492, |
|
"grad_norm": 0.58203125, |
|
"learning_rate": 6.51010319469046e-05, |
|
"loss": 0.2325, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 6.524973432518597, |
|
"grad_norm": 0.578125, |
|
"learning_rate": 6.475327456117005e-05, |
|
"loss": 0.2353, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 6.535600425079703, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 6.440600322134864e-05, |
|
"loss": 0.2345, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 6.546227417640807, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 6.405922271624874e-05, |
|
"loss": 0.2327, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 6.556854410201913, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 6.371293782791028e-05, |
|
"loss": 0.2356, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 6.567481402763018, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 6.336715333153869e-05, |
|
"loss": 0.2336, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 6.578108395324123, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 6.302187399543911e-05, |
|
"loss": 0.2365, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 6.588735387885229, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 6.267710458095053e-05, |
|
"loss": 0.2377, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 6.599362380446333, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 6.233284984238035e-05, |
|
"loss": 0.2297, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 6.609989373007439, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 6.198911452693853e-05, |
|
"loss": 0.2347, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 6.620616365568544, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 6.164590337467243e-05, |
|
"loss": 0.2363, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 6.631243358129649, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 6.130322111840114e-05, |
|
"loss": 0.2339, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 6.641870350690755, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 6.0961072483650526e-05, |
|
"loss": 0.2328, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 6.652497343251859, |
|
"grad_norm": 0.578125, |
|
"learning_rate": 6.0619462188587793e-05, |
|
"loss": 0.234, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 6.663124335812965, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 6.027839494395664e-05, |
|
"loss": 0.2397, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 6.67375132837407, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 5.993787545301204e-05, |
|
"loss": 0.2379, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 6.684378320935175, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 5.959790841145577e-05, |
|
"loss": 0.236, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 6.695005313496281, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 5.9258498507371194e-05, |
|
"loss": 0.2393, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 6.705632306057386, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 5.891965042115907e-05, |
|
"loss": 0.2399, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 6.716259298618491, |
|
"grad_norm": 0.59765625, |
|
"learning_rate": 5.8581368825472585e-05, |
|
"loss": 0.2261, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 6.726886291179596, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 5.8243658385153285e-05, |
|
"loss": 0.2379, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 6.737513283740701, |
|
"grad_norm": 0.55859375, |
|
"learning_rate": 5.790652375716652e-05, |
|
"loss": 0.2327, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 6.748140276301807, |
|
"grad_norm": 0.58984375, |
|
"learning_rate": 5.7569969590537284e-05, |
|
"loss": 0.2299, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 6.758767268862912, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 5.7234000526286156e-05, |
|
"loss": 0.2373, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 6.769394261424017, |
|
"grad_norm": 0.55859375, |
|
"learning_rate": 5.689862119736522e-05, |
|
"loss": 0.2406, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 6.780021253985122, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 5.656383622859418e-05, |
|
"loss": 0.2394, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 6.790648246546228, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 5.622965023659683e-05, |
|
"loss": 0.2378, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 6.801275239107333, |
|
"grad_norm": 0.5703125, |
|
"learning_rate": 5.589606782973683e-05, |
|
"loss": 0.2342, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 6.811902231668438, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 5.55630936080549e-05, |
|
"loss": 0.2344, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 6.822529224229543, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 5.5230732163204615e-05, |
|
"loss": 0.2344, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 6.833156216790648, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 5.48989880783898e-05, |
|
"loss": 0.2352, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 6.843783209351754, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 5.456786592830083e-05, |
|
"loss": 0.2337, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 6.8544102019128585, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 5.423737027905173e-05, |
|
"loss": 0.2326, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 6.865037194473964, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 5.39075056881172e-05, |
|
"loss": 0.2348, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 6.875664187035069, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 5.357827670426977e-05, |
|
"loss": 0.2361, |
|
"step": 3235 |
|
}, |
|
{ |
|
"epoch": 6.886291179596174, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 5.3249687867517095e-05, |
|
"loss": 0.2298, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 6.89691817215728, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 5.292174370903919e-05, |
|
"loss": 0.2344, |
|
"step": 3245 |
|
}, |
|
{ |
|
"epoch": 6.9075451647183845, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 5.259444875112624e-05, |
|
"loss": 0.2366, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 6.91817215727949, |
|
"grad_norm": 0.8203125, |
|
"learning_rate": 5.226780750711602e-05, |
|
"loss": 0.2343, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 6.928799149840595, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 5.1941824481331626e-05, |
|
"loss": 0.236, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 6.9394261424017, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 5.1616504169019564e-05, |
|
"loss": 0.2271, |
|
"step": 3265 |
|
}, |
|
{ |
|
"epoch": 6.950053134962806, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 5.129185105628756e-05, |
|
"loss": 0.2338, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 6.9606801275239105, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 5.0967869620042794e-05, |
|
"loss": 0.2311, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 6.971307120085016, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 5.064456432793019e-05, |
|
"loss": 0.23, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 6.981934112646121, |
|
"grad_norm": 0.59765625, |
|
"learning_rate": 5.032193963827073e-05, |
|
"loss": 0.2347, |
|
"step": 3285 |
|
}, |
|
{ |
|
"epoch": 6.992561105207226, |
|
"grad_norm": 0.56640625, |
|
"learning_rate": 5.000000000000002e-05, |
|
"loss": 0.2319, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 6.99893730074389, |
|
"eval_loss": 3.1057090759277344, |
|
"eval_runtime": 0.816, |
|
"eval_samples_per_second": 6.127, |
|
"eval_steps_per_second": 1.225, |
|
"step": 3293 |
|
}, |
|
{ |
|
"epoch": 7.003188097768332, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 4.96787498526069e-05, |
|
"loss": 0.2177, |
|
"step": 3295 |
|
}, |
|
{ |
|
"epoch": 7.0138150903294365, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 4.93581936260724e-05, |
|
"loss": 0.1956, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 7.024442082890542, |
|
"grad_norm": 0.56640625, |
|
"learning_rate": 4.903833574080825e-05, |
|
"loss": 0.1954, |
|
"step": 3305 |
|
}, |
|
{ |
|
"epoch": 7.035069075451648, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 4.8719180607596484e-05, |
|
"loss": 0.1939, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 7.045696068012752, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 4.8400732627528e-05, |
|
"loss": 0.1996, |
|
"step": 3315 |
|
}, |
|
{ |
|
"epoch": 7.056323060573858, |
|
"grad_norm": 0.55859375, |
|
"learning_rate": 4.808299619194251e-05, |
|
"loss": 0.1946, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 7.0669500531349625, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 4.776597568236731e-05, |
|
"loss": 0.1914, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 7.077577045696068, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 4.744967547045754e-05, |
|
"loss": 0.1963, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 7.088204038257174, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 4.713409991793536e-05, |
|
"loss": 0.2024, |
|
"step": 3335 |
|
}, |
|
{ |
|
"epoch": 7.098831030818278, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 4.681925337653006e-05, |
|
"loss": 0.1974, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 7.109458023379384, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 4.650514018791799e-05, |
|
"loss": 0.1954, |
|
"step": 3345 |
|
}, |
|
{ |
|
"epoch": 7.1200850159404885, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 4.6191764683662744e-05, |
|
"loss": 0.2007, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 7.130712008501594, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 4.587913118515532e-05, |
|
"loss": 0.1959, |
|
"step": 3355 |
|
}, |
|
{ |
|
"epoch": 7.1413390010627, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 4.5567244003554645e-05, |
|
"loss": 0.1994, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 7.151965993623804, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 4.5256107439728035e-05, |
|
"loss": 0.1979, |
|
"step": 3365 |
|
}, |
|
{ |
|
"epoch": 7.16259298618491, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 4.494572578419194e-05, |
|
"loss": 0.1951, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 7.1732199787460145, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 4.463610331705273e-05, |
|
"loss": 0.1996, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 7.18384697130712, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 4.432724430794786e-05, |
|
"loss": 0.1989, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 7.194473963868226, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 4.4019153015986546e-05, |
|
"loss": 0.2009, |
|
"step": 3385 |
|
}, |
|
{ |
|
"epoch": 7.20510095642933, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 4.371183368969165e-05, |
|
"loss": 0.1926, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 7.215727948990436, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 4.340529056694047e-05, |
|
"loss": 0.1997, |
|
"step": 3395 |
|
}, |
|
{ |
|
"epoch": 7.226354941551541, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 4.309952787490689e-05, |
|
"loss": 0.1969, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 7.236981934112646, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 4.279454983000251e-05, |
|
"loss": 0.1974, |
|
"step": 3405 |
|
}, |
|
{ |
|
"epoch": 7.247608926673752, |
|
"grad_norm": 0.63671875, |
|
"learning_rate": 4.249036063781896e-05, |
|
"loss": 0.1969, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 7.258235919234856, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 4.2186964493069794e-05, |
|
"loss": 0.1953, |
|
"step": 3415 |
|
}, |
|
{ |
|
"epoch": 7.268862911795962, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 4.1884365579532346e-05, |
|
"loss": 0.1973, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 7.279489904357067, |
|
"grad_norm": 0.62890625, |
|
"learning_rate": 4.158256806999059e-05, |
|
"loss": 0.1938, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 7.290116896918172, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 4.128157612617696e-05, |
|
"loss": 0.1964, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 7.3007438894792775, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 4.0981393898715545e-05, |
|
"loss": 0.202, |
|
"step": 3435 |
|
}, |
|
{ |
|
"epoch": 7.311370882040382, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 4.0682025527064486e-05, |
|
"loss": 0.1964, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 7.321997874601488, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 4.038347513945898e-05, |
|
"loss": 0.1999, |
|
"step": 3445 |
|
}, |
|
{ |
|
"epoch": 7.332624867162593, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 4.008574685285442e-05, |
|
"loss": 0.1938, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 7.343251859723698, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 3.978884477286956e-05, |
|
"loss": 0.1941, |
|
"step": 3455 |
|
}, |
|
{ |
|
"epoch": 7.3538788522848035, |
|
"grad_norm": 0.5859375, |
|
"learning_rate": 3.94927729937299e-05, |
|
"loss": 0.1996, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 7.364505844845908, |
|
"grad_norm": 0.578125, |
|
"learning_rate": 3.9197535598211274e-05, |
|
"loss": 0.2, |
|
"step": 3465 |
|
}, |
|
{ |
|
"epoch": 7.375132837407014, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 3.890313665758348e-05, |
|
"loss": 0.1937, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 7.385759829968119, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 3.860958023155422e-05, |
|
"loss": 0.1991, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 7.396386822529224, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 3.8316870368213e-05, |
|
"loss": 0.1979, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 7.4070138150903295, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 3.802501110397553e-05, |
|
"loss": 0.2021, |
|
"step": 3485 |
|
}, |
|
{ |
|
"epoch": 7.417640807651434, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 3.773400646352769e-05, |
|
"loss": 0.2034, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 7.42826780021254, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 3.744386045977052e-05, |
|
"loss": 0.2033, |
|
"step": 3495 |
|
}, |
|
{ |
|
"epoch": 7.438894792773645, |
|
"grad_norm": 0.5, |
|
"learning_rate": 3.7154577093764334e-05, |
|
"loss": 0.2012, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 7.44952178533475, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 3.686616035467408e-05, |
|
"loss": 0.1952, |
|
"step": 3505 |
|
}, |
|
{ |
|
"epoch": 7.4601487778958555, |
|
"grad_norm": 0.57421875, |
|
"learning_rate": 3.657861421971388e-05, |
|
"loss": 0.2011, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 7.470775770456961, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 3.629194265409255e-05, |
|
"loss": 0.2033, |
|
"step": 3515 |
|
}, |
|
{ |
|
"epoch": 7.481402763018066, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 3.6006149610958625e-05, |
|
"loss": 0.2003, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 7.492029755579171, |
|
"grad_norm": 0.56640625, |
|
"learning_rate": 3.5721239031346066e-05, |
|
"loss": 0.1988, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 7.502656748140276, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 3.543721484411976e-05, |
|
"loss": 0.1903, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 7.5132837407013815, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 3.515408096592144e-05, |
|
"loss": 0.1991, |
|
"step": 3535 |
|
}, |
|
{ |
|
"epoch": 7.523910733262487, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 3.487184130111562e-05, |
|
"loss": 0.1967, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 7.534537725823592, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 3.459049974173579e-05, |
|
"loss": 0.1983, |
|
"step": 3545 |
|
}, |
|
{ |
|
"epoch": 7.545164718384697, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 3.4310060167430725e-05, |
|
"loss": 0.1998, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 7.555791710945803, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 3.4030526445410984e-05, |
|
"loss": 0.198, |
|
"step": 3555 |
|
}, |
|
{ |
|
"epoch": 7.5664187035069075, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 3.375190243039556e-05, |
|
"loss": 0.1961, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 7.577045696068013, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 3.3474191964558885e-05, |
|
"loss": 0.1974, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 7.587672688629118, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 3.319739887747752e-05, |
|
"loss": 0.195, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 7.598299681190223, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 3.292152698607768e-05, |
|
"loss": 0.1992, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 7.608926673751329, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 3.264658009458239e-05, |
|
"loss": 0.1996, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 7.6195536663124335, |
|
"grad_norm": 0.5, |
|
"learning_rate": 3.2372561994459136e-05, |
|
"loss": 0.1966, |
|
"step": 3585 |
|
}, |
|
{ |
|
"epoch": 7.630180658873539, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 3.209947646436752e-05, |
|
"loss": 0.1941, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 7.640807651434644, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 3.182732727010715e-05, |
|
"loss": 0.1959, |
|
"step": 3595 |
|
}, |
|
{ |
|
"epoch": 7.651434643995749, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 3.155611816456586e-05, |
|
"loss": 0.2002, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 7.662061636556855, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 3.12858528876676e-05, |
|
"loss": 0.196, |
|
"step": 3605 |
|
}, |
|
{ |
|
"epoch": 7.6726886291179595, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 3.1016535166321356e-05, |
|
"loss": 0.1988, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 7.683315621679065, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 3.074816871436924e-05, |
|
"loss": 0.2016, |
|
"step": 3615 |
|
}, |
|
{ |
|
"epoch": 7.69394261424017, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 3.0480757232535772e-05, |
|
"loss": 0.1973, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 7.704569606801275, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 3.021430440837646e-05, |
|
"loss": 0.1945, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 7.715196599362381, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 2.9948813916227115e-05, |
|
"loss": 0.1985, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 7.7258235919234854, |
|
"grad_norm": 0.5859375, |
|
"learning_rate": 2.968428941715321e-05, |
|
"loss": 0.1963, |
|
"step": 3635 |
|
}, |
|
{ |
|
"epoch": 7.736450584484591, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 2.9420734558899322e-05, |
|
"loss": 0.2008, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 7.747077577045696, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 2.915815297583886e-05, |
|
"loss": 0.1983, |
|
"step": 3645 |
|
}, |
|
{ |
|
"epoch": 7.757704569606801, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 2.889654828892393e-05, |
|
"loss": 0.2, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 7.768331562167907, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 2.8635924105635458e-05, |
|
"loss": 0.1978, |
|
"step": 3655 |
|
}, |
|
{ |
|
"epoch": 7.778958554729011, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 2.8376284019933373e-05, |
|
"loss": 0.1944, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 7.789585547290117, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 2.8117631612207084e-05, |
|
"loss": 0.1963, |
|
"step": 3665 |
|
}, |
|
{ |
|
"epoch": 7.800212539851222, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 2.7859970449226104e-05, |
|
"loss": 0.1961, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 7.810839532412327, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 2.760330408409083e-05, |
|
"loss": 0.1974, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 7.821466524973433, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 2.73476360561837e-05, |
|
"loss": 0.1977, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 7.832093517534537, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 2.7092969891120058e-05, |
|
"loss": 0.1956, |
|
"step": 3685 |
|
}, |
|
{ |
|
"epoch": 7.842720510095643, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 2.6839309100699973e-05, |
|
"loss": 0.1905, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 7.8533475026567485, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 2.6586657182859343e-05, |
|
"loss": 0.1984, |
|
"step": 3695 |
|
}, |
|
{ |
|
"epoch": 7.863974495217853, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 2.6335017621622116e-05, |
|
"loss": 0.1949, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 7.874601487778959, |
|
"grad_norm": 0.5703125, |
|
"learning_rate": 2.6084393887051884e-05, |
|
"loss": 0.196, |
|
"step": 3705 |
|
}, |
|
{ |
|
"epoch": 7.885228480340063, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 2.5834789435204243e-05, |
|
"loss": 0.1922, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 7.895855472901169, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 2.5586207708079034e-05, |
|
"loss": 0.1921, |
|
"step": 3715 |
|
}, |
|
{ |
|
"epoch": 7.9064824654622745, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 2.5338652133572915e-05, |
|
"loss": 0.1969, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 7.917109458023379, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 2.5092126125432136e-05, |
|
"loss": 0.2008, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 7.927736450584485, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 2.4846633083205263e-05, |
|
"loss": 0.1963, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 7.93836344314559, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 2.4602176392196662e-05, |
|
"loss": 0.1944, |
|
"step": 3735 |
|
}, |
|
{ |
|
"epoch": 7.948990435706695, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 2.4358759423419474e-05, |
|
"loss": 0.1954, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 7.9596174282678005, |
|
"grad_norm": 0.5, |
|
"learning_rate": 2.411638553354928e-05, |
|
"loss": 0.1979, |
|
"step": 3745 |
|
}, |
|
{ |
|
"epoch": 7.970244420828905, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 2.3875058064877807e-05, |
|
"loss": 0.195, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 7.980871413390011, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 2.3634780345266806e-05, |
|
"loss": 0.1936, |
|
"step": 3755 |
|
}, |
|
{ |
|
"epoch": 7.991498405951116, |
|
"grad_norm": 0.5, |
|
"learning_rate": 2.339555568810221e-05, |
|
"loss": 0.1968, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_loss": 3.6609108448028564, |
|
"eval_runtime": 0.7997, |
|
"eval_samples_per_second": 6.252, |
|
"eval_steps_per_second": 1.25, |
|
"step": 3764 |
|
}, |
|
{ |
|
"epoch": 8.002125398512222, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 2.3157387392248385e-05, |
|
"loss": 0.1921, |
|
"step": 3765 |
|
}, |
|
{ |
|
"epoch": 8.012752391073326, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 2.2920278742002676e-05, |
|
"loss": 0.1828, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 8.023379383634431, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 2.2684233007050115e-05, |
|
"loss": 0.1811, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 8.034006376195537, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 2.244925344241828e-05, |
|
"loss": 0.1809, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 8.044633368756642, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 2.221534328843261e-05, |
|
"loss": 0.1758, |
|
"step": 3785 |
|
}, |
|
{ |
|
"epoch": 8.055260361317748, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 2.1982505770671303e-05, |
|
"loss": 0.1764, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 8.065887353878852, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 2.1750744099921396e-05, |
|
"loss": 0.1779, |
|
"step": 3795 |
|
}, |
|
{ |
|
"epoch": 8.076514346439957, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 2.1520061472133902e-05, |
|
"loss": 0.1798, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 8.087141339001063, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 2.1290461068380298e-05, |
|
"loss": 0.1756, |
|
"step": 3805 |
|
}, |
|
{ |
|
"epoch": 8.097768331562168, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 2.1061946054808146e-05, |
|
"loss": 0.1804, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 8.108395324123274, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 2.0834519582597878e-05, |
|
"loss": 0.1782, |
|
"step": 3815 |
|
}, |
|
{ |
|
"epoch": 8.119022316684378, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 2.0608184787919026e-05, |
|
"loss": 0.1804, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 8.129649309245483, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 2.0382944791887106e-05, |
|
"loss": 0.1795, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 8.140276301806589, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 2.0158802700520574e-05, |
|
"loss": 0.1815, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 8.150903294367694, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 1.9935761604697976e-05, |
|
"loss": 0.1793, |
|
"step": 3835 |
|
}, |
|
{ |
|
"epoch": 8.1615302869288, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 1.9713824580115335e-05, |
|
"loss": 0.1785, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 8.172157279489904, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 1.9492994687243714e-05, |
|
"loss": 0.1784, |
|
"step": 3845 |
|
}, |
|
{ |
|
"epoch": 8.182784272051009, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 1.927327497128706e-05, |
|
"loss": 0.177, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 8.193411264612115, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 1.9054668462140168e-05, |
|
"loss": 0.1804, |
|
"step": 3855 |
|
}, |
|
{ |
|
"epoch": 8.20403825717322, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 1.883717817434688e-05, |
|
"loss": 0.1753, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 8.214665249734326, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 1.8620807107058668e-05, |
|
"loss": 0.1785, |
|
"step": 3865 |
|
}, |
|
{ |
|
"epoch": 8.22529224229543, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 1.840555824399296e-05, |
|
"loss": 0.1775, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 8.235919234856535, |
|
"grad_norm": 0.578125, |
|
"learning_rate": 1.8191434553392428e-05, |
|
"loss": 0.18, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 8.24654622741764, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 1.797843898798358e-05, |
|
"loss": 0.1806, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 8.257173219978746, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 1.7766574484936482e-05, |
|
"loss": 0.1837, |
|
"step": 3885 |
|
}, |
|
{ |
|
"epoch": 8.267800212539852, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 1.7555843965823992e-05, |
|
"loss": 0.1817, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 8.278427205100957, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 1.7346250336581427e-05, |
|
"loss": 0.1806, |
|
"step": 3895 |
|
}, |
|
{ |
|
"epoch": 8.289054197662061, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 1.7137796487466797e-05, |
|
"loss": 0.1791, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 8.299681190223167, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 1.693048529302056e-05, |
|
"loss": 0.1788, |
|
"step": 3905 |
|
}, |
|
{ |
|
"epoch": 8.310308182784272, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 1.672431961202635e-05, |
|
"loss": 0.1809, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 8.320935175345378, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 1.6519302287471207e-05, |
|
"loss": 0.1774, |
|
"step": 3915 |
|
}, |
|
{ |
|
"epoch": 8.331562167906483, |
|
"grad_norm": 0.5, |
|
"learning_rate": 1.6315436146506703e-05, |
|
"loss": 0.1737, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 8.342189160467587, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 1.6112724000409694e-05, |
|
"loss": 0.1813, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 8.352816153028693, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 1.5911168644543707e-05, |
|
"loss": 0.175, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 8.363443145589798, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 1.57107728583203e-05, |
|
"loss": 0.1838, |
|
"step": 3935 |
|
}, |
|
{ |
|
"epoch": 8.374070138150904, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 1.5511539405160825e-05, |
|
"loss": 0.1783, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 8.38469713071201, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 1.5313471032458247e-05, |
|
"loss": 0.1763, |
|
"step": 3945 |
|
}, |
|
{ |
|
"epoch": 8.395324123273113, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 1.5116570471539293e-05, |
|
"loss": 0.1758, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 8.405951115834219, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 1.4920840437626759e-05, |
|
"loss": 0.1829, |
|
"step": 3955 |
|
}, |
|
{ |
|
"epoch": 8.416578108395324, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 1.4726283629802107e-05, |
|
"loss": 0.1779, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 8.42720510095643, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 1.4532902730968212e-05, |
|
"loss": 0.1792, |
|
"step": 3965 |
|
}, |
|
{ |
|
"epoch": 8.437832093517535, |
|
"grad_norm": 0.48828125, |
|
"learning_rate": 1.4340700407812435e-05, |
|
"loss": 0.1813, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 8.448459086078639, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 1.414967931076967e-05, |
|
"loss": 0.1785, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 8.459086078639745, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 1.3959842073986085e-05, |
|
"loss": 0.1746, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 8.46971307120085, |
|
"grad_norm": 0.62109375, |
|
"learning_rate": 1.3771191315282427e-05, |
|
"loss": 0.1832, |
|
"step": 3985 |
|
}, |
|
{ |
|
"epoch": 8.480340063761956, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 1.3583729636118358e-05, |
|
"loss": 0.1843, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 8.490967056323061, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 1.339745962155613e-05, |
|
"loss": 0.1801, |
|
"step": 3995 |
|
}, |
|
{ |
|
"epoch": 8.501594048884165, |
|
"grad_norm": 0.54296875, |
|
"learning_rate": 1.3212383840225329e-05, |
|
"loss": 0.18, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 8.51222104144527, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 1.3028504844287182e-05, |
|
"loss": 0.18, |
|
"step": 4005 |
|
}, |
|
{ |
|
"epoch": 8.522848034006376, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 1.2845825169399507e-05, |
|
"loss": 0.1778, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 8.533475026567482, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 1.2664347334681681e-05, |
|
"loss": 0.1849, |
|
"step": 4015 |
|
}, |
|
{ |
|
"epoch": 8.544102019128587, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 1.2484073842679944e-05, |
|
"loss": 0.1799, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 8.554729011689691, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 1.230500717933285e-05, |
|
"loss": 0.1802, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 8.565356004250797, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 1.2127149813937022e-05, |
|
"loss": 0.1776, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 8.575982996811902, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 1.1950504199113088e-05, |
|
"loss": 0.1794, |
|
"step": 4035 |
|
}, |
|
{ |
|
"epoch": 8.586609989373008, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 1.1775072770771834e-05, |
|
"loss": 0.1757, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 8.597236981934113, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 1.1600857948080624e-05, |
|
"loss": 0.1786, |
|
"step": 4045 |
|
}, |
|
{ |
|
"epoch": 8.607863974495217, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 1.1427862133430156e-05, |
|
"loss": 0.1838, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 8.618490967056323, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 1.1256087712401087e-05, |
|
"loss": 0.1755, |
|
"step": 4055 |
|
}, |
|
{ |
|
"epoch": 8.629117959617428, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 1.1085537053731354e-05, |
|
"loss": 0.1777, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 8.639744952178534, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 1.091621250928343e-05, |
|
"loss": 0.1799, |
|
"step": 4065 |
|
}, |
|
{ |
|
"epoch": 8.65037194473964, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 1.0748116414011888e-05, |
|
"loss": 0.1774, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 8.660998937300743, |
|
"grad_norm": 0.5, |
|
"learning_rate": 1.058125108593122e-05, |
|
"loss": 0.1844, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 8.671625929861849, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 1.0415618826083828e-05, |
|
"loss": 0.1803, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 8.682252922422954, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 1.0251221918508425e-05, |
|
"loss": 0.1787, |
|
"step": 4085 |
|
}, |
|
{ |
|
"epoch": 8.69287991498406, |
|
"grad_norm": 0.5546875, |
|
"learning_rate": 1.0088062630208273e-05, |
|
"loss": 0.1804, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 8.703506907545165, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 9.926143211120286e-06, |
|
"loss": 0.1868, |
|
"step": 4095 |
|
}, |
|
{ |
|
"epoch": 8.71413390010627, |
|
"grad_norm": 0.5, |
|
"learning_rate": 9.765465894083636e-06, |
|
"loss": 0.1831, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 8.724760892667375, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 9.606032894809258e-06, |
|
"loss": 0.1774, |
|
"step": 4105 |
|
}, |
|
{ |
|
"epoch": 8.73538788522848, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 9.447846411849115e-06, |
|
"loss": 0.1775, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 8.746014877789586, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 9.29090862656593e-06, |
|
"loss": 0.1796, |
|
"step": 4115 |
|
}, |
|
{ |
|
"epoch": 8.756641870350691, |
|
"grad_norm": 0.5, |
|
"learning_rate": 9.135221703103136e-06, |
|
"loss": 0.1798, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 8.767268862911797, |
|
"grad_norm": 0.5, |
|
"learning_rate": 8.980787788355016e-06, |
|
"loss": 0.179, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 8.7778958554729, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 8.827609011937066e-06, |
|
"loss": 0.1802, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 8.788522848034006, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 8.675687486156659e-06, |
|
"loss": 0.184, |
|
"step": 4135 |
|
}, |
|
{ |
|
"epoch": 8.799149840595112, |
|
"grad_norm": 0.60546875, |
|
"learning_rate": 8.525025305983936e-06, |
|
"loss": 0.173, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 8.809776833156217, |
|
"grad_norm": 0.53125, |
|
"learning_rate": 8.375624549022854e-06, |
|
"loss": 0.179, |
|
"step": 4145 |
|
}, |
|
{ |
|
"epoch": 8.820403825717323, |
|
"grad_norm": 0.56640625, |
|
"learning_rate": 8.227487275482592e-06, |
|
"loss": 0.1812, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 8.831030818278427, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 8.08061552814917e-06, |
|
"loss": 0.183, |
|
"step": 4155 |
|
}, |
|
{ |
|
"epoch": 8.841657810839532, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 7.935011332357112e-06, |
|
"loss": 0.1786, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 8.852284803400638, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 7.790676695961796e-06, |
|
"loss": 0.178, |
|
"step": 4165 |
|
}, |
|
{ |
|
"epoch": 8.862911795961743, |
|
"grad_norm": 0.5, |
|
"learning_rate": 7.647613609311455e-06, |
|
"loss": 0.1809, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 8.873538788522849, |
|
"grad_norm": 0.55078125, |
|
"learning_rate": 7.505824045220011e-06, |
|
"loss": 0.1756, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 8.884165781083952, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 7.365309958939615e-06, |
|
"loss": 0.175, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 8.894792773645058, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 7.226073288133939e-06, |
|
"loss": 0.1792, |
|
"step": 4185 |
|
}, |
|
{ |
|
"epoch": 8.905419766206164, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 7.088115952851238e-06, |
|
"loss": 0.1733, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 8.91604675876727, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 6.951439855498021e-06, |
|
"loss": 0.1792, |
|
"step": 4195 |
|
}, |
|
{ |
|
"epoch": 8.926673751328375, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 6.81604688081271e-06, |
|
"loss": 0.1803, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 8.937300743889478, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 6.681938895839746e-06, |
|
"loss": 0.177, |
|
"step": 4205 |
|
}, |
|
{ |
|
"epoch": 8.947927736450584, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 6.549117749903755e-06, |
|
"loss": 0.1791, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 8.95855472901169, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 6.417585274584115e-06, |
|
"loss": 0.1778, |
|
"step": 4215 |
|
}, |
|
{ |
|
"epoch": 8.969181721572795, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 6.287343283689661e-06, |
|
"loss": 0.176, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 8.9798087141339, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 6.158393573233678e-06, |
|
"loss": 0.1759, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 8.990435706695006, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 6.030737921409169e-06, |
|
"loss": 0.1809, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 8.99893730074389, |
|
"eval_loss": 3.939990520477295, |
|
"eval_runtime": 0.8191, |
|
"eval_samples_per_second": 6.105, |
|
"eval_steps_per_second": 1.221, |
|
"step": 4234 |
|
}, |
|
{ |
|
"epoch": 9.00106269925611, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 5.904378088564255e-06, |
|
"loss": 0.1819, |
|
"step": 4235 |
|
}, |
|
{ |
|
"epoch": 9.011689691817216, |
|
"grad_norm": 0.5, |
|
"learning_rate": 5.779315817178e-06, |
|
"loss": 0.1763, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 9.022316684378321, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 5.655552831836297e-06, |
|
"loss": 0.1767, |
|
"step": 4245 |
|
}, |
|
{ |
|
"epoch": 9.032943676939427, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 5.533090839208133e-06, |
|
"loss": 0.1744, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 9.043570669500532, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 5.4119315280220405e-06, |
|
"loss": 0.1777, |
|
"step": 4255 |
|
}, |
|
{ |
|
"epoch": 9.054197662061636, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 5.292076569042826e-06, |
|
"loss": 0.1744, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 9.064824654622742, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 5.1735276150484905e-06, |
|
"loss": 0.1792, |
|
"step": 4265 |
|
}, |
|
{ |
|
"epoch": 9.075451647183847, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 5.056286300807511e-06, |
|
"loss": 0.1751, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 9.086078639744953, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 4.940354243056178e-06, |
|
"loss": 0.174, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 9.096705632306058, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 4.825733040476465e-06, |
|
"loss": 0.1732, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 9.107332624867162, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 4.712424273673788e-06, |
|
"loss": 0.1767, |
|
"step": 4285 |
|
}, |
|
{ |
|
"epoch": 9.117959617428268, |
|
"grad_norm": 0.44921875, |
|
"learning_rate": 4.600429505155424e-06, |
|
"loss": 0.1726, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 9.128586609989373, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 4.489750279308757e-06, |
|
"loss": 0.174, |
|
"step": 4295 |
|
}, |
|
{ |
|
"epoch": 9.139213602550479, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 4.380388122380141e-06, |
|
"loss": 0.1787, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 9.149840595111584, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 4.2723445424537544e-06, |
|
"loss": 0.1802, |
|
"step": 4305 |
|
}, |
|
{ |
|
"epoch": 9.160467587672688, |
|
"grad_norm": 0.5, |
|
"learning_rate": 4.165621029430855e-06, |
|
"loss": 0.1786, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 9.171094580233794, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 4.060219055009229e-06, |
|
"loss": 0.1752, |
|
"step": 4315 |
|
}, |
|
{ |
|
"epoch": 9.181721572794899, |
|
"grad_norm": 0.48828125, |
|
"learning_rate": 3.9561400726628505e-06, |
|
"loss": 0.1793, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 9.192348565356005, |
|
"grad_norm": 0.56640625, |
|
"learning_rate": 3.85338551762191e-06, |
|
"loss": 0.175, |
|
"step": 4325 |
|
}, |
|
{ |
|
"epoch": 9.20297555791711, |
|
"grad_norm": 0.462890625, |
|
"learning_rate": 3.7519568068529855e-06, |
|
"loss": 0.1728, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 9.213602550478214, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 3.651855339039456e-06, |
|
"loss": 0.1698, |
|
"step": 4335 |
|
}, |
|
{ |
|
"epoch": 9.22422954303932, |
|
"grad_norm": 0.69921875, |
|
"learning_rate": 3.5530824945623542e-06, |
|
"loss": 0.1765, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 9.234856535600425, |
|
"grad_norm": 0.4765625, |
|
"learning_rate": 3.4556396354811226e-06, |
|
"loss": 0.1762, |
|
"step": 4345 |
|
}, |
|
{ |
|
"epoch": 9.24548352816153, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 3.359528105515064e-06, |
|
"loss": 0.1781, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 9.256110520722636, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 3.2647492300245994e-06, |
|
"loss": 0.1754, |
|
"step": 4355 |
|
}, |
|
{ |
|
"epoch": 9.26673751328374, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 3.1713043159931734e-06, |
|
"loss": 0.1767, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 9.277364505844846, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 3.079194652009054e-06, |
|
"loss": 0.1708, |
|
"step": 4365 |
|
}, |
|
{ |
|
"epoch": 9.287991498405951, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 2.9884215082477408e-06, |
|
"loss": 0.1723, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 9.298618490967057, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 2.898986136454318e-06, |
|
"loss": 0.1801, |
|
"step": 4375 |
|
}, |
|
{ |
|
"epoch": 9.309245483528162, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 2.810889769926217e-06, |
|
"loss": 0.1774, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 9.319872476089266, |
|
"grad_norm": 0.447265625, |
|
"learning_rate": 2.7241336234962944e-06, |
|
"loss": 0.1722, |
|
"step": 4385 |
|
}, |
|
{ |
|
"epoch": 9.330499468650371, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 2.6387188935159456e-06, |
|
"loss": 0.1779, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 9.341126461211477, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 2.5546467578387635e-06, |
|
"loss": 0.1743, |
|
"step": 4395 |
|
}, |
|
{ |
|
"epoch": 9.351753453772583, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 2.471918375804105e-06, |
|
"loss": 0.1733, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 9.362380446333688, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 2.3905348882212964e-06, |
|
"loss": 0.179, |
|
"step": 4405 |
|
}, |
|
{ |
|
"epoch": 9.373007438894792, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 2.3104974173537743e-06, |
|
"loss": 0.1742, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 9.383634431455897, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 2.2318070669036685e-06, |
|
"loss": 0.1743, |
|
"step": 4415 |
|
}, |
|
{ |
|
"epoch": 9.394261424017003, |
|
"grad_norm": 0.4921875, |
|
"learning_rate": 2.1544649219965575e-06, |
|
"loss": 0.1734, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 9.404888416578109, |
|
"grad_norm": 0.46875, |
|
"learning_rate": 2.07847204916648e-06, |
|
"loss": 0.1729, |
|
"step": 4425 |
|
}, |
|
{ |
|
"epoch": 9.415515409139214, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 2.003829496341325e-06, |
|
"loss": 0.1762, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 9.426142401700318, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 1.9305382928282546e-06, |
|
"loss": 0.1798, |
|
"step": 4435 |
|
}, |
|
{ |
|
"epoch": 9.436769394261423, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 1.8585994492995916e-06, |
|
"loss": 0.1757, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 9.447396386822529, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 1.788013957778878e-06, |
|
"loss": 0.1765, |
|
"step": 4445 |
|
}, |
|
{ |
|
"epoch": 9.458023379383635, |
|
"grad_norm": 0.5, |
|
"learning_rate": 1.7187827916271382e-06, |
|
"loss": 0.1739, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 9.46865037194474, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 1.65090690552957e-06, |
|
"loss": 0.1732, |
|
"step": 4455 |
|
}, |
|
{ |
|
"epoch": 9.479277364505846, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 1.5843872354822097e-06, |
|
"loss": 0.173, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 9.48990435706695, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 1.5192246987791981e-06, |
|
"loss": 0.177, |
|
"step": 4465 |
|
}, |
|
{ |
|
"epoch": 9.500531349628055, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 1.4554201940000123e-06, |
|
"loss": 0.177, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 9.51115834218916, |
|
"grad_norm": 0.5, |
|
"learning_rate": 1.3929746009971433e-06, |
|
"loss": 0.179, |
|
"step": 4475 |
|
}, |
|
{ |
|
"epoch": 9.521785334750266, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 1.3318887808839274e-06, |
|
"loss": 0.1714, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 9.532412327311372, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 1.272163576022667e-06, |
|
"loss": 0.1729, |
|
"step": 4485 |
|
}, |
|
{ |
|
"epoch": 9.543039319872475, |
|
"grad_norm": 0.46484375, |
|
"learning_rate": 1.21379981001305e-06, |
|
"loss": 0.1733, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 9.553666312433581, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 1.1567982876807603e-06, |
|
"loss": 0.175, |
|
"step": 4495 |
|
}, |
|
{ |
|
"epoch": 9.564293304994687, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 1.1011597950663865e-06, |
|
"loss": 0.1816, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 9.574920297555792, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 1.0468850994145963e-06, |
|
"loss": 0.1777, |
|
"step": 4505 |
|
}, |
|
{ |
|
"epoch": 9.585547290116898, |
|
"grad_norm": 0.5, |
|
"learning_rate": 9.939749491635341e-07, |
|
"loss": 0.1776, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 9.596174282678001, |
|
"grad_norm": 0.5, |
|
"learning_rate": 9.424300739345304e-07, |
|
"loss": 0.1727, |
|
"step": 4515 |
|
}, |
|
{ |
|
"epoch": 9.606801275239107, |
|
"grad_norm": 0.478515625, |
|
"learning_rate": 8.922511845219971e-07, |
|
"loss": 0.1734, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 9.617428267800213, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 8.434389728836589e-07, |
|
"loss": 0.1806, |
|
"step": 4525 |
|
}, |
|
{ |
|
"epoch": 9.628055260361318, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 7.959941121310266e-07, |
|
"loss": 0.1729, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 9.638682252922424, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 7.499172565200718e-07, |
|
"loss": 0.1729, |
|
"step": 4535 |
|
}, |
|
{ |
|
"epoch": 9.649309245483527, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 7.052090414422119e-07, |
|
"loss": 0.1821, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 9.659936238044633, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 6.618700834155945e-07, |
|
"loss": 0.1825, |
|
"step": 4545 |
|
}, |
|
{ |
|
"epoch": 9.670563230605739, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 6.199009800765265e-07, |
|
"loss": 0.1709, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 9.681190223166844, |
|
"grad_norm": 0.5, |
|
"learning_rate": 5.793023101712924e-07, |
|
"loss": 0.1771, |
|
"step": 4555 |
|
}, |
|
{ |
|
"epoch": 9.69181721572795, |
|
"grad_norm": 0.5390625, |
|
"learning_rate": 5.400746335481488e-07, |
|
"loss": 0.1771, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 9.702444208289053, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 5.022184911495864e-07, |
|
"loss": 0.1807, |
|
"step": 4565 |
|
}, |
|
{ |
|
"epoch": 9.713071200850159, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 4.6573440500492504e-07, |
|
"loss": 0.1677, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 9.723698193411265, |
|
"grad_norm": 0.5, |
|
"learning_rate": 4.306228782230304e-07, |
|
"loss": 0.1755, |
|
"step": 4575 |
|
}, |
|
{ |
|
"epoch": 9.73432518597237, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 3.96884394985475e-07, |
|
"loss": 0.1741, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 9.744952178533476, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 3.6451942053975507e-07, |
|
"loss": 0.1708, |
|
"step": 4585 |
|
}, |
|
{ |
|
"epoch": 9.755579171094581, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 3.335284011929951e-07, |
|
"loss": 0.1732, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 9.766206163655685, |
|
"grad_norm": 0.48828125, |
|
"learning_rate": 3.0391176430567547e-07, |
|
"loss": 0.1757, |
|
"step": 4595 |
|
}, |
|
{ |
|
"epoch": 9.77683315621679, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 2.756699182858369e-07, |
|
"loss": 0.1786, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 9.787460148777896, |
|
"grad_norm": 0.48828125, |
|
"learning_rate": 2.488032525833628e-07, |
|
"loss": 0.1746, |
|
"step": 4605 |
|
}, |
|
{ |
|
"epoch": 9.798087141339002, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 2.2331213768468363e-07, |
|
"loss": 0.1767, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 9.808714133900107, |
|
"grad_norm": 0.484375, |
|
"learning_rate": 1.9919692510762533e-07, |
|
"loss": 0.1795, |
|
"step": 4615 |
|
}, |
|
{ |
|
"epoch": 9.819341126461211, |
|
"grad_norm": 0.5078125, |
|
"learning_rate": 1.7645794739654665e-07, |
|
"loss": 0.1764, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 9.829968119022316, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 1.5509551811783152e-07, |
|
"loss": 0.1781, |
|
"step": 4625 |
|
}, |
|
{ |
|
"epoch": 9.840595111583422, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 1.351099318554705e-07, |
|
"loss": 0.1755, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 9.851222104144528, |
|
"grad_norm": 0.498046875, |
|
"learning_rate": 1.1650146420704166e-07, |
|
"loss": 0.1758, |
|
"step": 4635 |
|
}, |
|
{ |
|
"epoch": 9.861849096705633, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 9.927037177993592e-08, |
|
"loss": 0.1774, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 9.872476089266737, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 8.341689218775984e-08, |
|
"loss": 0.1827, |
|
"step": 4645 |
|
}, |
|
{ |
|
"epoch": 9.883103081827842, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 6.894124404711599e-08, |
|
"loss": 0.1717, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 9.893730074388948, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 5.584362697453882e-08, |
|
"loss": 0.1773, |
|
"step": 4655 |
|
}, |
|
{ |
|
"epoch": 9.904357066950054, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 4.4124221583785595e-08, |
|
"loss": 0.1738, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 9.91498405951116, |
|
"grad_norm": 0.49609375, |
|
"learning_rate": 3.378318948332737e-08, |
|
"loss": 0.1792, |
|
"step": 4665 |
|
}, |
|
{ |
|
"epoch": 9.925611052072263, |
|
"grad_norm": 0.5859375, |
|
"learning_rate": 2.482067327409521e-08, |
|
"loss": 0.173, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 9.936238044633368, |
|
"grad_norm": 0.455078125, |
|
"learning_rate": 1.7236796547559497e-08, |
|
"loss": 0.1719, |
|
"step": 4675 |
|
}, |
|
{ |
|
"epoch": 9.946865037194474, |
|
"grad_norm": 0.451171875, |
|
"learning_rate": 1.103166388398691e-08, |
|
"loss": 0.1741, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 9.95749202975558, |
|
"grad_norm": 0.486328125, |
|
"learning_rate": 6.205360851041508e-09, |
|
"loss": 0.1773, |
|
"step": 4685 |
|
}, |
|
{ |
|
"epoch": 9.968119022316685, |
|
"grad_norm": 0.490234375, |
|
"learning_rate": 2.7579540025524097e-09, |
|
"loss": 0.1762, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 9.978746014877789, |
|
"grad_norm": 0.44140625, |
|
"learning_rate": 6.894908776255982e-10, |
|
"loss": 0.1745, |
|
"step": 4695 |
|
}, |
|
{ |
|
"epoch": 9.989373007438894, |
|
"grad_norm": 0.51953125, |
|
"learning_rate": 0.0, |
|
"loss": 0.1757, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 9.989373007438894, |
|
"eval_loss": 3.9657981395721436, |
|
"eval_runtime": 0.7855, |
|
"eval_samples_per_second": 6.366, |
|
"eval_steps_per_second": 1.273, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 9.989373007438894, |
|
"step": 4700, |
|
"total_flos": 7.255013733871649e+18, |
|
"train_loss": 0.5076825852977469, |
|
"train_runtime": 24662.4426, |
|
"train_samples_per_second": 6.102, |
|
"train_steps_per_second": 0.191 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 4700, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 10, |
|
"save_steps": 100, |
|
"total_flos": 7.255013733871649e+18, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|