{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 11.989100817438691,
  "eval_steps": 1000,
  "global_step": 3300,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03633060853769301,
      "grad_norm": 5.34375,
      "learning_rate": 3.0303030303030305e-07,
      "loss": 2.0607,
      "step": 10
    },
    {
      "epoch": 0.07266121707538602,
      "grad_norm": 5.90625,
      "learning_rate": 6.060606060606061e-07,
      "loss": 2.033,
      "step": 20
    },
    {
      "epoch": 0.10899182561307902,
      "grad_norm": 5.46875,
      "learning_rate": 9.090909090909091e-07,
      "loss": 2.0622,
      "step": 30
    },
    {
      "epoch": 0.14532243415077203,
      "grad_norm": 4.96875,
      "learning_rate": 1.2121212121212122e-06,
      "loss": 1.9896,
      "step": 40
    },
    {
      "epoch": 0.18165304268846502,
      "grad_norm": 15.625,
      "learning_rate": 1.5151515151515152e-06,
      "loss": 1.9998,
      "step": 50
    },
    {
      "epoch": 0.21798365122615804,
      "grad_norm": 18.0,
      "learning_rate": 1.8181818181818183e-06,
      "loss": 1.9608,
      "step": 60
    },
    {
      "epoch": 0.254314259763851,
      "grad_norm": 15.625,
      "learning_rate": 2.1212121212121216e-06,
      "loss": 1.9272,
      "step": 70
    },
    {
      "epoch": 0.29064486830154407,
      "grad_norm": 15.5625,
      "learning_rate": 2.4242424242424244e-06,
      "loss": 1.9031,
      "step": 80
    },
    {
      "epoch": 0.32697547683923706,
      "grad_norm": 14.1875,
      "learning_rate": 2.7272727272727272e-06,
      "loss": 1.8012,
      "step": 90
    },
    {
      "epoch": 0.36330608537693004,
      "grad_norm": 3.421875,
      "learning_rate": 3.0303030303030305e-06,
      "loss": 1.7756,
      "step": 100
    },
    {
      "epoch": 0.3996366939146231,
      "grad_norm": 3.09375,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.6836,
      "step": 110
    },
    {
      "epoch": 0.4359673024523161,
      "grad_norm": 2.46875,
      "learning_rate": 3.6363636363636366e-06,
      "loss": 1.7013,
      "step": 120
    },
    {
      "epoch": 0.47229791099000906,
      "grad_norm": 2.859375,
      "learning_rate": 3.93939393939394e-06,
      "loss": 1.6328,
      "step": 130
    },
    {
      "epoch": 0.508628519527702,
      "grad_norm": 2.6875,
      "learning_rate": 4.242424242424243e-06,
      "loss": 1.6888,
      "step": 140
    },
    {
      "epoch": 0.5449591280653951,
      "grad_norm": 2.828125,
      "learning_rate": 4.5454545454545455e-06,
      "loss": 1.6583,
      "step": 150
    },
    {
      "epoch": 0.5812897366030881,
      "grad_norm": 2.0625,
      "learning_rate": 4.848484848484849e-06,
      "loss": 1.5485,
      "step": 160
    },
    {
      "epoch": 0.6176203451407811,
      "grad_norm": 2.1875,
      "learning_rate": 5.151515151515152e-06,
      "loss": 1.5464,
      "step": 170
    },
    {
      "epoch": 0.6539509536784741,
      "grad_norm": 2.25,
      "learning_rate": 5.4545454545454545e-06,
      "loss": 1.5506,
      "step": 180
    },
    {
      "epoch": 0.6902815622161671,
      "grad_norm": 2.4375,
      "learning_rate": 5.7575757575757586e-06,
      "loss": 1.5301,
      "step": 190
    },
    {
      "epoch": 0.7266121707538601,
      "grad_norm": 2.4375,
      "learning_rate": 6.060606060606061e-06,
      "loss": 1.5116,
      "step": 200
    },
    {
      "epoch": 0.7629427792915532,
      "grad_norm": 2.609375,
      "learning_rate": 6.363636363636364e-06,
      "loss": 1.4874,
      "step": 210
    },
    {
      "epoch": 0.7992733878292462,
      "grad_norm": 2.3125,
      "learning_rate": 6.666666666666667e-06,
      "loss": 1.5056,
      "step": 220
    },
    {
      "epoch": 0.8356039963669392,
      "grad_norm": 2.21875,
      "learning_rate": 6.969696969696971e-06,
      "loss": 1.5036,
      "step": 230
    },
    {
      "epoch": 0.8719346049046321,
      "grad_norm": 2.359375,
      "learning_rate": 7.272727272727273e-06,
      "loss": 1.4625,
      "step": 240
    },
    {
      "epoch": 0.9082652134423251,
      "grad_norm": 1.921875,
      "learning_rate": 7.5757575757575764e-06,
      "loss": 1.4439,
      "step": 250
    },
    {
      "epoch": 0.9445958219800181,
      "grad_norm": 2.265625,
      "learning_rate": 7.87878787878788e-06,
      "loss": 1.4025,
      "step": 260
    },
    {
      "epoch": 0.9809264305177112,
      "grad_norm": 2.609375,
      "learning_rate": 8.181818181818183e-06,
      "loss": 1.4267,
      "step": 270
    },
    {
      "epoch": 1.017257039055404,
      "grad_norm": 1.84375,
      "learning_rate": 8.484848484848486e-06,
      "loss": 1.3484,
      "step": 280
    },
    {
      "epoch": 1.0535876475930972,
      "grad_norm": 2.0,
      "learning_rate": 8.787878787878788e-06,
      "loss": 1.3809,
      "step": 290
    },
    {
      "epoch": 1.0899182561307903,
      "grad_norm": 2.1875,
      "learning_rate": 9.090909090909091e-06,
      "loss": 1.3901,
      "step": 300
    },
    {
      "epoch": 1.1262488646684832,
      "grad_norm": 1.8984375,
      "learning_rate": 9.393939393939396e-06,
      "loss": 1.3203,
      "step": 310
    },
    {
      "epoch": 1.1625794732061763,
      "grad_norm": 1.9296875,
      "learning_rate": 9.696969696969698e-06,
      "loss": 1.3295,
      "step": 320
    },
    {
      "epoch": 1.1989100817438691,
      "grad_norm": 1.9140625,
      "learning_rate": 1e-05,
      "loss": 1.3936,
      "step": 330
    },
    {
      "epoch": 1.2352406902815622,
      "grad_norm": 2.5,
      "learning_rate": 9.999720280459576e-06,
      "loss": 1.3483,
      "step": 340
    },
    {
      "epoch": 1.2715712988192553,
      "grad_norm": 1.7890625,
      "learning_rate": 9.99888115313551e-06,
      "loss": 1.3384,
      "step": 350
    },
    {
      "epoch": 1.3079019073569482,
      "grad_norm": 1.921875,
      "learning_rate": 9.997482711915926e-06,
      "loss": 1.2888,
      "step": 360
    },
    {
      "epoch": 1.344232515894641,
      "grad_norm": 1.9453125,
      "learning_rate": 9.99552511326936e-06,
      "loss": 1.3144,
      "step": 370
    },
    {
      "epoch": 1.3805631244323342,
      "grad_norm": 1.796875,
      "learning_rate": 9.993008576227248e-06,
      "loss": 1.2905,
      "step": 380
    },
    {
      "epoch": 1.4168937329700273,
      "grad_norm": 1.8203125,
      "learning_rate": 9.989933382359423e-06,
      "loss": 1.2816,
      "step": 390
    },
    {
      "epoch": 1.4532243415077202,
      "grad_norm": 1.6953125,
      "learning_rate": 9.986299875742612e-06,
      "loss": 1.3315,
      "step": 400
    },
    {
      "epoch": 1.4895549500454133,
      "grad_norm": 1.7734375,
      "learning_rate": 9.982108462921938e-06,
      "loss": 1.2984,
      "step": 410
    },
    {
      "epoch": 1.5258855585831061,
      "grad_norm": 2.765625,
      "learning_rate": 9.977359612865424e-06,
      "loss": 1.2677,
      "step": 420
    },
    {
      "epoch": 1.5622161671207992,
      "grad_norm": 1.859375,
      "learning_rate": 9.972053856911534e-06,
      "loss": 1.3098,
      "step": 430
    },
    {
      "epoch": 1.5985467756584923,
      "grad_norm": 1.84375,
      "learning_rate": 9.966191788709716e-06,
      "loss": 1.3503,
      "step": 440
    },
    {
      "epoch": 1.6348773841961854,
      "grad_norm": 2.609375,
      "learning_rate": 9.959774064153977e-06,
      "loss": 1.3301,
      "step": 450
    },
    {
      "epoch": 1.6712079927338783,
      "grad_norm": 1.8359375,
      "learning_rate": 9.952801401309504e-06,
      "loss": 1.2454,
      "step": 460
    },
    {
      "epoch": 1.7075386012715712,
      "grad_norm": 1.8515625,
      "learning_rate": 9.945274580332316e-06,
      "loss": 1.239,
      "step": 470
    },
    {
      "epoch": 1.7438692098092643,
      "grad_norm": 1.8046875,
      "learning_rate": 9.937194443381972e-06,
      "loss": 1.2823,
      "step": 480
    },
    {
      "epoch": 1.7801998183469574,
      "grad_norm": 2.203125,
      "learning_rate": 9.928561894527354e-06,
      "loss": 1.2774,
      "step": 490
    },
    {
      "epoch": 1.8165304268846503,
      "grad_norm": 2.15625,
      "learning_rate": 9.919377899645497e-06,
      "loss": 1.2215,
      "step": 500
    },
    {
      "epoch": 1.8528610354223434,
      "grad_norm": 1.671875,
      "learning_rate": 9.909643486313533e-06,
      "loss": 1.2509,
      "step": 510
    },
    {
      "epoch": 1.8891916439600362,
      "grad_norm": 1.8828125,
      "learning_rate": 9.899359743693715e-06,
      "loss": 1.2656,
      "step": 520
    },
    {
      "epoch": 1.9255222524977293,
      "grad_norm": 1.6328125,
      "learning_rate": 9.888527822411543e-06,
      "loss": 1.186,
      "step": 530
    },
    {
      "epoch": 1.9618528610354224,
      "grad_norm": 1.6953125,
      "learning_rate": 9.877148934427037e-06,
      "loss": 1.2468,
      "step": 540
    },
    {
      "epoch": 1.9981834695731153,
      "grad_norm": 1.59375,
      "learning_rate": 9.86522435289912e-06,
      "loss": 1.2471,
      "step": 550
    },
    {
      "epoch": 2.034514078110808,
      "grad_norm": 1.9140625,
      "learning_rate": 9.85275541204318e-06,
      "loss": 1.1688,
      "step": 560
    },
    {
      "epoch": 2.0708446866485013,
      "grad_norm": 2.046875,
      "learning_rate": 9.839743506981783e-06,
      "loss": 1.1216,
      "step": 570
    },
    {
      "epoch": 2.1071752951861944,
      "grad_norm": 1.6328125,
      "learning_rate": 9.826190093588564e-06,
      "loss": 1.1333,
      "step": 580
    },
    {
      "epoch": 2.1435059037238875,
      "grad_norm": 1.6953125,
      "learning_rate": 9.812096688325354e-06,
      "loss": 1.1148,
      "step": 590
    },
    {
      "epoch": 2.1798365122615806,
      "grad_norm": 1.796875,
      "learning_rate": 9.797464868072489e-06,
      "loss": 1.1331,
      "step": 600
    },
    {
      "epoch": 2.2161671207992732,
      "grad_norm": 1.890625,
      "learning_rate": 9.78229626995238e-06,
      "loss": 1.1931,
      "step": 610
    },
    {
      "epoch": 2.2524977293369663,
      "grad_norm": 1.578125,
      "learning_rate": 9.766592591146353e-06,
      "loss": 1.146,
      "step": 620
    },
    {
      "epoch": 2.2888283378746594,
      "grad_norm": 1.7890625,
      "learning_rate": 9.750355588704728e-06,
      "loss": 1.1397,
      "step": 630
    },
    {
      "epoch": 2.3251589464123525,
      "grad_norm": 1.8359375,
      "learning_rate": 9.733587079350254e-06,
      "loss": 1.0941,
      "step": 640
    },
    {
      "epoch": 2.3614895549500456,
      "grad_norm": 1.7265625,
      "learning_rate": 9.716288939274818e-06,
      "loss": 1.101,
      "step": 650
    },
    {
      "epoch": 2.3978201634877383,
      "grad_norm": 2.703125,
      "learning_rate": 9.698463103929542e-06,
      "loss": 1.0632,
      "step": 660
    },
    {
      "epoch": 2.4341507720254314,
      "grad_norm": 2.5625,
      "learning_rate": 9.680111567808212e-06,
      "loss": 1.1315,
      "step": 670
    },
    {
      "epoch": 2.4704813805631245,
      "grad_norm": 1.890625,
      "learning_rate": 9.66123638422413e-06,
      "loss": 1.0989,
      "step": 680
    },
    {
      "epoch": 2.5068119891008176,
      "grad_norm": 1.8828125,
      "learning_rate": 9.641839665080363e-06,
      "loss": 1.0829,
      "step": 690
    },
    {
      "epoch": 2.5431425976385107,
      "grad_norm": 1.9140625,
      "learning_rate": 9.621923580633462e-06,
      "loss": 1.1155,
      "step": 700
    },
    {
      "epoch": 2.5794732061762033,
      "grad_norm": 1.5546875,
      "learning_rate": 9.601490359250616e-06,
      "loss": 1.1248,
      "step": 710
    },
    {
      "epoch": 2.6158038147138964,
      "grad_norm": 1.7890625,
      "learning_rate": 9.580542287160348e-06,
      "loss": 1.1391,
      "step": 720
    },
    {
      "epoch": 2.6521344232515895,
      "grad_norm": 1.734375,
      "learning_rate": 9.559081708196696e-06,
      "loss": 1.0538,
      "step": 730
    },
    {
      "epoch": 2.688465031789282,
      "grad_norm": 1.953125,
      "learning_rate": 9.537111023536973e-06,
      "loss": 1.055,
      "step": 740
    },
    {
      "epoch": 2.7247956403269757,
      "grad_norm": 1.6328125,
      "learning_rate": 9.514632691433108e-06,
      "loss": 1.1769,
      "step": 750
    },
    {
      "epoch": 2.7611262488646684,
      "grad_norm": 2.609375,
      "learning_rate": 9.491649226936586e-06,
      "loss": 1.0949,
      "step": 760
    },
    {
      "epoch": 2.7974568574023615,
      "grad_norm": 1.9609375,
      "learning_rate": 9.468163201617063e-06,
      "loss": 1.1308,
      "step": 770
    },
    {
      "epoch": 2.8337874659400546,
      "grad_norm": 1.953125,
      "learning_rate": 9.444177243274619e-06,
      "loss": 1.1283,
      "step": 780
    },
    {
      "epoch": 2.8701180744777472,
      "grad_norm": 1.703125,
      "learning_rate": 9.419694035645753e-06,
      "loss": 1.0907,
      "step": 790
    },
    {
      "epoch": 2.9064486830154403,
      "grad_norm": 1.5,
      "learning_rate": 9.394716318103098e-06,
      "loss": 1.1043,
      "step": 800
    },
    {
      "epoch": 2.9427792915531334,
      "grad_norm": 2.3125,
      "learning_rate": 9.369246885348926e-06,
      "loss": 1.0676,
      "step": 810
    },
    {
      "epoch": 2.9791099000908265,
      "grad_norm": 1.65625,
      "learning_rate": 9.343288587102444e-06,
      "loss": 1.0642,
      "step": 820
    },
    {
      "epoch": 3.0154405086285196,
      "grad_norm": 1.5234375,
      "learning_rate": 9.316844327780955e-06,
      "loss": 1.0437,
      "step": 830
    },
    {
      "epoch": 3.0517711171662127,
      "grad_norm": 1.6953125,
      "learning_rate": 9.289917066174887e-06,
      "loss": 0.9874,
      "step": 840
    },
    {
      "epoch": 3.0881017257039054,
      "grad_norm": 1.578125,
      "learning_rate": 9.262509815116732e-06,
      "loss": 0.9582,
      "step": 850
    },
    {
      "epoch": 3.1244323342415985,
      "grad_norm": 1.8046875,
      "learning_rate": 9.234625641143962e-06,
      "loss": 0.9471,
      "step": 860
    },
    {
      "epoch": 3.1607629427792916,
      "grad_norm": 2.015625,
      "learning_rate": 9.206267664155906e-06,
      "loss": 0.9424,
      "step": 870
    },
    {
      "epoch": 3.1970935513169847,
      "grad_norm": 1.625,
      "learning_rate": 9.177439057064684e-06,
      "loss": 0.972,
      "step": 880
    },
    {
      "epoch": 3.2334241598546773,
      "grad_norm": 1.4921875,
      "learning_rate": 9.148143045440181e-06,
      "loss": 0.9729,
      "step": 890
    },
    {
      "epoch": 3.2697547683923704,
      "grad_norm": 1.8203125,
      "learning_rate": 9.118382907149164e-06,
      "loss": 0.9558,
      "step": 900
    },
    {
      "epoch": 3.3060853769300635,
      "grad_norm": 1.453125,
      "learning_rate": 9.088161971988517e-06,
      "loss": 0.9518,
      "step": 910
    },
    {
      "epoch": 3.3424159854677566,
      "grad_norm": 1.3671875,
      "learning_rate": 9.057483621312671e-06,
      "loss": 0.9694,
      "step": 920
    },
    {
      "epoch": 3.3787465940054497,
      "grad_norm": 1.15625,
      "learning_rate": 9.026351287655294e-06,
      "loss": 0.9457,
      "step": 930
    },
    {
      "epoch": 3.4150772025431424,
      "grad_norm": 1.3359375,
      "learning_rate": 8.994768454345207e-06,
      "loss": 0.9585,
      "step": 940
    },
    {
      "epoch": 3.4514078110808355,
      "grad_norm": 1.3203125,
      "learning_rate": 8.96273865511666e-06,
      "loss": 0.9479,
      "step": 950
    },
    {
      "epoch": 3.4877384196185286,
      "grad_norm": 1.3203125,
      "learning_rate": 8.930265473713939e-06,
      "loss": 0.9105,
      "step": 960
    },
    {
      "epoch": 3.5240690281562217,
      "grad_norm": 1.3828125,
      "learning_rate": 8.897352543490396e-06,
      "loss": 0.9795,
      "step": 970
    },
    {
      "epoch": 3.560399636693915,
      "grad_norm": 1.21875,
      "learning_rate": 8.864003547001916e-06,
      "loss": 0.9403,
      "step": 980
    },
    {
      "epoch": 3.5967302452316074,
      "grad_norm": 1.3203125,
      "learning_rate": 8.83022221559489e-06,
      "loss": 0.925,
      "step": 990
    },
    {
      "epoch": 3.6330608537693005,
      "grad_norm": 1.25,
      "learning_rate": 8.796012328988716e-06,
      "loss": 0.9225,
      "step": 1000
    },
    {
      "epoch": 3.6330608537693005,
      "eval_loss": 1.1292678117752075,
      "eval_runtime": 10.2075,
      "eval_samples_per_second": 24.002,
      "eval_steps_per_second": 24.002,
      "step": 1000
    },
    {
      "epoch": 3.6693914623069936,
      "grad_norm": 1.171875,
      "learning_rate": 8.7613777148529e-06,
      "loss": 0.9778,
      "step": 1010
    },
    {
      "epoch": 3.7057220708446867,
      "grad_norm": 1.234375,
      "learning_rate": 8.726322248378775e-06,
      "loss": 0.8997,
      "step": 1020
    },
    {
      "epoch": 3.74205267938238,
      "grad_norm": 1.1328125,
      "learning_rate": 8.690849851845933e-06,
      "loss": 0.9874,
      "step": 1030
    },
    {
      "epoch": 3.7783832879200725,
      "grad_norm": 1.453125,
      "learning_rate": 8.65496449418336e-06,
      "loss": 0.9101,
      "step": 1040
    },
    {
      "epoch": 3.8147138964577656,
      "grad_norm": 1.3515625,
      "learning_rate": 8.61867019052535e-06,
      "loss": 0.9101,
      "step": 1050
    },
    {
      "epoch": 3.8510445049954587,
      "grad_norm": 1.25,
      "learning_rate": 8.581971001762287e-06,
      "loss": 0.9549,
      "step": 1060
    },
    {
      "epoch": 3.887375113533152,
      "grad_norm": 1.375,
      "learning_rate": 8.54487103408625e-06,
      "loss": 0.9486,
      "step": 1070
    },
    {
      "epoch": 3.923705722070845,
      "grad_norm": 1.1953125,
      "learning_rate": 8.507374438531606e-06,
      "loss": 0.9476,
      "step": 1080
    },
    {
      "epoch": 3.9600363306085375,
      "grad_norm": 1.2421875,
      "learning_rate": 8.469485410510545e-06,
      "loss": 0.9283,
      "step": 1090
    },
    {
      "epoch": 3.9963669391462306,
      "grad_norm": 1.2109375,
      "learning_rate": 8.43120818934367e-06,
      "loss": 0.9616,
      "step": 1100
    },
    {
      "epoch": 4.032697547683924,
      "grad_norm": 1.5,
      "learning_rate": 8.392547057785662e-06,
      "loss": 0.8816,
      "step": 1110
    },
    {
      "epoch": 4.069028156221616,
      "grad_norm": 1.0546875,
      "learning_rate": 8.353506341546106e-06,
      "loss": 0.8704,
      "step": 1120
    },
    {
      "epoch": 4.10535876475931,
      "grad_norm": 1.0859375,
      "learning_rate": 8.314090408805481e-06,
      "loss": 0.8515,
      "step": 1130
    },
    {
      "epoch": 4.141689373297003,
      "grad_norm": 1.125,
      "learning_rate": 8.274303669726427e-06,
      "loss": 0.7986,
      "step": 1140
    },
    {
      "epoch": 4.178019981834696,
      "grad_norm": 0.9765625,
      "learning_rate": 8.234150575960288e-06,
      "loss": 0.859,
      "step": 1150
    },
    {
      "epoch": 4.214350590372389,
      "grad_norm": 1.0625,
      "learning_rate": 8.193635620149041e-06,
      "loss": 0.8537,
      "step": 1160
    },
    {
      "epoch": 4.2506811989100814,
      "grad_norm": 1.0625,
      "learning_rate": 8.152763335422612e-06,
      "loss": 0.8513,
      "step": 1170
    },
    {
      "epoch": 4.287011807447775,
      "grad_norm": 1.0234375,
      "learning_rate": 8.111538294891684e-06,
      "loss": 0.8318,
      "step": 1180
    },
    {
      "epoch": 4.323342415985468,
      "grad_norm": 1.03125,
      "learning_rate": 8.06996511113601e-06,
      "loss": 0.846,
      "step": 1190
    },
    {
      "epoch": 4.359673024523161,
      "grad_norm": 1.0,
      "learning_rate": 8.028048435688333e-06,
      "loss": 0.8667,
      "step": 1200
    },
    {
      "epoch": 4.396003633060854,
      "grad_norm": 0.9921875,
      "learning_rate": 7.985792958513932e-06,
      "loss": 0.8715,
      "step": 1210
    },
    {
      "epoch": 4.4323342415985465,
      "grad_norm": 0.9296875,
      "learning_rate": 7.943203407485864e-06,
      "loss": 0.848,
      "step": 1220
    },
    {
      "epoch": 4.46866485013624,
      "grad_norm": 0.94921875,
      "learning_rate": 7.900284547855992e-06,
      "loss": 0.8859,
      "step": 1230
    },
    {
      "epoch": 4.504995458673933,
      "grad_norm": 0.98046875,
      "learning_rate": 7.857041181721788e-06,
      "loss": 0.8575,
      "step": 1240
    },
    {
      "epoch": 4.541326067211626,
      "grad_norm": 0.94140625,
      "learning_rate": 7.813478147489052e-06,
      "loss": 0.8496,
      "step": 1250
    },
    {
      "epoch": 4.577656675749319,
      "grad_norm": 0.92578125,
      "learning_rate": 7.769600319330553e-06,
      "loss": 0.8166,
      "step": 1260
    },
    {
      "epoch": 4.6139872842870115,
      "grad_norm": 1.15625,
      "learning_rate": 7.725412606640658e-06,
      "loss": 0.8191,
      "step": 1270
    },
    {
      "epoch": 4.650317892824705,
      "grad_norm": 1.046875,
      "learning_rate": 7.680919953486047e-06,
      "loss": 0.833,
      "step": 1280
    },
    {
      "epoch": 4.686648501362398,
      "grad_norm": 1.0390625,
      "learning_rate": 7.636127338052513e-06,
      "loss": 0.839,
      "step": 1290
    },
    {
      "epoch": 4.722979109900091,
      "grad_norm": 1.2421875,
      "learning_rate": 7.5910397720879785e-06,
      "loss": 0.807,
      "step": 1300
    },
    {
      "epoch": 4.759309718437784,
      "grad_norm": 0.984375,
      "learning_rate": 7.545662300341736e-06,
      "loss": 0.8215,
      "step": 1310
    },
    {
      "epoch": 4.795640326975477,
      "grad_norm": 1.03125,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.8584,
      "step": 1320
    },
    {
      "epoch": 4.83197093551317,
      "grad_norm": 1.0546875,
      "learning_rate": 7.454057980117842e-06,
      "loss": 0.7955,
      "step": 1330
    },
    {
      "epoch": 4.868301544050863,
      "grad_norm": 1.140625,
      "learning_rate": 7.407841381047533e-06,
      "loss": 0.8482,
      "step": 1340
    },
    {
      "epoch": 4.904632152588556,
      "grad_norm": 1.234375,
      "learning_rate": 7.361355373863415e-06,
      "loss": 0.8457,
      "step": 1350
    },
    {
      "epoch": 4.940962761126249,
      "grad_norm": 0.91796875,
      "learning_rate": 7.314605159783313e-06,
      "loss": 0.8705,
      "step": 1360
    },
    {
      "epoch": 4.977293369663942,
      "grad_norm": 1.0234375,
      "learning_rate": 7.2675959695865896e-06,
      "loss": 0.829,
      "step": 1370
    },
    {
      "epoch": 5.013623978201635,
      "grad_norm": 0.9375,
      "learning_rate": 7.2203330630288714e-06,
      "loss": 0.8391,
      "step": 1380
    },
    {
      "epoch": 5.049954586739328,
      "grad_norm": 0.9921875,
      "learning_rate": 7.172821728253563e-06,
      "loss": 0.7915,
      "step": 1390
    },
    {
      "epoch": 5.0862851952770205,
      "grad_norm": 1.1171875,
      "learning_rate": 7.1250672812001505e-06,
      "loss": 0.8199,
      "step": 1400
    },
    {
      "epoch": 5.122615803814714,
      "grad_norm": 1.0,
      "learning_rate": 7.0770750650094335e-06,
      "loss": 0.8065,
      "step": 1410
    },
    {
      "epoch": 5.158946412352407,
      "grad_norm": 1.2578125,
      "learning_rate": 7.02885044942567e-06,
      "loss": 0.8098,
      "step": 1420
    },
    {
      "epoch": 5.1952770208901,
      "grad_norm": 1.0625,
      "learning_rate": 6.980398830195785e-06,
      "loss": 0.7675,
      "step": 1430
    },
    {
      "epoch": 5.231607629427793,
      "grad_norm": 1.125,
      "learning_rate": 6.931725628465643e-06,
      "loss": 0.7747,
      "step": 1440
    },
    {
      "epoch": 5.2679382379654855,
      "grad_norm": 1.375,
      "learning_rate": 6.882836290173493e-06,
      "loss": 0.7623,
      "step": 1450
    },
    {
      "epoch": 5.304268846503179,
      "grad_norm": 1.2109375,
      "learning_rate": 6.833736285440632e-06,
      "loss": 0.8113,
      "step": 1460
    },
    {
      "epoch": 5.340599455040872,
      "grad_norm": 1.3359375,
      "learning_rate": 6.78443110795936e-06,
      "loss": 0.8411,
      "step": 1470
    },
    {
      "epoch": 5.376930063578565,
      "grad_norm": 1.1171875,
      "learning_rate": 6.734926274378313e-06,
      "loss": 0.7592,
      "step": 1480
    },
    {
      "epoch": 5.413260672116258,
      "grad_norm": 1.234375,
      "learning_rate": 6.685227323685209e-06,
      "loss": 0.7776,
      "step": 1490
    },
    {
      "epoch": 5.449591280653951,
      "grad_norm": 1.46875,
      "learning_rate": 6.635339816587109e-06,
      "loss": 0.7727,
      "step": 1500
    },
    {
      "epoch": 5.485921889191644,
      "grad_norm": 1.40625,
      "learning_rate": 6.5852693348882345e-06,
      "loss": 0.7539,
      "step": 1510
    },
    {
      "epoch": 5.522252497729337,
      "grad_norm": 1.4609375,
      "learning_rate": 6.535021480865439e-06,
      "loss": 0.8292,
      "step": 1520
    },
    {
      "epoch": 5.55858310626703,
      "grad_norm": 1.8359375,
      "learning_rate": 6.484601876641375e-06,
      "loss": 0.7463,
      "step": 1530
    },
    {
      "epoch": 5.594913714804723,
      "grad_norm": 1.3515625,
      "learning_rate": 6.434016163555452e-06,
      "loss": 0.7744,
      "step": 1540
    },
    {
      "epoch": 5.631244323342416,
      "grad_norm": 2.578125,
      "learning_rate": 6.383270001532636e-06,
      "loss": 0.7868,
      "step": 1550
    },
    {
      "epoch": 5.667574931880109,
      "grad_norm": 2.65625,
      "learning_rate": 6.332369068450175e-06,
      "loss": 0.7485,
      "step": 1560
    },
    {
      "epoch": 5.703905540417802,
      "grad_norm": 2.34375,
      "learning_rate": 6.2813190595023135e-06,
      "loss": 0.7783,
      "step": 1570
    },
    {
      "epoch": 5.740236148955495,
      "grad_norm": 2.40625,
      "learning_rate": 6.230125686563068e-06,
      "loss": 0.7769,
      "step": 1580
    },
    {
      "epoch": 5.776566757493188,
      "grad_norm": 2.390625,
      "learning_rate": 6.178794677547138e-06,
      "loss": 0.7977,
      "step": 1590
    },
    {
      "epoch": 5.812897366030881,
      "grad_norm": 3.65625,
      "learning_rate": 6.127331775769023e-06,
      "loss": 0.8101,
      "step": 1600
    },
    {
      "epoch": 5.849227974568574,
      "grad_norm": 5.4375,
      "learning_rate": 6.07574273930042e-06,
      "loss": 0.8397,
      "step": 1610
    },
    {
      "epoch": 5.885558583106267,
      "grad_norm": 4.78125,
      "learning_rate": 6.024033340325954e-06,
      "loss": 0.8191,
      "step": 1620
    },
    {
      "epoch": 5.9218891916439595,
      "grad_norm": 4.625,
      "learning_rate": 5.972209364497355e-06,
      "loss": 0.7843,
      "step": 1630
    },
    {
      "epoch": 5.958219800181653,
      "grad_norm": 5.4375,
      "learning_rate": 5.920276610286102e-06,
      "loss": 0.8099,
      "step": 1640
    },
    {
      "epoch": 5.994550408719346,
      "grad_norm": 10.875,
      "learning_rate": 5.8682408883346535e-06,
      "loss": 0.8025,
      "step": 1650
    },
    {
      "epoch": 6.030881017257039,
      "grad_norm": 10.25,
      "learning_rate": 5.816108020806297e-06,
      "loss": 0.7426,
      "step": 1660
    },
    {
      "epoch": 6.067211625794732,
      "grad_norm": 9.8125,
      "learning_rate": 5.763883840733736e-06,
      "loss": 0.7439,
      "step": 1670
    },
    {
      "epoch": 6.1035422343324255,
      "grad_norm": 8.4375,
      "learning_rate": 5.711574191366427e-06,
      "loss": 0.7617,
      "step": 1680
    },
    {
      "epoch": 6.139872842870118,
      "grad_norm": 8.4375,
      "learning_rate": 5.659184925516802e-06,
      "loss": 0.7333,
      "step": 1690
    },
    {
      "epoch": 6.176203451407811,
      "grad_norm": 2.453125,
      "learning_rate": 5.60672190490541e-06,
      "loss": 0.7747,
      "step": 1700
    },
    {
      "epoch": 6.212534059945504,
      "grad_norm": 2.5625,
      "learning_rate": 5.5541909995050554e-06,
      "loss": 0.7469,
      "step": 1710
    },
    {
      "epoch": 6.248864668483197,
      "grad_norm": 3.015625,
      "learning_rate": 5.5015980868840254e-06,
      "loss": 0.7507,
      "step": 1720
    },
    {
      "epoch": 6.28519527702089,
      "grad_norm": 2.546875,
      "learning_rate": 5.448949051548459e-06,
      "loss": 0.7774,
      "step": 1730
    },
    {
      "epoch": 6.321525885558583,
      "grad_norm": 2.921875,
      "learning_rate": 5.396249784283943e-06,
      "loss": 0.773,
      "step": 1740
    },
    {
      "epoch": 6.357856494096276,
      "grad_norm": 2.21875,
      "learning_rate": 5.343506181496405e-06,
      "loss": 0.7333,
      "step": 1750
    },
    {
      "epoch": 6.394187102633969,
      "grad_norm": 2.28125,
      "learning_rate": 5.290724144552379e-06,
      "loss": 0.7352,
      "step": 1760
    },
    {
      "epoch": 6.430517711171662,
      "grad_norm": 2.3125,
      "learning_rate": 5.237909579118713e-06,
      "loss": 0.68,
      "step": 1770
    },
    {
      "epoch": 6.466848319709355,
      "grad_norm": 2.296875,
      "learning_rate": 5.185068394501791e-06,
      "loss": 0.7279,
      "step": 1780
    },
    {
      "epoch": 6.503178928247048,
      "grad_norm": 2.53125,
      "learning_rate": 5.132206502986368e-06,
      "loss": 0.7547,
      "step": 1790
    },
    {
      "epoch": 6.539509536784741,
      "grad_norm": 2.421875,
      "learning_rate": 5.07932981917404e-06,
      "loss": 0.7015,
      "step": 1800
    },
    {
      "epoch": 6.575840145322434,
      "grad_norm": 2.4375,
      "learning_rate": 5.026444259321489e-06,
      "loss": 0.7614,
      "step": 1810
    },
    {
      "epoch": 6.612170753860127,
      "grad_norm": 2.171875,
      "learning_rate": 4.973555740678512e-06,
      "loss": 0.7953,
      "step": 1820
    },
    {
      "epoch": 6.64850136239782,
      "grad_norm": 2.40625,
      "learning_rate": 4.9206701808259605e-06,
      "loss": 0.736,
      "step": 1830
    },
    {
      "epoch": 6.684831970935513,
      "grad_norm": 2.28125,
      "learning_rate": 4.867793497013634e-06,
      "loss": 0.7502,
      "step": 1840
    },
    {
      "epoch": 6.721162579473206,
      "grad_norm": 2.390625,
      "learning_rate": 4.81493160549821e-06,
      "loss": 0.7496,
      "step": 1850
    },
    {
      "epoch": 6.7574931880108995,
      "grad_norm": 2.359375,
      "learning_rate": 4.762090420881289e-06,
      "loss": 0.7888,
      "step": 1860
    },
    {
      "epoch": 6.793823796548592,
      "grad_norm": 2.078125,
      "learning_rate": 4.7092758554476215e-06,
      "loss": 0.7238,
      "step": 1870
    },
    {
      "epoch": 6.830154405086285,
      "grad_norm": 2.1875,
      "learning_rate": 4.6564938185035954e-06,
      "loss": 0.7484,
      "step": 1880
    },
    {
      "epoch": 6.866485013623978,
      "grad_norm": 2.0625,
      "learning_rate": 4.603750215716057e-06,
      "loss": 0.7835,
      "step": 1890
    },
    {
      "epoch": 6.902815622161671,
      "grad_norm": 2.53125,
      "learning_rate": 4.551050948451542e-06,
      "loss": 0.6878,
      "step": 1900
    },
    {
      "epoch": 6.9391462306993645,
      "grad_norm": 1.953125,
      "learning_rate": 4.498401913115975e-06,
      "loss": 0.7528,
      "step": 1910
    },
    {
      "epoch": 6.975476839237057,
      "grad_norm": 2.0625,
      "learning_rate": 4.445809000494945e-06,
      "loss": 0.7486,
      "step": 1920
    },
    {
      "epoch": 7.01180744777475,
      "grad_norm": 2.15625,
      "learning_rate": 4.393278095094591e-06,
      "loss": 0.7948,
      "step": 1930
    },
    {
      "epoch": 7.048138056312443,
      "grad_norm": 2.03125,
      "learning_rate": 4.340815074483199e-06,
      "loss": 0.6872,
      "step": 1940
    },
    {
      "epoch": 7.084468664850136,
      "grad_norm": 1.953125,
      "learning_rate": 4.2884258086335755e-06,
      "loss": 0.7289,
      "step": 1950
    },
    {
      "epoch": 7.12079927338783,
      "grad_norm": 2.15625,
      "learning_rate": 4.2361161592662655e-06,
      "loss": 0.612,
      "step": 1960
    },
    {
      "epoch": 7.157129881925522,
      "grad_norm": 2.1875,
      "learning_rate": 4.183891979193703e-06,
      "loss": 0.7135,
      "step": 1970
    },
    {
      "epoch": 7.193460490463215,
      "grad_norm": 2.28125,
      "learning_rate": 4.131759111665349e-06,
      "loss": 0.6932,
      "step": 1980
    },
    {
      "epoch": 7.229791099000908,
      "grad_norm": 1.9453125,
      "learning_rate": 4.079723389713899e-06,
      "loss": 0.705,
      "step": 1990
    },
    {
      "epoch": 7.266121707538601,
      "grad_norm": 2.09375,
      "learning_rate": 4.027790635502646e-06,
      "loss": 0.7145,
      "step": 2000
    },
    {
      "epoch": 7.266121707538601,
      "eval_loss": 1.1055254936218262,
      "eval_runtime": 10.2576,
      "eval_samples_per_second": 23.885,
      "eval_steps_per_second": 23.885,
      "step": 2000
    },
    {
      "epoch": 7.302452316076295,
      "grad_norm": 1.921875,
      "learning_rate": 3.975966659674048e-06,
      "loss": 0.6724,
      "step": 2010
    },
    {
      "epoch": 7.338782924613987,
      "grad_norm": 2.3125,
      "learning_rate": 3.924257260699583e-06,
      "loss": 0.6468,
      "step": 2020
    },
    {
      "epoch": 7.37511353315168,
      "grad_norm": 2.140625,
      "learning_rate": 3.872668224230979e-06,
      "loss": 0.6792,
      "step": 2030
    },
    {
      "epoch": 7.4114441416893735,
      "grad_norm": 2.265625,
      "learning_rate": 3.821205322452863e-06,
      "loss": 0.723,
      "step": 2040
    },
    {
      "epoch": 7.447774750227066,
      "grad_norm": 1.984375,
      "learning_rate": 3.769874313436933e-06,
      "loss": 0.6608,
      "step": 2050
    },
    {
      "epoch": 7.48410535876476,
      "grad_norm": 2.21875,
      "learning_rate": 3.7186809404976877e-06,
      "loss": 0.6679,
      "step": 2060
    },
    {
      "epoch": 7.520435967302452,
      "grad_norm": 2.078125,
      "learning_rate": 3.667630931549826e-06,
      "loss": 0.6944,
      "step": 2070
    },
    {
      "epoch": 7.556766575840145,
      "grad_norm": 2.15625,
      "learning_rate": 3.6167299984673655e-06,
      "loss": 0.7172,
      "step": 2080
    },
    {
      "epoch": 7.5930971843778385,
      "grad_norm": 2.265625,
      "learning_rate": 3.5659838364445505e-06,
      "loss": 0.7029,
      "step": 2090
    },
    {
      "epoch": 7.629427792915531,
      "grad_norm": 2.078125,
      "learning_rate": 3.5153981233586277e-06,
      "loss": 0.6549,
      "step": 2100
    },
    {
      "epoch": 7.665758401453225,
      "grad_norm": 2.1875,
      "learning_rate": 3.4649785191345613e-06,
      "loss": 0.7342,
      "step": 2110
    },
    {
      "epoch": 7.702089009990917,
      "grad_norm": 2.09375,
      "learning_rate": 3.4147306651117663e-06,
      "loss": 0.6246,
      "step": 2120
    },
    {
      "epoch": 7.73841961852861,
      "grad_norm": 1.90625,
      "learning_rate": 3.3646601834128924e-06,
      "loss": 0.6587,
      "step": 2130
    },
    {
      "epoch": 7.774750227066304,
      "grad_norm": 1.953125,
      "learning_rate": 3.3147726763147913e-06,
      "loss": 0.6682,
      "step": 2140
    },
    {
      "epoch": 7.811080835603996,
      "grad_norm": 2.421875,
      "learning_rate": 3.2650737256216885e-06,
      "loss": 0.6854,
      "step": 2150
    },
    {
      "epoch": 7.84741144414169,
      "grad_norm": 2.1875,
      "learning_rate": 3.2155688920406415e-06,
      "loss": 0.6908,
      "step": 2160
    },
    {
      "epoch": 7.883742052679382,
      "grad_norm": 2.046875,
      "learning_rate": 3.16626371455937e-06,
      "loss": 0.6875,
      "step": 2170
    },
    {
      "epoch": 7.920072661217075,
      "grad_norm": 2.28125,
      "learning_rate": 3.1171637098265063e-06,
      "loss": 0.6732,
      "step": 2180
    },
    {
      "epoch": 7.956403269754769,
      "grad_norm": 1.9765625,
      "learning_rate": 3.0682743715343565e-06,
      "loss": 0.742,
      "step": 2190
    },
    {
      "epoch": 7.992733878292461,
      "grad_norm": 2.796875,
      "learning_rate": 3.019601169804216e-06,
      "loss": 0.6791,
      "step": 2200
    },
    {
      "epoch": 8.029064486830155,
      "grad_norm": 1.796875,
      "learning_rate": 2.9711495505743317e-06,
      "loss": 0.654,
      "step": 2210
    },
    {
      "epoch": 8.065395095367847,
      "grad_norm": 2.0,
      "learning_rate": 2.9229249349905686e-06,
      "loss": 0.705,
      "step": 2220
    },
    {
      "epoch": 8.10172570390554,
      "grad_norm": 2.0,
      "learning_rate": 2.8749327187998516e-06,
      "loss": 0.6067,
      "step": 2230
    },
    {
      "epoch": 8.138056312443233,
      "grad_norm": 2.03125,
      "learning_rate": 2.8271782717464413e-06,
      "loss": 0.6043,
      "step": 2240
    },
    {
      "epoch": 8.174386920980927,
      "grad_norm": 1.6875,
      "learning_rate": 2.7796669369711294e-06,
      "loss": 0.6292,
      "step": 2250
    },
    {
      "epoch": 8.21071752951862,
      "grad_norm": 2.0625,
      "learning_rate": 2.7324040304134125e-06,
      "loss": 0.6041,
      "step": 2260
    },
    {
      "epoch": 8.247048138056313,
      "grad_norm": 2.296875,
      "learning_rate": 2.685394840216688e-06,
      "loss": 0.6222,
      "step": 2270
    },
    {
      "epoch": 8.283378746594005,
      "grad_norm": 2.046875,
      "learning_rate": 2.6386446261365874e-06,
      "loss": 0.6586,
      "step": 2280
    },
    {
      "epoch": 8.319709355131698,
      "grad_norm": 2.015625,
      "learning_rate": 2.5921586189524694e-06,
      "loss": 0.6612,
      "step": 2290
    },
    {
      "epoch": 8.356039963669392,
      "grad_norm": 2.0,
      "learning_rate": 2.5459420198821604e-06,
      "loss": 0.6693,
      "step": 2300
    },
    {
      "epoch": 8.392370572207085,
      "grad_norm": 2.125,
      "learning_rate": 2.5000000000000015e-06,
      "loss": 0.6244,
      "step": 2310
    },
    {
      "epoch": 8.428701180744778,
      "grad_norm": 2.09375,
      "learning_rate": 2.454337699658267e-06,
      "loss": 0.6312,
      "step": 2320
    },
    {
      "epoch": 8.46503178928247,
      "grad_norm": 2.09375,
      "learning_rate": 2.4089602279120224e-06,
      "loss": 0.6729,
      "step": 2330
    },
    {
      "epoch": 8.501362397820163,
      "grad_norm": 1.8125,
      "learning_rate": 2.363872661947488e-06,
      "loss": 0.6276,
      "step": 2340
    },
    {
      "epoch": 8.537693006357856,
      "grad_norm": 1.90625,
      "learning_rate": 2.319080046513954e-06,
      "loss": 0.6292,
      "step": 2350
    },
    {
      "epoch": 8.57402361489555,
      "grad_norm": 2.125,
      "learning_rate": 2.274587393359342e-06,
      "loss": 0.6593,
      "step": 2360
    },
    {
      "epoch": 8.610354223433243,
      "grad_norm": 1.875,
      "learning_rate": 2.230399680669449e-06,
      "loss": 0.6302,
      "step": 2370
    },
    {
      "epoch": 8.646684831970935,
      "grad_norm": 1.796875,
      "learning_rate": 2.1865218525109496e-06,
      "loss": 0.6481,
      "step": 2380
    },
    {
      "epoch": 8.683015440508628,
      "grad_norm": 2.15625,
      "learning_rate": 2.1429588182782147e-06,
      "loss": 0.6175,
      "step": 2390
    },
    {
      "epoch": 8.719346049046322,
      "grad_norm": 1.953125,
      "learning_rate": 2.09971545214401e-06,
      "loss": 0.6144,
      "step": 2400
    },
    {
      "epoch": 8.755676657584015,
      "grad_norm": 1.6875,
      "learning_rate": 2.0567965925141366e-06,
      "loss": 0.6572,
      "step": 2410
    },
    {
      "epoch": 8.792007266121708,
      "grad_norm": 1.890625,
      "learning_rate": 2.0142070414860704e-06,
      "loss": 0.654,
      "step": 2420
    },
    {
      "epoch": 8.8283378746594,
      "grad_norm": 1.8359375,
      "learning_rate": 1.971951564311668e-06,
      "loss": 0.621,
      "step": 2430
    },
    {
      "epoch": 8.864668483197093,
      "grad_norm": 1.78125,
      "learning_rate": 1.9300348888639915e-06,
      "loss": 0.62,
      "step": 2440
    },
    {
      "epoch": 8.900999091734786,
      "grad_norm": 1.6640625,
      "learning_rate": 1.8884617051083183e-06,
      "loss": 0.5858,
      "step": 2450
    },
    {
      "epoch": 8.93732970027248,
      "grad_norm": 1.578125,
      "learning_rate": 1.8472366645773892e-06,
      "loss": 0.6467,
      "step": 2460
    },
    {
      "epoch": 8.973660308810173,
      "grad_norm": 1.921875,
      "learning_rate": 1.8063643798509594e-06,
      "loss": 0.6138,
      "step": 2470
    },
    {
      "epoch": 9.009990917347865,
      "grad_norm": 1.65625,
      "learning_rate": 1.7658494240397127e-06,
      "loss": 0.6388,
      "step": 2480
    },
    {
      "epoch": 9.046321525885558,
      "grad_norm": 1.4453125,
      "learning_rate": 1.7256963302735752e-06,
      "loss": 0.5733,
      "step": 2490
    },
    {
      "epoch": 9.082652134423252,
      "grad_norm": 1.421875,
      "learning_rate": 1.68590959119452e-06,
      "loss": 0.6066,
      "step": 2500
    },
    {
      "epoch": 9.118982742960945,
      "grad_norm": 1.2890625,
      "learning_rate": 1.646493658453896e-06,
      "loss": 0.5849,
      "step": 2510
    },
    {
      "epoch": 9.155313351498638,
      "grad_norm": 1.34375,
      "learning_rate": 1.6074529422143398e-06,
      "loss": 0.6136,
      "step": 2520
    },
    {
      "epoch": 9.19164396003633,
      "grad_norm": 1.34375,
      "learning_rate": 1.5687918106563326e-06,
      "loss": 0.5891,
      "step": 2530
    },
    {
      "epoch": 9.227974568574023,
      "grad_norm": 1.5234375,
      "learning_rate": 1.5305145894894547e-06,
      "loss": 0.6065,
      "step": 2540
    },
    {
      "epoch": 9.264305177111716,
      "grad_norm": 1.4453125,
      "learning_rate": 1.4926255614683931e-06,
      "loss": 0.5854,
      "step": 2550
    },
    {
      "epoch": 9.30063578564941,
      "grad_norm": 1.5703125,
      "learning_rate": 1.4551289659137497e-06,
      "loss": 0.6023,
      "step": 2560
    },
    {
      "epoch": 9.336966394187103,
      "grad_norm": 1.4296875,
      "learning_rate": 1.4180289982377138e-06,
      "loss": 0.6052,
      "step": 2570
    },
    {
      "epoch": 9.373297002724795,
      "grad_norm": 1.4453125,
      "learning_rate": 1.3813298094746491e-06,
      "loss": 0.5806,
      "step": 2580
    },
    {
      "epoch": 9.409627611262488,
      "grad_norm": 1.421875,
      "learning_rate": 1.345035505816642e-06,
      "loss": 0.6058,
      "step": 2590
    },
    {
      "epoch": 9.44595821980018,
      "grad_norm": 1.3671875,
      "learning_rate": 1.3091501481540676e-06,
      "loss": 0.6339,
      "step": 2600
    },
    {
      "epoch": 9.482288828337875,
      "grad_norm": 1.2734375,
      "learning_rate": 1.2736777516212267e-06,
      "loss": 0.6058,
      "step": 2610
    },
    {
      "epoch": 9.518619436875568,
      "grad_norm": 1.546875,
      "learning_rate": 1.238622285147103e-06,
      "loss": 0.6186,
      "step": 2620
    },
    {
      "epoch": 9.55495004541326,
      "grad_norm": 1.3203125,
      "learning_rate": 1.2039876710112847e-06,
      "loss": 0.5913,
      "step": 2630
    },
    {
      "epoch": 9.591280653950953,
      "grad_norm": 1.359375,
      "learning_rate": 1.1697777844051105e-06,
      "loss": 0.663,
      "step": 2640
    },
    {
      "epoch": 9.627611262488646,
      "grad_norm": 1.2109375,
      "learning_rate": 1.135996452998085e-06,
      "loss": 0.6278,
      "step": 2650
    },
    {
      "epoch": 9.66394187102634,
      "grad_norm": 1.390625,
      "learning_rate": 1.1026474565096068e-06,
      "loss": 0.6074,
      "step": 2660
    },
    {
      "epoch": 9.700272479564033,
      "grad_norm": 1.4609375,
      "learning_rate": 1.0697345262860638e-06,
      "loss": 0.6545,
      "step": 2670
    },
    {
      "epoch": 9.736603088101726,
      "grad_norm": 1.421875,
      "learning_rate": 1.0372613448833429e-06,
      "loss": 0.6141,
      "step": 2680
    },
    {
      "epoch": 9.772933696639418,
      "grad_norm": 1.3125,
      "learning_rate": 1.0052315456547934e-06,
      "loss": 0.5699,
      "step": 2690
    },
    {
      "epoch": 9.809264305177111,
      "grad_norm": 1.296875,
      "learning_rate": 9.73648712344707e-07,
      "loss": 0.579,
      "step": 2700
    },
    {
      "epoch": 9.845594913714805,
      "grad_norm": 1.2421875,
      "learning_rate": 9.425163786873292e-07,
      "loss": 0.61,
      "step": 2710
    },
    {
      "epoch": 9.881925522252498,
      "grad_norm": 1.296875,
      "learning_rate": 9.118380280114858e-07,
      "loss": 0.6106,
      "step": 2720
    },
    {
      "epoch": 9.91825613079019,
      "grad_norm": 1.296875,
      "learning_rate": 8.816170928508367e-07,
      "loss": 0.6066,
      "step": 2730
    },
    {
      "epoch": 9.954586739327883,
      "grad_norm": 1.0703125,
      "learning_rate": 8.518569545598198e-07,
      "loss": 0.6094,
      "step": 2740
    },
    {
      "epoch": 9.990917347865576,
      "grad_norm": 1.1640625,
      "learning_rate": 8.225609429353187e-07,
      "loss": 0.6383,
      "step": 2750
    },
    {
      "epoch": 10.02724795640327,
      "grad_norm": 1.203125,
      "learning_rate": 7.937323358440935e-07,
      "loss": 0.6017,
      "step": 2760
    },
    {
      "epoch": 10.063578564940963,
      "grad_norm": 1.03125,
      "learning_rate": 7.653743588560387e-07,
      "loss": 0.6005,
      "step": 2770
    },
    {
      "epoch": 10.099909173478656,
      "grad_norm": 1.03125,
      "learning_rate": 7.374901848832683e-07,
      "loss": 0.647,
      "step": 2780
    },
    {
      "epoch": 10.136239782016348,
      "grad_norm": 1.0625,
      "learning_rate": 7.100829338251147e-07,
      "loss": 0.5894,
      "step": 2790
    },
    {
      "epoch": 10.172570390554041,
      "grad_norm": 0.953125,
      "learning_rate": 6.831556722190453e-07,
      "loss": 0.6075,
      "step": 2800
    },
    {
      "epoch": 10.208900999091735,
      "grad_norm": 0.97265625,
      "learning_rate": 6.567114128975571e-07,
      "loss": 0.5531,
      "step": 2810
    },
    {
      "epoch": 10.245231607629428,
      "grad_norm": 1.1171875,
      "learning_rate": 6.307531146510754e-07,
      "loss": 0.6052,
      "step": 2820
    },
    {
      "epoch": 10.28156221616712,
      "grad_norm": 1.0234375,
      "learning_rate": 6.052836818969027e-07,
      "loss": 0.5833,
      "step": 2830
    },
    {
      "epoch": 10.317892824704813,
      "grad_norm": 1.1484375,
      "learning_rate": 5.803059643542491e-07,
      "loss": 0.5934,
      "step": 2840
    },
    {
      "epoch": 10.354223433242506,
      "grad_norm": 0.9609375,
      "learning_rate": 5.558227567253832e-07,
      "loss": 0.5931,
      "step": 2850
    },
    {
      "epoch": 10.3905540417802,
      "grad_norm": 1.15625,
      "learning_rate": 5.318367983829393e-07,
      "loss": 0.6253,
      "step": 2860
    },
    {
      "epoch": 10.426884650317893,
      "grad_norm": 1.0390625,
      "learning_rate": 5.083507730634152e-07,
      "loss": 0.61,
      "step": 2870
    },
    {
      "epoch": 10.463215258855586,
      "grad_norm": 1.0234375,
      "learning_rate": 4.853673085668947e-07,
      "loss": 0.6329,
      "step": 2880
    },
    {
      "epoch": 10.499545867393278,
      "grad_norm": 0.96484375,
      "learning_rate": 4.628889764630279e-07,
      "loss": 0.5733,
      "step": 2890
    },
    {
      "epoch": 10.535876475930971,
      "grad_norm": 1.0,
      "learning_rate": 4.4091829180330503e-07,
      "loss": 0.5922,
      "step": 2900
    },
    {
      "epoch": 10.572207084468666,
      "grad_norm": 1.0078125,
      "learning_rate": 4.194577128396521e-07,
      "loss": 0.5843,
      "step": 2910
    },
    {
      "epoch": 10.608537693006358,
      "grad_norm": 1.0546875,
      "learning_rate": 3.985096407493838e-07,
      "loss": 0.6028,
      "step": 2920
    },
    {
      "epoch": 10.64486830154405,
      "grad_norm": 0.984375,
      "learning_rate": 3.7807641936653984e-07,
      "loss": 0.5767,
      "step": 2930
    },
    {
      "epoch": 10.681198910081743,
      "grad_norm": 1.0625,
      "learning_rate": 3.581603349196372e-07,
      "loss": 0.6062,
      "step": 2940
    },
    {
      "epoch": 10.717529518619436,
      "grad_norm": 1.0,
      "learning_rate": 3.3876361577587115e-07,
      "loss": 0.5978,
      "step": 2950
    },
    {
      "epoch": 10.75386012715713,
      "grad_norm": 1.046875,
      "learning_rate": 3.1988843219178776e-07,
      "loss": 0.5984,
      "step": 2960
    },
    {
      "epoch": 10.790190735694823,
      "grad_norm": 1.1953125,
      "learning_rate": 3.015368960704584e-07,
      "loss": 0.5761,
      "step": 2970
    },
    {
      "epoch": 10.826521344232516,
      "grad_norm": 1.0078125,
      "learning_rate": 2.8371106072518194e-07,
      "loss": 0.5988,
      "step": 2980
    },
    {
      "epoch": 10.862851952770209,
      "grad_norm": 1.015625,
      "learning_rate": 2.664129206497479e-07,
      "loss": 0.593,
      "step": 2990
    },
    {
      "epoch": 10.899182561307901,
      "grad_norm": 1.125,
      "learning_rate": 2.4964441129527337e-07,
      "loss": 0.6401,
      "step": 3000
    },
    {
      "epoch": 10.899182561307901,
      "eval_loss": 1.12338387966156,
      "eval_runtime": 10.3307,
      "eval_samples_per_second": 23.716,
      "eval_steps_per_second": 23.716,
      "step": 3000
    },
    {
      "epoch": 10.935513169845596,
      "grad_norm": 0.921875,
      "learning_rate": 2.3340740885364922e-07,
      "loss": 0.5798,
      "step": 3010
    },
    {
      "epoch": 10.971843778383288,
      "grad_norm": 1.0546875,
      "learning_rate": 2.1770373004762035e-07,
      "loss": 0.5684,
      "step": 3020
    },
    {
      "epoch": 11.008174386920981,
      "grad_norm": 1.171875,
      "learning_rate": 2.0253513192751374e-07,
      "loss": 0.619,
      "step": 3030
    },
    {
      "epoch": 11.044504995458674,
      "grad_norm": 1.125,
      "learning_rate": 1.8790331167464758e-07,
      "loss": 0.5739,
      "step": 3040
    },
    {
      "epoch": 11.080835603996366,
      "grad_norm": 1.140625,
      "learning_rate": 1.738099064114368e-07,
      "loss": 0.6039,
      "step": 3050
    },
    {
      "epoch": 11.11716621253406,
      "grad_norm": 1.3359375,
      "learning_rate": 1.6025649301821877e-07,
      "loss": 0.6057,
      "step": 3060
    },
    {
      "epoch": 11.153496821071753,
      "grad_norm": 1.2109375,
      "learning_rate": 1.4724458795681962e-07,
      "loss": 0.5966,
      "step": 3070
    },
    {
      "epoch": 11.189827429609446,
      "grad_norm": 1.109375,
      "learning_rate": 1.3477564710088097e-07,
      "loss": 0.5987,
      "step": 3080
    },
    {
      "epoch": 11.226158038147139,
      "grad_norm": 1.1796875,
      "learning_rate": 1.2285106557296479e-07,
      "loss": 0.614,
      "step": 3090
    },
    {
      "epoch": 11.262488646684831,
      "grad_norm": 1.25,
      "learning_rate": 1.1147217758845752e-07,
      "loss": 0.5854,
      "step": 3100
    },
    {
      "epoch": 11.298819255222526,
      "grad_norm": 1.2734375,
      "learning_rate": 1.0064025630628583e-07,
      "loss": 0.5866,
      "step": 3110
    },
    {
      "epoch": 11.335149863760218,
      "grad_norm": 1.25,
      "learning_rate": 9.035651368646647e-08,
      "loss": 0.6412,
      "step": 3120
    },
    {
      "epoch": 11.371480472297911,
      "grad_norm": 1.203125,
      "learning_rate": 8.06221003545038e-08,
      "loss": 0.5935,
      "step": 3130
    },
    {
      "epoch": 11.407811080835604,
      "grad_norm": 1.3125,
      "learning_rate": 7.143810547264762e-08,
      "loss": 0.6097,
      "step": 3140
    },
    {
      "epoch": 11.444141689373296,
      "grad_norm": 2.1875,
      "learning_rate": 6.280555661802857e-08,
      "loss": 0.5737,
      "step": 3150
    },
    {
      "epoch": 11.48047229791099,
      "grad_norm": 2.09375,
      "learning_rate": 5.472541966768552e-08,
      "loss": 0.5908,
      "step": 3160
    },
    {
      "epoch": 11.516802906448683,
      "grad_norm": 2.078125,
      "learning_rate": 4.719859869049659e-08,
      "loss": 0.5884,
      "step": 3170
    },
    {
      "epoch": 11.553133514986376,
      "grad_norm": 2.125,
      "learning_rate": 4.02259358460233e-08,
      "loss": 0.6072,
      "step": 3180
    },
    {
      "epoch": 11.589464123524069,
      "grad_norm": 2.1875,
      "learning_rate": 3.3808211290284886e-08,
      "loss": 0.6049,
      "step": 3190
    },
    {
      "epoch": 11.625794732061761,
      "grad_norm": 3.734375,
      "learning_rate": 2.7946143088466437e-08,
      "loss": 0.6021,
      "step": 3200
    },
    {
      "epoch": 11.662125340599456,
      "grad_norm": 4.34375,
      "learning_rate": 2.264038713457706e-08,
      "loss": 0.6059,
      "step": 3210
    },
    {
      "epoch": 11.698455949137148,
      "grad_norm": 3.9375,
      "learning_rate": 1.789153707806357e-08,
      "loss": 0.601,
      "step": 3220
    },
    {
      "epoch": 11.734786557674841,
      "grad_norm": 4.0625,
      "learning_rate": 1.3700124257388092e-08,
      "loss": 0.5657,
      "step": 3230
    },
    {
      "epoch": 11.771117166212534,
      "grad_norm": 4.25,
      "learning_rate": 1.006661764057837e-08,
      "loss": 0.6255,
      "step": 3240
    },
    {
      "epoch": 11.807447774750226,
      "grad_norm": 7.65625,
      "learning_rate": 6.991423772753636e-09,
      "loss": 0.6209,
      "step": 3250
    },
    {
      "epoch": 11.84377838328792,
      "grad_norm": 7.90625,
      "learning_rate": 4.474886730641004e-09,
      "loss": 0.6111,
      "step": 3260
    },
    {
      "epoch": 11.880108991825614,
      "grad_norm": 8.5625,
      "learning_rate": 2.5172880840745873e-09,
      "loss": 0.6328,
      "step": 3270
    },
    {
      "epoch": 11.916439600363306,
      "grad_norm": 8.375,
      "learning_rate": 1.118846864490708e-09,
      "loss": 0.5511,
      "step": 3280
    },
    {
      "epoch": 11.952770208900999,
      "grad_norm": 8.4375,
      "learning_rate": 2.797195404247166e-10,
      "loss": 0.5795,
      "step": 3290
    },
    {
      "epoch": 11.989100817438691,
      "grad_norm": 2.59375,
      "learning_rate": 0.0,
      "loss": 0.5714,
      "step": 3300
    },
    {
      "epoch": 11.989100817438691,
      "step": 3300,
      "total_flos": 3.144153968877896e+17,
      "train_loss": 0.880180758562955,
      "train_runtime": 3585.8754,
      "train_samples_per_second": 7.369,
      "train_steps_per_second": 0.92
    }
  ],
  "logging_steps": 10,
  "max_steps": 3300,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 12,
  "save_steps": 0,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.144153968877896e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}