{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.995749704840614,
  "eval_steps": 500,
  "global_step": 6348,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "grad_norm": 1.1830811500549316,
      "learning_rate": 1.1811023622047244e-06,
      "loss": 2.1629,
      "step": 25
    },
    {
      "epoch": 0.05,
      "grad_norm": 1.7049249410629272,
      "learning_rate": 2.3622047244094487e-06,
      "loss": 2.1307,
      "step": 50
    },
    {
      "epoch": 0.07,
      "grad_norm": 1.4969309568405151,
      "learning_rate": 3.543307086614173e-06,
      "loss": 2.112,
      "step": 75
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.9817995429039001,
      "learning_rate": 4.7244094488188975e-06,
      "loss": 1.9605,
      "step": 100
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.7148410677909851,
      "learning_rate": 5.905511811023622e-06,
      "loss": 1.8467,
      "step": 125
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.4169553518295288,
      "learning_rate": 7.086614173228346e-06,
      "loss": 1.6873,
      "step": 150
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.40290525555610657,
      "learning_rate": 8.267716535433071e-06,
      "loss": 1.5778,
      "step": 175
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.2909008860588074,
      "learning_rate": 9.448818897637795e-06,
      "loss": 1.528,
      "step": 200
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.2634272873401642,
      "learning_rate": 1.0629921259842519e-05,
      "loss": 1.432,
      "step": 225
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.2529142200946808,
      "learning_rate": 1.1811023622047245e-05,
      "loss": 1.392,
      "step": 250
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.2729162573814392,
      "learning_rate": 1.2992125984251968e-05,
      "loss": 1.3803,
      "step": 275
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.29093897342681885,
      "learning_rate": 1.4173228346456692e-05,
      "loss": 1.2938,
      "step": 300
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.3456454873085022,
      "learning_rate": 1.5354330708661416e-05,
      "loss": 1.2638,
      "step": 325
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.2981075346469879,
      "learning_rate": 1.6535433070866142e-05,
      "loss": 1.2191,
      "step": 350
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.26900631189346313,
      "learning_rate": 1.7716535433070864e-05,
      "loss": 1.1489,
      "step": 375
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.31342336535453796,
      "learning_rate": 1.889763779527559e-05,
      "loss": 1.1289,
      "step": 400
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.3491218388080597,
      "learning_rate": 2.0078740157480316e-05,
      "loss": 1.0612,
      "step": 425
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.36805489659309387,
      "learning_rate": 2.1259842519685038e-05,
      "loss": 1.1026,
      "step": 450
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.36568203568458557,
      "learning_rate": 2.2440944881889763e-05,
      "loss": 1.0621,
      "step": 475
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.41116490960121155,
      "learning_rate": 2.362204724409449e-05,
      "loss": 1.0184,
      "step": 500
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.4134870171546936,
      "learning_rate": 2.480314960629921e-05,
      "loss": 1.0015,
      "step": 525
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.4291512966156006,
      "learning_rate": 2.5984251968503937e-05,
      "loss": 0.9931,
      "step": 550
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.5563335418701172,
      "learning_rate": 2.716535433070866e-05,
      "loss": 0.9914,
      "step": 575
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.41773471236228943,
      "learning_rate": 2.8346456692913385e-05,
      "loss": 0.9395,
      "step": 600
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.4548351466655731,
      "learning_rate": 2.952755905511811e-05,
      "loss": 0.958,
      "step": 625
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.515271782875061,
      "learning_rate": 2.9921232277262383e-05,
      "loss": 0.9497,
      "step": 650
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.4861299693584442,
      "learning_rate": 2.978995273936636e-05,
      "loss": 0.915,
      "step": 675
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.5384882688522339,
      "learning_rate": 2.965867320147033e-05,
      "loss": 0.9498,
      "step": 700
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.5463869571685791,
      "learning_rate": 2.9527393663574304e-05,
      "loss": 0.934,
      "step": 725
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.5710709691047668,
      "learning_rate": 2.939611412567828e-05,
      "loss": 0.9273,
      "step": 750
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.6447290778160095,
      "learning_rate": 2.9264834587782252e-05,
      "loss": 0.895,
      "step": 775
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.5392516851425171,
      "learning_rate": 2.9133555049886224e-05,
      "loss": 0.8863,
      "step": 800
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.5963829159736633,
      "learning_rate": 2.90022755119902e-05,
      "loss": 0.8505,
      "step": 825
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.6194504499435425,
      "learning_rate": 2.8870995974094173e-05,
      "loss": 0.9034,
      "step": 850
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.5820630192756653,
      "learning_rate": 2.873971643619815e-05,
      "loss": 0.9087,
      "step": 875
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.5965354442596436,
      "learning_rate": 2.8608436898302117e-05,
      "loss": 0.8652,
      "step": 900
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.5949137210845947,
      "learning_rate": 2.847715736040609e-05,
      "loss": 0.8323,
      "step": 925
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.5632292032241821,
      "learning_rate": 2.8345877822510066e-05,
      "loss": 0.8636,
      "step": 950
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.600488543510437,
      "learning_rate": 2.8214598284614038e-05,
      "loss": 0.8467,
      "step": 975
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.6131569743156433,
      "learning_rate": 2.808331874671801e-05,
      "loss": 0.8394,
      "step": 1000
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.6562418937683105,
      "learning_rate": 2.7952039208821986e-05,
      "loss": 0.8393,
      "step": 1025
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.6331117153167725,
      "learning_rate": 2.782075967092596e-05,
      "loss": 0.8631,
      "step": 1050
    },
    {
      "epoch": 1.02,
      "grad_norm": 0.667453944683075,
      "learning_rate": 2.7689480133029935e-05,
      "loss": 0.8447,
      "step": 1075
    },
    {
      "epoch": 1.04,
      "grad_norm": 0.6880422830581665,
      "learning_rate": 2.7558200595133907e-05,
      "loss": 0.807,
      "step": 1100
    },
    {
      "epoch": 1.06,
      "grad_norm": 0.7089916467666626,
      "learning_rate": 2.742692105723788e-05,
      "loss": 0.8229,
      "step": 1125
    },
    {
      "epoch": 1.09,
      "grad_norm": 0.6905713081359863,
      "learning_rate": 2.7295641519341855e-05,
      "loss": 0.8455,
      "step": 1150
    },
    {
      "epoch": 1.11,
      "grad_norm": 0.6697952151298523,
      "learning_rate": 2.7164361981445824e-05,
      "loss": 0.8241,
      "step": 1175
    },
    {
      "epoch": 1.13,
      "grad_norm": 0.6053146123886108,
      "learning_rate": 2.7033082443549797e-05,
      "loss": 0.85,
      "step": 1200
    },
    {
      "epoch": 1.16,
      "grad_norm": 0.62926185131073,
      "learning_rate": 2.6901802905653773e-05,
      "loss": 0.8541,
      "step": 1225
    },
    {
      "epoch": 1.18,
      "grad_norm": 0.7045501470565796,
      "learning_rate": 2.6770523367757745e-05,
      "loss": 0.8678,
      "step": 1250
    },
    {
      "epoch": 1.2,
      "grad_norm": 0.7079373598098755,
      "learning_rate": 2.663924382986172e-05,
      "loss": 0.8119,
      "step": 1275
    },
    {
      "epoch": 1.23,
      "grad_norm": 0.6754956841468811,
      "learning_rate": 2.6507964291965693e-05,
      "loss": 0.8201,
      "step": 1300
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.7645794749259949,
      "learning_rate": 2.6376684754069666e-05,
      "loss": 0.8103,
      "step": 1325
    },
    {
      "epoch": 1.28,
      "grad_norm": 0.7001616358757019,
      "learning_rate": 2.624540521617364e-05,
      "loss": 0.8116,
      "step": 1350
    },
    {
      "epoch": 1.3,
      "grad_norm": 0.7295788526535034,
      "learning_rate": 2.6114125678277614e-05,
      "loss": 0.8106,
      "step": 1375
    },
    {
      "epoch": 1.32,
      "grad_norm": 0.7286357879638672,
      "learning_rate": 2.5982846140381586e-05,
      "loss": 0.8154,
      "step": 1400
    },
    {
      "epoch": 1.35,
      "grad_norm": 0.6777210235595703,
      "learning_rate": 2.5851566602485562e-05,
      "loss": 0.8193,
      "step": 1425
    },
    {
      "epoch": 1.37,
      "grad_norm": 0.7970985174179077,
      "learning_rate": 2.5720287064589535e-05,
      "loss": 0.8118,
      "step": 1450
    },
    {
      "epoch": 1.39,
      "grad_norm": 0.7872875928878784,
      "learning_rate": 2.5589007526693504e-05,
      "loss": 0.829,
      "step": 1475
    },
    {
      "epoch": 1.42,
      "grad_norm": 0.6874305009841919,
      "learning_rate": 2.545772798879748e-05,
      "loss": 0.7935,
      "step": 1500
    },
    {
      "epoch": 1.44,
      "grad_norm": 0.8056828379631042,
      "learning_rate": 2.5326448450901452e-05,
      "loss": 0.8077,
      "step": 1525
    },
    {
      "epoch": 1.46,
      "grad_norm": 0.7361560463905334,
      "learning_rate": 2.5195168913005428e-05,
      "loss": 0.8179,
      "step": 1550
    },
    {
      "epoch": 1.49,
      "grad_norm": 0.7530879378318787,
      "learning_rate": 2.50638893751094e-05,
      "loss": 0.7939,
      "step": 1575
    },
    {
      "epoch": 1.51,
      "grad_norm": 0.7077944874763489,
      "learning_rate": 2.4932609837213373e-05,
      "loss": 0.8158,
      "step": 1600
    },
    {
      "epoch": 1.53,
      "grad_norm": 0.7000440955162048,
      "learning_rate": 2.480133029931735e-05,
      "loss": 0.8224,
      "step": 1625
    },
    {
      "epoch": 1.56,
      "grad_norm": 0.6929032206535339,
      "learning_rate": 2.467005076142132e-05,
      "loss": 0.8073,
      "step": 1650
    },
    {
      "epoch": 1.58,
      "grad_norm": 0.6915440559387207,
      "learning_rate": 2.4538771223525293e-05,
      "loss": 0.8055,
      "step": 1675
    },
    {
      "epoch": 1.61,
      "grad_norm": 0.79297935962677,
      "learning_rate": 2.440749168562927e-05,
      "loss": 0.7866,
      "step": 1700
    },
    {
      "epoch": 1.63,
      "grad_norm": 0.7547681927680969,
      "learning_rate": 2.427621214773324e-05,
      "loss": 0.8039,
      "step": 1725
    },
    {
      "epoch": 1.65,
      "grad_norm": 0.7506528496742249,
      "learning_rate": 2.4144932609837214e-05,
      "loss": 0.7823,
      "step": 1750
    },
    {
      "epoch": 1.68,
      "grad_norm": 0.7495055198669434,
      "learning_rate": 2.4013653071941186e-05,
      "loss": 0.7917,
      "step": 1775
    },
    {
      "epoch": 1.7,
      "grad_norm": 0.725407600402832,
      "learning_rate": 2.388237353404516e-05,
      "loss": 0.8195,
      "step": 1800
    },
    {
      "epoch": 1.72,
      "grad_norm": 0.7200865745544434,
      "learning_rate": 2.3751093996149135e-05,
      "loss": 0.7682,
      "step": 1825
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.8560680747032166,
      "learning_rate": 2.3619814458253107e-05,
      "loss": 0.7862,
      "step": 1850
    },
    {
      "epoch": 1.77,
      "grad_norm": 0.7104244828224182,
      "learning_rate": 2.348853492035708e-05,
      "loss": 0.8058,
      "step": 1875
    },
    {
      "epoch": 1.79,
      "grad_norm": 0.7858747839927673,
      "learning_rate": 2.3357255382461055e-05,
      "loss": 0.7787,
      "step": 1900
    },
    {
      "epoch": 1.82,
      "grad_norm": 0.8110267519950867,
      "learning_rate": 2.3225975844565028e-05,
      "loss": 0.7896,
      "step": 1925
    },
    {
      "epoch": 1.84,
      "grad_norm": 0.7881801128387451,
      "learning_rate": 2.3094696306669004e-05,
      "loss": 0.7637,
      "step": 1950
    },
    {
      "epoch": 1.87,
      "grad_norm": 0.7833150625228882,
      "learning_rate": 2.2963416768772976e-05,
      "loss": 0.7755,
      "step": 1975
    },
    {
      "epoch": 1.89,
      "grad_norm": 0.747809886932373,
      "learning_rate": 2.283213723087695e-05,
      "loss": 0.7449,
      "step": 2000
    },
    {
      "epoch": 1.91,
      "grad_norm": 0.8134004473686218,
      "learning_rate": 2.270085769298092e-05,
      "loss": 0.7972,
      "step": 2025
    },
    {
      "epoch": 1.94,
      "grad_norm": 0.801780641078949,
      "learning_rate": 2.2569578155084893e-05,
      "loss": 0.7584,
      "step": 2050
    },
    {
      "epoch": 1.96,
      "grad_norm": 0.7908413410186768,
      "learning_rate": 2.2438298617188866e-05,
      "loss": 0.7783,
      "step": 2075
    },
    {
      "epoch": 1.98,
      "grad_norm": 0.7693101167678833,
      "learning_rate": 2.230701907929284e-05,
      "loss": 0.785,
      "step": 2100
    },
    {
      "epoch": 2.01,
      "grad_norm": 0.7623382210731506,
      "learning_rate": 2.2175739541396814e-05,
      "loss": 0.8268,
      "step": 2125
    },
    {
      "epoch": 2.03,
      "grad_norm": 0.744848906993866,
      "learning_rate": 2.2044460003500787e-05,
      "loss": 0.8011,
      "step": 2150
    },
    {
      "epoch": 2.05,
      "grad_norm": 0.8488389849662781,
      "learning_rate": 2.1913180465604762e-05,
      "loss": 0.7635,
      "step": 2175
    },
    {
      "epoch": 2.08,
      "grad_norm": 0.6634330153465271,
      "learning_rate": 2.1781900927708735e-05,
      "loss": 0.7519,
      "step": 2200
    },
    {
      "epoch": 2.1,
      "grad_norm": 0.837360680103302,
      "learning_rate": 2.165062138981271e-05,
      "loss": 0.7826,
      "step": 2225
    },
    {
      "epoch": 2.13,
      "grad_norm": 1.0188002586364746,
      "learning_rate": 2.1519341851916683e-05,
      "loss": 0.7705,
      "step": 2250
    },
    {
      "epoch": 2.15,
      "grad_norm": 0.7619863152503967,
      "learning_rate": 2.1388062314020655e-05,
      "loss": 0.7823,
      "step": 2275
    },
    {
      "epoch": 2.17,
      "grad_norm": 0.781378448009491,
      "learning_rate": 2.1256782776124628e-05,
      "loss": 0.769,
      "step": 2300
    },
    {
      "epoch": 2.2,
      "grad_norm": 0.7792461514472961,
      "learning_rate": 2.11255032382286e-05,
      "loss": 0.7791,
      "step": 2325
    },
    {
      "epoch": 2.22,
      "grad_norm": 0.8438695669174194,
      "learning_rate": 2.0994223700332573e-05,
      "loss": 0.7616,
      "step": 2350
    },
    {
      "epoch": 2.24,
      "grad_norm": 0.7547187805175781,
      "learning_rate": 2.086294416243655e-05,
      "loss": 0.766,
      "step": 2375
    },
    {
      "epoch": 2.27,
      "grad_norm": 0.7716125249862671,
      "learning_rate": 2.073166462454052e-05,
      "loss": 0.7596,
      "step": 2400
    },
    {
      "epoch": 2.29,
      "grad_norm": 0.8215446472167969,
      "learning_rate": 2.0600385086644497e-05,
      "loss": 0.7801,
      "step": 2425
    },
    {
      "epoch": 2.31,
      "grad_norm": 0.8171260952949524,
      "learning_rate": 2.046910554874847e-05,
      "loss": 0.757,
      "step": 2450
    },
    {
      "epoch": 2.34,
      "grad_norm": 0.7741256952285767,
      "learning_rate": 2.0337826010852442e-05,
      "loss": 0.773,
      "step": 2475
    },
    {
      "epoch": 2.36,
      "grad_norm": 0.9329236149787903,
      "learning_rate": 2.0206546472956418e-05,
      "loss": 0.7859,
      "step": 2500
    },
    {
      "epoch": 2.38,
      "grad_norm": 0.8117268681526184,
      "learning_rate": 2.007526693506039e-05,
      "loss": 0.7277,
      "step": 2525
    },
    {
      "epoch": 2.41,
      "grad_norm": 0.793161928653717,
      "learning_rate": 1.9943987397164362e-05,
      "loss": 0.7643,
      "step": 2550
    },
    {
      "epoch": 2.43,
      "grad_norm": 0.766437828540802,
      "learning_rate": 1.9812707859268338e-05,
      "loss": 0.79,
      "step": 2575
    },
    {
      "epoch": 2.46,
      "grad_norm": 0.8206325173377991,
      "learning_rate": 1.9681428321372307e-05,
      "loss": 0.7662,
      "step": 2600
    },
    {
      "epoch": 2.48,
      "grad_norm": 0.7363426685333252,
      "learning_rate": 1.9550148783476283e-05,
      "loss": 0.7887,
      "step": 2625
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.7617402672767639,
      "learning_rate": 1.9418869245580256e-05,
      "loss": 0.771,
      "step": 2650
    },
    {
      "epoch": 2.53,
      "grad_norm": 0.7949870824813843,
      "learning_rate": 1.9287589707684228e-05,
      "loss": 0.7358,
      "step": 2675
    },
    {
      "epoch": 2.55,
      "grad_norm": 0.8213860392570496,
      "learning_rate": 1.9156310169788204e-05,
      "loss": 0.7434,
      "step": 2700
    },
    {
      "epoch": 2.57,
      "grad_norm": 0.8030453324317932,
      "learning_rate": 1.9025030631892176e-05,
      "loss": 0.7535,
      "step": 2725
    },
    {
      "epoch": 2.6,
      "grad_norm": 0.7638863921165466,
      "learning_rate": 1.889375109399615e-05,
      "loss": 0.7735,
      "step": 2750
    },
    {
      "epoch": 2.62,
      "grad_norm": 0.7628746628761292,
      "learning_rate": 1.8762471556100124e-05,
      "loss": 0.7749,
      "step": 2775
    },
    {
      "epoch": 2.64,
      "grad_norm": 0.7935550808906555,
      "learning_rate": 1.8631192018204097e-05,
      "loss": 0.7527,
      "step": 2800
    },
    {
      "epoch": 2.67,
      "grad_norm": 0.7617929577827454,
      "learning_rate": 1.8499912480308073e-05,
      "loss": 0.7672,
      "step": 2825
    },
    {
      "epoch": 2.69,
      "grad_norm": 0.7850877642631531,
      "learning_rate": 1.8368632942412045e-05,
      "loss": 0.7802,
      "step": 2850
    },
    {
      "epoch": 2.72,
      "grad_norm": 0.8149070739746094,
      "learning_rate": 1.8237353404516014e-05,
      "loss": 0.785,
      "step": 2875
    },
    {
      "epoch": 2.74,
      "grad_norm": 0.8640419840812683,
      "learning_rate": 1.810607386661999e-05,
      "loss": 0.796,
      "step": 2900
    },
    {
      "epoch": 2.76,
      "grad_norm": 0.8122150897979736,
      "learning_rate": 1.7974794328723962e-05,
      "loss": 0.764,
      "step": 2925
    },
    {
      "epoch": 2.79,
      "grad_norm": 0.7679978013038635,
      "learning_rate": 1.7843514790827935e-05,
      "loss": 0.7656,
      "step": 2950
    },
    {
      "epoch": 2.81,
      "grad_norm": 0.8210141658782959,
      "learning_rate": 1.771223525293191e-05,
      "loss": 0.7408,
      "step": 2975
    },
    {
      "epoch": 2.83,
      "grad_norm": 0.8836562633514404,
      "learning_rate": 1.7580955715035883e-05,
      "loss": 0.7599,
      "step": 3000
    },
    {
      "epoch": 2.86,
      "grad_norm": 0.8436664342880249,
      "learning_rate": 1.7449676177139856e-05,
      "loss": 0.7245,
      "step": 3025
    },
    {
      "epoch": 2.88,
      "grad_norm": 0.7050043940544128,
      "learning_rate": 1.731839663924383e-05,
      "loss": 0.7428,
      "step": 3050
    },
    {
      "epoch": 2.9,
      "grad_norm": 0.8637681603431702,
      "learning_rate": 1.7187117101347804e-05,
      "loss": 0.7336,
      "step": 3075
    },
    {
      "epoch": 2.93,
      "grad_norm": 0.8028432726860046,
      "learning_rate": 1.705583756345178e-05,
      "loss": 0.7803,
      "step": 3100
    },
    {
      "epoch": 2.95,
      "grad_norm": 0.8453486561775208,
      "learning_rate": 1.6924558025555752e-05,
      "loss": 0.8016,
      "step": 3125
    },
    {
      "epoch": 2.98,
      "grad_norm": 0.8883052468299866,
      "learning_rate": 1.679327848765972e-05,
      "loss": 0.7431,
      "step": 3150
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.8956327438354492,
      "learning_rate": 1.6661998949763697e-05,
      "loss": 0.7743,
      "step": 3175
    },
    {
      "epoch": 3.02,
      "grad_norm": 0.7592474222183228,
      "learning_rate": 1.653071941186767e-05,
      "loss": 0.7287,
      "step": 3200
    },
    {
      "epoch": 3.05,
      "grad_norm": 0.9048654437065125,
      "learning_rate": 1.6399439873971642e-05,
      "loss": 0.7576,
      "step": 3225
    },
    {
      "epoch": 3.07,
      "grad_norm": 0.7991531491279602,
      "learning_rate": 1.6268160336075618e-05,
      "loss": 0.7564,
      "step": 3250
    },
    {
      "epoch": 3.09,
      "grad_norm": 0.814805269241333,
      "learning_rate": 1.613688079817959e-05,
      "loss": 0.7562,
      "step": 3275
    },
    {
      "epoch": 3.12,
      "grad_norm": 0.8084884285926819,
      "learning_rate": 1.6005601260283566e-05,
      "loss": 0.7654,
      "step": 3300
    },
    {
      "epoch": 3.14,
      "grad_norm": 0.8876549601554871,
      "learning_rate": 1.587432172238754e-05,
      "loss": 0.7366,
      "step": 3325
    },
    {
      "epoch": 3.16,
      "grad_norm": 0.8986101150512695,
      "learning_rate": 1.574304218449151e-05,
      "loss": 0.7451,
      "step": 3350
    },
    {
      "epoch": 3.19,
      "grad_norm": 0.826542854309082,
      "learning_rate": 1.5611762646595487e-05,
      "loss": 0.7708,
      "step": 3375
    },
    {
      "epoch": 3.21,
      "grad_norm": 0.8317553997039795,
      "learning_rate": 1.548048310869946e-05,
      "loss": 0.7273,
      "step": 3400
    },
    {
      "epoch": 3.23,
      "grad_norm": 0.820817232131958,
      "learning_rate": 1.5349203570803428e-05,
      "loss": 0.7797,
      "step": 3425
    },
    {
      "epoch": 3.26,
      "grad_norm": 0.8864796757698059,
      "learning_rate": 1.5217924032907404e-05,
      "loss": 0.7653,
      "step": 3450
    },
    {
      "epoch": 3.28,
      "grad_norm": 0.8678339123725891,
      "learning_rate": 1.5086644495011376e-05,
      "loss": 0.7488,
      "step": 3475
    },
    {
      "epoch": 3.31,
      "grad_norm": 0.8052393198013306,
      "learning_rate": 1.495536495711535e-05,
      "loss": 0.6971,
      "step": 3500
    },
    {
      "epoch": 3.33,
      "grad_norm": 0.8921340703964233,
      "learning_rate": 1.4824085419219325e-05,
      "loss": 0.7846,
      "step": 3525
    },
    {
      "epoch": 3.35,
      "grad_norm": 0.823711097240448,
      "learning_rate": 1.4692805881323299e-05,
      "loss": 0.765,
      "step": 3550
    },
    {
      "epoch": 3.38,
      "grad_norm": 0.8947944641113281,
      "learning_rate": 1.4561526343427271e-05,
      "loss": 0.7597,
      "step": 3575
    },
    {
      "epoch": 3.4,
      "grad_norm": 0.9127305746078491,
      "learning_rate": 1.4430246805531245e-05,
      "loss": 0.7359,
      "step": 3600
    },
    {
      "epoch": 3.42,
      "grad_norm": 0.8424604535102844,
      "learning_rate": 1.429896726763522e-05,
      "loss": 0.7678,
      "step": 3625
    },
    {
      "epoch": 3.45,
      "grad_norm": 0.7486136555671692,
      "learning_rate": 1.4167687729739192e-05,
      "loss": 0.7801,
      "step": 3650
    },
    {
      "epoch": 3.47,
      "grad_norm": 0.9274526834487915,
      "learning_rate": 1.4036408191843164e-05,
      "loss": 0.7456,
      "step": 3675
    },
    {
      "epoch": 3.49,
      "grad_norm": 0.8695843815803528,
      "learning_rate": 1.3905128653947138e-05,
      "loss": 0.7412,
      "step": 3700
    },
    {
      "epoch": 3.52,
      "grad_norm": 0.881801426410675,
      "learning_rate": 1.3773849116051113e-05,
      "loss": 0.7626,
      "step": 3725
    },
    {
      "epoch": 3.54,
      "grad_norm": 0.9325851798057556,
      "learning_rate": 1.3642569578155085e-05,
      "loss": 0.7803,
      "step": 3750
    },
    {
      "epoch": 3.57,
      "grad_norm": 0.8312095403671265,
      "learning_rate": 1.3511290040259059e-05,
      "loss": 0.7396,
      "step": 3775
    },
    {
      "epoch": 3.59,
      "grad_norm": 0.8334879279136658,
      "learning_rate": 1.3380010502363032e-05,
      "loss": 0.7402,
      "step": 3800
    },
    {
      "epoch": 3.61,
      "grad_norm": 0.8369125723838806,
      "learning_rate": 1.3248730964467006e-05,
      "loss": 0.7308,
      "step": 3825
    },
    {
      "epoch": 3.64,
      "grad_norm": 0.7799876928329468,
      "learning_rate": 1.3117451426570978e-05,
      "loss": 0.7249,
      "step": 3850
    },
    {
      "epoch": 3.66,
      "grad_norm": 0.841027021408081,
      "learning_rate": 1.2986171888674952e-05,
      "loss": 0.7235,
      "step": 3875
    },
    {
      "epoch": 3.68,
      "grad_norm": 0.8014695644378662,
      "learning_rate": 1.2854892350778926e-05,
      "loss": 0.7234,
      "step": 3900
    },
    {
      "epoch": 3.71,
      "grad_norm": 0.7508646249771118,
      "learning_rate": 1.2723612812882899e-05,
      "loss": 0.7556,
      "step": 3925
    },
    {
      "epoch": 3.73,
      "grad_norm": 0.7451643943786621,
      "learning_rate": 1.2592333274986871e-05,
      "loss": 0.7268,
      "step": 3950
    },
    {
      "epoch": 3.75,
      "grad_norm": 0.8782030940055847,
      "learning_rate": 1.2461053737090845e-05,
      "loss": 0.7357,
      "step": 3975
    },
    {
      "epoch": 3.78,
      "grad_norm": 0.8773329257965088,
      "learning_rate": 1.232977419919482e-05,
      "loss": 0.7559,
      "step": 4000
    },
    {
      "epoch": 3.8,
      "grad_norm": 0.866611123085022,
      "learning_rate": 1.2198494661298794e-05,
      "loss": 0.7314,
      "step": 4025
    },
    {
      "epoch": 3.83,
      "grad_norm": 0.9285776615142822,
      "learning_rate": 1.2067215123402766e-05,
      "loss": 0.7459,
      "step": 4050
    },
    {
      "epoch": 3.85,
      "grad_norm": 0.8246288299560547,
      "learning_rate": 1.1935935585506738e-05,
      "loss": 0.7536,
      "step": 4075
    },
    {
      "epoch": 3.87,
      "grad_norm": 0.9669488668441772,
      "learning_rate": 1.1804656047610713e-05,
      "loss": 0.758,
      "step": 4100
    },
    {
      "epoch": 3.9,
      "grad_norm": 0.8978260159492493,
      "learning_rate": 1.1673376509714687e-05,
      "loss": 0.7262,
      "step": 4125
    },
    {
      "epoch": 3.92,
      "grad_norm": 0.8229988217353821,
      "learning_rate": 1.154209697181866e-05,
      "loss": 0.7331,
      "step": 4150
    },
    {
      "epoch": 3.94,
      "grad_norm": 0.8377344012260437,
      "learning_rate": 1.1410817433922633e-05,
      "loss": 0.7555,
      "step": 4175
    },
    {
      "epoch": 3.97,
      "grad_norm": 0.7936707735061646,
      "learning_rate": 1.1279537896026606e-05,
      "loss": 0.7285,
      "step": 4200
    },
    {
      "epoch": 3.99,
      "grad_norm": 0.9590585231781006,
      "learning_rate": 1.114825835813058e-05,
      "loss": 0.7239,
      "step": 4225
    },
    {
      "epoch": 4.01,
      "grad_norm": 0.9101057648658752,
      "learning_rate": 1.1016978820234552e-05,
      "loss": 0.755,
      "step": 4250
    },
    {
      "epoch": 4.04,
      "grad_norm": 0.8772357106208801,
      "learning_rate": 1.0885699282338526e-05,
      "loss": 0.7154,
      "step": 4275
    },
    {
      "epoch": 4.06,
      "grad_norm": 0.8619498610496521,
      "learning_rate": 1.07544197444425e-05,
      "loss": 0.7231,
      "step": 4300
    },
    {
      "epoch": 4.09,
      "grad_norm": 0.8514516353607178,
      "learning_rate": 1.0623140206546475e-05,
      "loss": 0.7426,
      "step": 4325
    },
    {
      "epoch": 4.11,
      "grad_norm": 0.8754194974899292,
      "learning_rate": 1.0491860668650445e-05,
      "loss": 0.7552,
      "step": 4350
    },
    {
      "epoch": 4.13,
      "grad_norm": 0.8537304997444153,
      "learning_rate": 1.036058113075442e-05,
      "loss": 0.7414,
      "step": 4375
    },
    {
      "epoch": 4.16,
      "grad_norm": 0.8096929788589478,
      "learning_rate": 1.0229301592858394e-05,
      "loss": 0.7555,
      "step": 4400
    },
    {
      "epoch": 4.18,
      "grad_norm": 0.9001064300537109,
      "learning_rate": 1.0098022054962368e-05,
      "loss": 0.7485,
      "step": 4425
    },
    {
      "epoch": 4.2,
      "grad_norm": 0.9434337019920349,
      "learning_rate": 9.96674251706634e-06,
      "loss": 0.7263,
      "step": 4450
    },
    {
      "epoch": 4.23,
      "grad_norm": 0.8765788078308105,
      "learning_rate": 9.835462979170314e-06,
      "loss": 0.7512,
      "step": 4475
    },
    {
      "epoch": 4.25,
      "grad_norm": 0.7964215278625488,
      "learning_rate": 9.704183441274287e-06,
      "loss": 0.7276,
      "step": 4500
    },
    {
      "epoch": 4.27,
      "grad_norm": 0.7790128588676453,
      "learning_rate": 9.57290390337826e-06,
      "loss": 0.7731,
      "step": 4525
    },
    {
      "epoch": 4.3,
      "grad_norm": 0.8012252449989319,
      "learning_rate": 9.441624365482233e-06,
      "loss": 0.7123,
      "step": 4550
    },
    {
      "epoch": 4.32,
      "grad_norm": 0.9006531238555908,
      "learning_rate": 9.310344827586207e-06,
      "loss": 0.7267,
      "step": 4575
    },
    {
      "epoch": 4.34,
      "grad_norm": 0.8349692821502686,
      "learning_rate": 9.179065289690182e-06,
      "loss": 0.7815,
      "step": 4600
    },
    {
      "epoch": 4.37,
      "grad_norm": 0.8102928400039673,
      "learning_rate": 9.047785751794152e-06,
      "loss": 0.7022,
      "step": 4625
    },
    {
      "epoch": 4.39,
      "grad_norm": 0.8270648717880249,
      "learning_rate": 8.916506213898126e-06,
      "loss": 0.7155,
      "step": 4650
    },
    {
      "epoch": 4.42,
      "grad_norm": 0.901828944683075,
      "learning_rate": 8.7852266760021e-06,
      "loss": 0.7353,
      "step": 4675
    },
    {
      "epoch": 4.44,
      "grad_norm": 0.8448766469955444,
      "learning_rate": 8.653947138106075e-06,
      "loss": 0.7427,
      "step": 4700
    },
    {
      "epoch": 4.46,
      "grad_norm": 0.9027690291404724,
      "learning_rate": 8.522667600210047e-06,
      "loss": 0.7498,
      "step": 4725
    },
    {
      "epoch": 4.49,
      "grad_norm": 0.8727796077728271,
      "learning_rate": 8.391388062314021e-06,
      "loss": 0.7186,
      "step": 4750
    },
    {
      "epoch": 4.51,
      "grad_norm": 0.8412611484527588,
      "learning_rate": 8.260108524417994e-06,
      "loss": 0.7088,
      "step": 4775
    },
    {
      "epoch": 4.53,
      "grad_norm": 0.822242259979248,
      "learning_rate": 8.128828986521968e-06,
      "loss": 0.731,
      "step": 4800
    },
    {
      "epoch": 4.56,
      "grad_norm": 0.8580783009529114,
      "learning_rate": 7.99754944862594e-06,
      "loss": 0.76,
      "step": 4825
    },
    {
      "epoch": 4.58,
      "grad_norm": 0.8335314393043518,
      "learning_rate": 7.866269910729914e-06,
      "loss": 0.7565,
      "step": 4850
    },
    {
      "epoch": 4.6,
      "grad_norm": 0.9883183240890503,
      "learning_rate": 7.734990372833889e-06,
      "loss": 0.73,
      "step": 4875
    },
    {
      "epoch": 4.63,
      "grad_norm": 0.8578433990478516,
      "learning_rate": 7.603710834937862e-06,
      "loss": 0.7178,
      "step": 4900
    },
    {
      "epoch": 4.65,
      "grad_norm": 0.8443204164505005,
      "learning_rate": 7.472431297041835e-06,
      "loss": 0.7331,
      "step": 4925
    },
    {
      "epoch": 4.68,
      "grad_norm": 0.8731228113174438,
      "learning_rate": 7.3411517591458075e-06,
      "loss": 0.7395,
      "step": 4950
    },
    {
      "epoch": 4.7,
      "grad_norm": 0.8685081601142883,
      "learning_rate": 7.209872221249782e-06,
      "loss": 0.7429,
      "step": 4975
    },
    {
      "epoch": 4.72,
      "grad_norm": 0.849574625492096,
      "learning_rate": 7.078592683353755e-06,
      "loss": 0.7477,
      "step": 5000
    },
    {
      "epoch": 4.75,
      "grad_norm": 0.902864396572113,
      "learning_rate": 6.947313145457728e-06,
      "loss": 0.7447,
      "step": 5025
    },
    {
      "epoch": 4.77,
      "grad_norm": 0.8804737329483032,
      "learning_rate": 6.8160336075617015e-06,
      "loss": 0.7229,
      "step": 5050
    },
    {
      "epoch": 4.79,
      "grad_norm": 0.9037989377975464,
      "learning_rate": 6.684754069665676e-06,
      "loss": 0.7626,
      "step": 5075
    },
    {
      "epoch": 4.82,
      "grad_norm": 0.9347764849662781,
      "learning_rate": 6.553474531769648e-06,
      "loss": 0.7505,
      "step": 5100
    },
    {
      "epoch": 4.84,
      "grad_norm": 0.7910996675491333,
      "learning_rate": 6.422194993873622e-06,
      "loss": 0.7259,
      "step": 5125
    },
    {
      "epoch": 4.86,
      "grad_norm": 0.912019670009613,
      "learning_rate": 6.290915455977595e-06,
      "loss": 0.7319,
      "step": 5150
    },
    {
      "epoch": 4.89,
      "grad_norm": 0.9191232919692993,
      "learning_rate": 6.159635918081569e-06,
      "loss": 0.7356,
      "step": 5175
    },
    {
      "epoch": 4.91,
      "grad_norm": 0.8757007122039795,
      "learning_rate": 6.028356380185542e-06,
      "loss": 0.732,
      "step": 5200
    },
    {
      "epoch": 4.94,
      "grad_norm": 0.8221475481987,
      "learning_rate": 5.897076842289515e-06,
      "loss": 0.6988,
      "step": 5225
    },
    {
      "epoch": 4.96,
      "grad_norm": 0.8426682949066162,
      "learning_rate": 5.765797304393489e-06,
      "loss": 0.7385,
      "step": 5250
    },
    {
      "epoch": 4.98,
      "grad_norm": 0.8677511215209961,
      "learning_rate": 5.634517766497462e-06,
      "loss": 0.7276,
      "step": 5275
    },
    {
      "epoch": 5.01,
      "grad_norm": 0.8274013996124268,
      "learning_rate": 5.503238228601435e-06,
      "loss": 0.7247,
      "step": 5300
    },
    {
      "epoch": 5.03,
      "grad_norm": 0.8982018828392029,
      "learning_rate": 5.3719586907054085e-06,
      "loss": 0.7201,
      "step": 5325
    },
    {
      "epoch": 5.05,
      "grad_norm": 0.8524619340896606,
      "learning_rate": 5.240679152809383e-06,
      "loss": 0.7283,
      "step": 5350
    },
    {
      "epoch": 5.08,
      "grad_norm": 0.873533308506012,
      "learning_rate": 5.109399614913355e-06,
      "loss": 0.7264,
      "step": 5375
    },
    {
      "epoch": 5.1,
      "grad_norm": 0.8456885814666748,
      "learning_rate": 4.978120077017329e-06,
      "loss": 0.7224,
      "step": 5400
    },
    {
      "epoch": 5.12,
      "grad_norm": 0.8955851793289185,
      "learning_rate": 4.8468405391213024e-06,
      "loss": 0.7344,
      "step": 5425
    },
    {
      "epoch": 5.15,
      "grad_norm": 0.8928526639938354,
      "learning_rate": 4.715561001225276e-06,
      "loss": 0.7368,
      "step": 5450
    },
    {
      "epoch": 5.17,
      "grad_norm": 0.8710963129997253,
      "learning_rate": 4.584281463329249e-06,
      "loss": 0.757,
      "step": 5475
    },
    {
      "epoch": 5.19,
      "grad_norm": 0.9394299387931824,
      "learning_rate": 4.453001925433223e-06,
      "loss": 0.7283,
      "step": 5500
    },
    {
      "epoch": 5.22,
      "grad_norm": 0.9365102052688599,
      "learning_rate": 4.3217223875371956e-06,
      "loss": 0.7294,
      "step": 5525
    },
    {
      "epoch": 5.24,
      "grad_norm": 0.8301792740821838,
      "learning_rate": 4.19044284964117e-06,
      "loss": 0.7362,
      "step": 5550
    },
    {
      "epoch": 5.27,
      "grad_norm": 0.8896806240081787,
      "learning_rate": 4.059163311745142e-06,
      "loss": 0.7358,
      "step": 5575
    },
    {
      "epoch": 5.29,
      "grad_norm": 0.9053913354873657,
      "learning_rate": 3.927883773849116e-06,
      "loss": 0.755,
      "step": 5600
    },
    {
      "epoch": 5.31,
      "grad_norm": 0.9083183407783508,
      "learning_rate": 3.79660423595309e-06,
      "loss": 0.7463,
      "step": 5625
    },
    {
      "epoch": 5.34,
      "grad_norm": 0.9348452091217041,
      "learning_rate": 3.6653246980570632e-06,
      "loss": 0.727,
      "step": 5650
    },
    {
      "epoch": 5.36,
      "grad_norm": 0.8955479264259338,
      "learning_rate": 3.534045160161036e-06,
      "loss": 0.7438,
      "step": 5675
    },
    {
      "epoch": 5.38,
      "grad_norm": 0.9147844910621643,
      "learning_rate": 3.4027656222650094e-06,
      "loss": 0.7224,
      "step": 5700
    },
    {
      "epoch": 5.41,
      "grad_norm": 0.9772645235061646,
      "learning_rate": 3.271486084368983e-06,
      "loss": 0.7523,
      "step": 5725
    },
    {
      "epoch": 5.43,
      "grad_norm": 0.8953329920768738,
      "learning_rate": 3.1402065464729564e-06,
      "loss": 0.7247,
      "step": 5750
    },
    {
      "epoch": 5.45,
      "grad_norm": 0.8330567479133606,
      "learning_rate": 3.0089270085769296e-06,
      "loss": 0.7132,
      "step": 5775
    },
    {
      "epoch": 5.48,
      "grad_norm": 0.9282602667808533,
      "learning_rate": 2.8776474706809033e-06,
      "loss": 0.7399,
      "step": 5800
    },
    {
      "epoch": 5.5,
      "grad_norm": 0.9033696055412292,
      "learning_rate": 2.7463679327848766e-06,
      "loss": 0.735,
      "step": 5825
    },
    {
      "epoch": 5.53,
      "grad_norm": 0.9616188406944275,
      "learning_rate": 2.61508839488885e-06,
      "loss": 0.7329,
      "step": 5850
    },
    {
      "epoch": 5.55,
      "grad_norm": 0.863400936126709,
      "learning_rate": 2.483808856992823e-06,
      "loss": 0.7611,
      "step": 5875
    },
    {
      "epoch": 5.57,
      "grad_norm": 0.8783301711082458,
      "learning_rate": 2.352529319096797e-06,
      "loss": 0.6993,
      "step": 5900
    },
    {
      "epoch": 5.6,
      "grad_norm": 0.8729863166809082,
      "learning_rate": 2.22124978120077e-06,
      "loss": 0.7058,
      "step": 5925
    },
    {
      "epoch": 5.62,
      "grad_norm": 0.8404456377029419,
      "learning_rate": 2.095221424820585e-06,
      "loss": 0.7172,
      "step": 5950
    },
    {
      "epoch": 5.64,
      "grad_norm": 0.8831392526626587,
      "learning_rate": 1.963941886924558e-06,
      "loss": 0.7216,
      "step": 5975
    },
    {
      "epoch": 5.67,
      "grad_norm": 0.8616348505020142,
      "learning_rate": 1.8326623490285316e-06,
      "loss": 0.7412,
      "step": 6000
    },
    {
      "epoch": 5.69,
      "grad_norm": 0.9031267166137695,
      "learning_rate": 1.7013828111325047e-06,
      "loss": 0.7245,
      "step": 6025
    },
    {
      "epoch": 5.71,
      "grad_norm": 0.8461517691612244,
      "learning_rate": 1.5701032732364782e-06,
      "loss": 0.7148,
      "step": 6050
    },
    {
      "epoch": 5.74,
      "grad_norm": 0.8163701891899109,
      "learning_rate": 1.4388237353404517e-06,
      "loss": 0.6964,
      "step": 6075
    },
    {
      "epoch": 5.76,
      "grad_norm": 0.8187835216522217,
      "learning_rate": 1.307544197444425e-06,
      "loss": 0.7143,
      "step": 6100
    },
    {
      "epoch": 5.79,
      "grad_norm": 0.8065425157546997,
      "learning_rate": 1.1762646595483984e-06,
      "loss": 0.6912,
      "step": 6125
    },
    {
      "epoch": 5.81,
      "grad_norm": 0.9366108179092407,
      "learning_rate": 1.0449851216523717e-06,
      "loss": 0.7214,
      "step": 6150
    },
    {
      "epoch": 5.83,
      "grad_norm": 0.8997251391410828,
      "learning_rate": 9.137055837563452e-07,
      "loss": 0.7478,
      "step": 6175
    },
    {
      "epoch": 5.86,
      "grad_norm": 0.8792877197265625,
      "learning_rate": 7.824260458603186e-07,
      "loss": 0.74,
      "step": 6200
    },
    {
      "epoch": 5.88,
      "grad_norm": 0.8877577185630798,
      "learning_rate": 6.51146507964292e-07,
      "loss": 0.7186,
      "step": 6225
    },
    {
      "epoch": 5.9,
      "grad_norm": 0.8192344307899475,
      "learning_rate": 5.198669700682654e-07,
      "loss": 0.7578,
      "step": 6250
    },
    {
      "epoch": 5.93,
      "grad_norm": 0.9072625041007996,
      "learning_rate": 3.8858743217223877e-07,
      "loss": 0.7066,
      "step": 6275
    },
    {
      "epoch": 5.95,
      "grad_norm": 0.831196665763855,
      "learning_rate": 2.5730789427621215e-07,
      "loss": 0.7467,
      "step": 6300
    },
    {
      "epoch": 5.97,
      "grad_norm": 0.9208534359931946,
      "learning_rate": 1.2602835638018557e-07,
      "loss": 0.7377,
      "step": 6325
    }
  ],
  "logging_steps": 25,
  "max_steps": 6348,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 500,
  "total_flos": 1.1235523741876224e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}