{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 6.0,
"eval_steps": 500,
"global_step": 6366,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.02,
"grad_norm": 1.4550137519836426,
"learning_rate": 1.1773940345368917e-06,
"loss": 2.7174,
"step": 25
},
{
"epoch": 0.05,
"grad_norm": 1.3239152431488037,
"learning_rate": 2.3547880690737835e-06,
"loss": 2.7301,
"step": 50
},
{
"epoch": 0.07,
"grad_norm": 1.2503877878189087,
"learning_rate": 3.532182103610675e-06,
"loss": 2.7177,
"step": 75
},
{
"epoch": 0.09,
"grad_norm": 1.4258066415786743,
"learning_rate": 4.709576138147567e-06,
"loss": 2.6627,
"step": 100
},
{
"epoch": 0.12,
"grad_norm": 1.5414007902145386,
"learning_rate": 5.886970172684459e-06,
"loss": 2.5528,
"step": 125
},
{
"epoch": 0.14,
"grad_norm": 1.1376500129699707,
"learning_rate": 7.06436420722135e-06,
"loss": 2.4215,
"step": 150
},
{
"epoch": 0.16,
"grad_norm": 0.903887152671814,
"learning_rate": 8.241758241758243e-06,
"loss": 2.2661,
"step": 175
},
{
"epoch": 0.19,
"grad_norm": 0.7810421586036682,
"learning_rate": 9.419152276295134e-06,
"loss": 2.1661,
"step": 200
},
{
"epoch": 0.21,
"grad_norm": 0.806953489780426,
"learning_rate": 1.054945054945055e-05,
"loss": 2.0261,
"step": 225
},
{
"epoch": 0.24,
"grad_norm": 0.6058614253997803,
"learning_rate": 1.1726844583987441e-05,
"loss": 1.9551,
"step": 250
},
{
"epoch": 0.26,
"grad_norm": 0.528014063835144,
"learning_rate": 1.2904238618524332e-05,
"loss": 1.8942,
"step": 275
},
{
"epoch": 0.28,
"grad_norm": 0.47531402111053467,
"learning_rate": 1.4081632653061225e-05,
"loss": 1.7891,
"step": 300
},
{
"epoch": 0.31,
"grad_norm": 0.47076013684272766,
"learning_rate": 1.5259026687598116e-05,
"loss": 1.7725,
"step": 325
},
{
"epoch": 0.33,
"grad_norm": 0.5390304327011108,
"learning_rate": 1.643642072213501e-05,
"loss": 1.7132,
"step": 350
},
{
"epoch": 0.35,
"grad_norm": 0.6033918857574463,
"learning_rate": 1.7613814756671898e-05,
"loss": 1.7432,
"step": 375
},
{
"epoch": 0.38,
"grad_norm": 0.6497958302497864,
"learning_rate": 1.8791208791208793e-05,
"loss": 1.7009,
"step": 400
},
{
"epoch": 0.4,
"grad_norm": 0.3979990780353546,
"learning_rate": 1.9968602825745684e-05,
"loss": 1.6574,
"step": 425
},
{
"epoch": 0.42,
"grad_norm": 0.39559292793273926,
"learning_rate": 2.1145996860282575e-05,
"loss": 1.596,
"step": 450
},
{
"epoch": 0.45,
"grad_norm": 0.4321981370449066,
"learning_rate": 2.2323390894819466e-05,
"loss": 1.5367,
"step": 475
},
{
"epoch": 0.47,
"grad_norm": 0.4313070476055145,
"learning_rate": 2.350078492935636e-05,
"loss": 1.5457,
"step": 500
},
{
"epoch": 0.49,
"grad_norm": 0.4958736002445221,
"learning_rate": 2.4678178963893248e-05,
"loss": 1.5258,
"step": 525
},
{
"epoch": 0.52,
"grad_norm": 0.5069326758384705,
"learning_rate": 2.5855572998430143e-05,
"loss": 1.5096,
"step": 550
},
{
"epoch": 0.54,
"grad_norm": 0.5262688994407654,
"learning_rate": 2.7032967032967034e-05,
"loss": 1.4645,
"step": 575
},
{
"epoch": 0.57,
"grad_norm": 0.6320958137512207,
"learning_rate": 2.821036106750393e-05,
"loss": 1.4081,
"step": 600
},
{
"epoch": 0.59,
"grad_norm": 0.44404885172843933,
"learning_rate": 2.9387755102040816e-05,
"loss": 1.4278,
"step": 625
},
{
"epoch": 0.61,
"grad_norm": 0.5611394047737122,
"learning_rate": 2.9937161808343517e-05,
"loss": 1.359,
"step": 650
},
{
"epoch": 0.64,
"grad_norm": 0.50496906042099,
"learning_rate": 2.980624890905917e-05,
"loss": 1.3366,
"step": 675
},
{
"epoch": 0.66,
"grad_norm": 0.49874988198280334,
"learning_rate": 2.967533600977483e-05,
"loss": 1.3175,
"step": 700
},
{
"epoch": 0.68,
"grad_norm": 0.4491003453731537,
"learning_rate": 2.954442311049049e-05,
"loss": 1.351,
"step": 725
},
{
"epoch": 0.71,
"grad_norm": 0.6342208981513977,
"learning_rate": 2.9413510211206147e-05,
"loss": 1.353,
"step": 750
},
{
"epoch": 0.73,
"grad_norm": 0.4711619019508362,
"learning_rate": 2.9282597311921802e-05,
"loss": 1.2784,
"step": 775
},
{
"epoch": 0.75,
"grad_norm": 0.533415675163269,
"learning_rate": 2.9151684412637457e-05,
"loss": 1.2759,
"step": 800
},
{
"epoch": 0.78,
"grad_norm": 0.6422008872032166,
"learning_rate": 2.9020771513353116e-05,
"loss": 1.2829,
"step": 825
},
{
"epoch": 0.8,
"grad_norm": 0.5320819616317749,
"learning_rate": 2.8889858614068774e-05,
"loss": 1.294,
"step": 850
},
{
"epoch": 0.82,
"grad_norm": 0.6057177782058716,
"learning_rate": 2.8758945714784433e-05,
"loss": 1.2728,
"step": 875
},
{
"epoch": 0.85,
"grad_norm": 0.47568175196647644,
"learning_rate": 2.8628032815500088e-05,
"loss": 1.2308,
"step": 900
},
{
"epoch": 0.87,
"grad_norm": 0.5981771349906921,
"learning_rate": 2.8497119916215743e-05,
"loss": 1.2759,
"step": 925
},
{
"epoch": 0.9,
"grad_norm": 0.5540325045585632,
"learning_rate": 2.83662070169314e-05,
"loss": 1.2731,
"step": 950
},
{
"epoch": 0.92,
"grad_norm": 0.4720264673233032,
"learning_rate": 2.823529411764706e-05,
"loss": 1.1777,
"step": 975
},
{
"epoch": 0.94,
"grad_norm": 0.5205626487731934,
"learning_rate": 2.8104381218362718e-05,
"loss": 1.2607,
"step": 1000
},
{
"epoch": 0.97,
"grad_norm": 0.6065472960472107,
"learning_rate": 2.7973468319078373e-05,
"loss": 1.2371,
"step": 1025
},
{
"epoch": 0.99,
"grad_norm": 0.5618084669113159,
"learning_rate": 2.7842555419794032e-05,
"loss": 1.2351,
"step": 1050
},
{
"epoch": 1.01,
"grad_norm": 0.49749478697776794,
"learning_rate": 2.7711642520509687e-05,
"loss": 1.229,
"step": 1075
},
{
"epoch": 1.04,
"grad_norm": 0.5713375210762024,
"learning_rate": 2.7580729621225345e-05,
"loss": 1.2189,
"step": 1100
},
{
"epoch": 1.06,
"grad_norm": 0.5449201464653015,
"learning_rate": 2.7449816721941004e-05,
"loss": 1.2217,
"step": 1125
},
{
"epoch": 1.08,
"grad_norm": 0.5718939304351807,
"learning_rate": 2.731890382265666e-05,
"loss": 1.2169,
"step": 1150
},
{
"epoch": 1.11,
"grad_norm": 0.5724524259567261,
"learning_rate": 2.7187990923372317e-05,
"loss": 1.1946,
"step": 1175
},
{
"epoch": 1.13,
"grad_norm": 0.4745756685733795,
"learning_rate": 2.7057078024087976e-05,
"loss": 1.1995,
"step": 1200
},
{
"epoch": 1.15,
"grad_norm": 0.679215669631958,
"learning_rate": 2.692616512480363e-05,
"loss": 1.1511,
"step": 1225
},
{
"epoch": 1.18,
"grad_norm": 0.5852200388908386,
"learning_rate": 2.679525222551929e-05,
"loss": 1.1293,
"step": 1250
},
{
"epoch": 1.2,
"grad_norm": 0.5490656495094299,
"learning_rate": 2.6664339326234944e-05,
"loss": 1.192,
"step": 1275
},
{
"epoch": 1.23,
"grad_norm": 0.7541109919548035,
"learning_rate": 2.6533426426950603e-05,
"loss": 1.2221,
"step": 1300
},
{
"epoch": 1.25,
"grad_norm": 0.7117947340011597,
"learning_rate": 2.640251352766626e-05,
"loss": 1.1575,
"step": 1325
},
{
"epoch": 1.27,
"grad_norm": 0.5711504817008972,
"learning_rate": 2.627160062838192e-05,
"loss": 1.1758,
"step": 1350
},
{
"epoch": 1.3,
"grad_norm": 0.6238616108894348,
"learning_rate": 2.614068772909757e-05,
"loss": 1.1511,
"step": 1375
},
{
"epoch": 1.32,
"grad_norm": 0.5755292773246765,
"learning_rate": 2.600977482981323e-05,
"loss": 1.1309,
"step": 1400
},
{
"epoch": 1.34,
"grad_norm": 0.5652589797973633,
"learning_rate": 2.587886193052889e-05,
"loss": 1.1294,
"step": 1425
},
{
"epoch": 1.37,
"grad_norm": 0.5230275988578796,
"learning_rate": 2.5747949031244547e-05,
"loss": 1.1318,
"step": 1450
},
{
"epoch": 1.39,
"grad_norm": 0.6454095840454102,
"learning_rate": 2.5617036131960205e-05,
"loss": 1.1668,
"step": 1475
},
{
"epoch": 1.41,
"grad_norm": 0.7409027218818665,
"learning_rate": 2.5486123232675857e-05,
"loss": 1.1187,
"step": 1500
},
{
"epoch": 1.44,
"grad_norm": 0.5434130430221558,
"learning_rate": 2.5355210333391516e-05,
"loss": 1.186,
"step": 1525
},
{
"epoch": 1.46,
"grad_norm": 0.5816170573234558,
"learning_rate": 2.5224297434107174e-05,
"loss": 1.126,
"step": 1550
},
{
"epoch": 1.48,
"grad_norm": 0.5217471122741699,
"learning_rate": 2.5093384534822833e-05,
"loss": 1.1651,
"step": 1575
},
{
"epoch": 1.51,
"grad_norm": 0.6262175440788269,
"learning_rate": 2.496247163553849e-05,
"loss": 1.1083,
"step": 1600
},
{
"epoch": 1.53,
"grad_norm": 0.7060381174087524,
"learning_rate": 2.4831558736254146e-05,
"loss": 1.1523,
"step": 1625
},
{
"epoch": 1.56,
"grad_norm": 0.627524197101593,
"learning_rate": 2.47006458369698e-05,
"loss": 1.1575,
"step": 1650
},
{
"epoch": 1.58,
"grad_norm": 0.6749070286750793,
"learning_rate": 2.456973293768546e-05,
"loss": 1.1535,
"step": 1675
},
{
"epoch": 1.6,
"grad_norm": 0.6341940760612488,
"learning_rate": 2.4438820038401118e-05,
"loss": 1.1009,
"step": 1700
},
{
"epoch": 1.63,
"grad_norm": 0.5389090776443481,
"learning_rate": 2.4307907139116777e-05,
"loss": 1.1221,
"step": 1725
},
{
"epoch": 1.65,
"grad_norm": 0.6468391418457031,
"learning_rate": 2.417699423983243e-05,
"loss": 1.1398,
"step": 1750
},
{
"epoch": 1.67,
"grad_norm": 0.5632995367050171,
"learning_rate": 2.404608134054809e-05,
"loss": 1.098,
"step": 1775
},
{
"epoch": 1.7,
"grad_norm": 0.5900394320487976,
"learning_rate": 2.3915168441263745e-05,
"loss": 1.1081,
"step": 1800
},
{
"epoch": 1.72,
"grad_norm": 0.5528013110160828,
"learning_rate": 2.3784255541979404e-05,
"loss": 1.1314,
"step": 1825
},
{
"epoch": 1.74,
"grad_norm": 0.5529065132141113,
"learning_rate": 2.3653342642695062e-05,
"loss": 1.151,
"step": 1850
},
{
"epoch": 1.77,
"grad_norm": 0.6382936835289001,
"learning_rate": 2.3522429743410717e-05,
"loss": 1.0977,
"step": 1875
},
{
"epoch": 1.79,
"grad_norm": 0.6123020648956299,
"learning_rate": 2.3391516844126376e-05,
"loss": 1.1206,
"step": 1900
},
{
"epoch": 1.81,
"grad_norm": 0.5873188376426697,
"learning_rate": 2.3260603944842034e-05,
"loss": 1.1202,
"step": 1925
},
{
"epoch": 1.84,
"grad_norm": 0.6366047859191895,
"learning_rate": 2.312969104555769e-05,
"loss": 1.1159,
"step": 1950
},
{
"epoch": 1.86,
"grad_norm": 0.6183773875236511,
"learning_rate": 2.2998778146273344e-05,
"loss": 1.1617,
"step": 1975
},
{
"epoch": 1.89,
"grad_norm": 0.6744990944862366,
"learning_rate": 2.2867865246989003e-05,
"loss": 1.1109,
"step": 2000
},
{
"epoch": 1.91,
"grad_norm": 0.6510053277015686,
"learning_rate": 2.273695234770466e-05,
"loss": 1.0807,
"step": 2025
},
{
"epoch": 1.93,
"grad_norm": 0.637360692024231,
"learning_rate": 2.260603944842032e-05,
"loss": 1.122,
"step": 2050
},
{
"epoch": 1.96,
"grad_norm": 0.65646892786026,
"learning_rate": 2.2475126549135978e-05,
"loss": 1.1264,
"step": 2075
},
{
"epoch": 1.98,
"grad_norm": 0.5863879919052124,
"learning_rate": 2.234421364985163e-05,
"loss": 1.0748,
"step": 2100
},
{
"epoch": 2.0,
"grad_norm": 0.6076034903526306,
"learning_rate": 2.221330075056729e-05,
"loss": 1.0891,
"step": 2125
},
{
"epoch": 2.03,
"grad_norm": 0.6606038808822632,
"learning_rate": 2.2082387851282947e-05,
"loss": 1.1157,
"step": 2150
},
{
"epoch": 2.05,
"grad_norm": 0.7062047719955444,
"learning_rate": 2.1951474951998605e-05,
"loss": 1.0886,
"step": 2175
},
{
"epoch": 2.07,
"grad_norm": 0.5990228056907654,
"learning_rate": 2.1820562052714264e-05,
"loss": 1.1008,
"step": 2200
},
{
"epoch": 2.1,
"grad_norm": 0.6606985926628113,
"learning_rate": 2.168964915342992e-05,
"loss": 1.0696,
"step": 2225
},
{
"epoch": 2.12,
"grad_norm": 0.6489419937133789,
"learning_rate": 2.1558736254145574e-05,
"loss": 1.1455,
"step": 2250
},
{
"epoch": 2.14,
"grad_norm": 0.6855771541595459,
"learning_rate": 2.1427823354861232e-05,
"loss": 1.099,
"step": 2275
},
{
"epoch": 2.17,
"grad_norm": 0.620990514755249,
"learning_rate": 2.129691045557689e-05,
"loss": 1.0636,
"step": 2300
},
{
"epoch": 2.19,
"grad_norm": 0.6790271401405334,
"learning_rate": 2.116599755629255e-05,
"loss": 1.1138,
"step": 2325
},
{
"epoch": 2.21,
"grad_norm": 0.7244608998298645,
"learning_rate": 2.1035084657008204e-05,
"loss": 1.1204,
"step": 2350
},
{
"epoch": 2.24,
"grad_norm": 0.627005934715271,
"learning_rate": 2.090417175772386e-05,
"loss": 1.0882,
"step": 2375
},
{
"epoch": 2.26,
"grad_norm": 0.6555240750312805,
"learning_rate": 2.0773258858439518e-05,
"loss": 1.1137,
"step": 2400
},
{
"epoch": 2.29,
"grad_norm": 0.5681769847869873,
"learning_rate": 2.0642345959155177e-05,
"loss": 1.0877,
"step": 2425
},
{
"epoch": 2.31,
"grad_norm": 0.7020453810691833,
"learning_rate": 2.051143305987083e-05,
"loss": 1.0683,
"step": 2450
},
{
"epoch": 2.33,
"grad_norm": 0.569598376750946,
"learning_rate": 2.038052016058649e-05,
"loss": 1.0832,
"step": 2475
},
{
"epoch": 2.36,
"grad_norm": 0.6652840971946716,
"learning_rate": 2.024960726130215e-05,
"loss": 1.0934,
"step": 2500
},
{
"epoch": 2.38,
"grad_norm": 0.5610460042953491,
"learning_rate": 2.0118694362017804e-05,
"loss": 1.0266,
"step": 2525
},
{
"epoch": 2.4,
"grad_norm": 0.8069761395454407,
"learning_rate": 1.9987781462733462e-05,
"loss": 1.1016,
"step": 2550
},
{
"epoch": 2.43,
"grad_norm": 0.5885408520698547,
"learning_rate": 1.9856868563449117e-05,
"loss": 1.0981,
"step": 2575
},
{
"epoch": 2.45,
"grad_norm": 0.7160201072692871,
"learning_rate": 1.9725955664164776e-05,
"loss": 1.064,
"step": 2600
},
{
"epoch": 2.47,
"grad_norm": 0.6466575264930725,
"learning_rate": 1.9595042764880434e-05,
"loss": 1.103,
"step": 2625
},
{
"epoch": 2.5,
"grad_norm": 0.826030969619751,
"learning_rate": 1.9464129865596093e-05,
"loss": 1.1049,
"step": 2650
},
{
"epoch": 2.52,
"grad_norm": 0.6309208273887634,
"learning_rate": 1.9333216966311748e-05,
"loss": 1.1226,
"step": 2675
},
{
"epoch": 2.54,
"grad_norm": 0.7945444583892822,
"learning_rate": 1.9202304067027403e-05,
"loss": 1.0658,
"step": 2700
},
{
"epoch": 2.57,
"grad_norm": 0.5897024869918823,
"learning_rate": 1.907139116774306e-05,
"loss": 1.1115,
"step": 2725
},
{
"epoch": 2.59,
"grad_norm": 0.6963732242584229,
"learning_rate": 1.894047826845872e-05,
"loss": 1.0736,
"step": 2750
},
{
"epoch": 2.62,
"grad_norm": 0.6119255423545837,
"learning_rate": 1.8809565369174378e-05,
"loss": 1.0645,
"step": 2775
},
{
"epoch": 2.64,
"grad_norm": 0.674546480178833,
"learning_rate": 1.8678652469890037e-05,
"loss": 1.087,
"step": 2800
},
{
"epoch": 2.66,
"grad_norm": 0.6722444891929626,
"learning_rate": 1.854773957060569e-05,
"loss": 1.0613,
"step": 2825
},
{
"epoch": 2.69,
"grad_norm": 0.6713480949401855,
"learning_rate": 1.8416826671321347e-05,
"loss": 1.0695,
"step": 2850
},
{
"epoch": 2.71,
"grad_norm": 0.7733056545257568,
"learning_rate": 1.8285913772037005e-05,
"loss": 1.0502,
"step": 2875
},
{
"epoch": 2.73,
"grad_norm": 0.6311067938804626,
"learning_rate": 1.8155000872752664e-05,
"loss": 1.056,
"step": 2900
},
{
"epoch": 2.76,
"grad_norm": 0.6068445444107056,
"learning_rate": 1.8024087973468322e-05,
"loss": 1.0751,
"step": 2925
},
{
"epoch": 2.78,
"grad_norm": 0.6926267743110657,
"learning_rate": 1.7893175074183977e-05,
"loss": 1.0392,
"step": 2950
},
{
"epoch": 2.8,
"grad_norm": 0.6936379075050354,
"learning_rate": 1.7762262174899632e-05,
"loss": 1.0896,
"step": 2975
},
{
"epoch": 2.83,
"grad_norm": 0.6111378073692322,
"learning_rate": 1.763134927561529e-05,
"loss": 1.068,
"step": 3000
},
{
"epoch": 2.85,
"grad_norm": 0.5566198229789734,
"learning_rate": 1.750043637633095e-05,
"loss": 1.094,
"step": 3025
},
{
"epoch": 2.87,
"grad_norm": 0.6551353335380554,
"learning_rate": 1.7369523477046604e-05,
"loss": 1.0362,
"step": 3050
},
{
"epoch": 2.9,
"grad_norm": 0.702276349067688,
"learning_rate": 1.7238610577762263e-05,
"loss": 1.0609,
"step": 3075
},
{
"epoch": 2.92,
"grad_norm": 0.5946235060691833,
"learning_rate": 1.7107697678477918e-05,
"loss": 1.0816,
"step": 3100
},
{
"epoch": 2.95,
"grad_norm": 0.5896125435829163,
"learning_rate": 1.6976784779193576e-05,
"loss": 1.0511,
"step": 3125
},
{
"epoch": 2.97,
"grad_norm": 0.7790045738220215,
"learning_rate": 1.6845871879909235e-05,
"loss": 1.0315,
"step": 3150
},
{
"epoch": 2.99,
"grad_norm": 0.6814244985580444,
"learning_rate": 1.671495898062489e-05,
"loss": 1.0399,
"step": 3175
},
{
"epoch": 3.02,
"grad_norm": 0.6677020788192749,
"learning_rate": 1.658404608134055e-05,
"loss": 1.0403,
"step": 3200
},
{
"epoch": 3.04,
"grad_norm": 0.5795513987541199,
"learning_rate": 1.6453133182056207e-05,
"loss": 1.076,
"step": 3225
},
{
"epoch": 3.06,
"grad_norm": 0.8617599606513977,
"learning_rate": 1.6322220282771862e-05,
"loss": 1.0515,
"step": 3250
},
{
"epoch": 3.09,
"grad_norm": 0.6513819098472595,
"learning_rate": 1.619130738348752e-05,
"loss": 1.0493,
"step": 3275
},
{
"epoch": 3.11,
"grad_norm": 0.7479323148727417,
"learning_rate": 1.6060394484203176e-05,
"loss": 1.0876,
"step": 3300
},
{
"epoch": 3.13,
"grad_norm": 0.718534529209137,
"learning_rate": 1.5929481584918834e-05,
"loss": 1.029,
"step": 3325
},
{
"epoch": 3.16,
"grad_norm": 0.6990604400634766,
"learning_rate": 1.5798568685634493e-05,
"loss": 1.015,
"step": 3350
},
{
"epoch": 3.18,
"grad_norm": 0.6723743677139282,
"learning_rate": 1.566765578635015e-05,
"loss": 1.0693,
"step": 3375
},
{
"epoch": 3.2,
"grad_norm": 0.7294295430183411,
"learning_rate": 1.5536742887065806e-05,
"loss": 1.0229,
"step": 3400
},
{
"epoch": 3.23,
"grad_norm": 0.7458708882331848,
"learning_rate": 1.540582998778146e-05,
"loss": 1.0861,
"step": 3425
},
{
"epoch": 3.25,
"grad_norm": 0.702514111995697,
"learning_rate": 1.527491708849712e-05,
"loss": 1.0729,
"step": 3450
},
{
"epoch": 3.28,
"grad_norm": 0.6749513745307922,
"learning_rate": 1.5144004189212778e-05,
"loss": 1.0461,
"step": 3475
},
{
"epoch": 3.3,
"grad_norm": 0.6903010010719299,
"learning_rate": 1.5013091289928435e-05,
"loss": 1.0243,
"step": 3500
},
{
"epoch": 3.32,
"grad_norm": 0.6058205366134644,
"learning_rate": 1.4882178390644092e-05,
"loss": 1.058,
"step": 3525
},
{
"epoch": 3.35,
"grad_norm": 0.7527756690979004,
"learning_rate": 1.475126549135975e-05,
"loss": 1.0847,
"step": 3550
},
{
"epoch": 3.37,
"grad_norm": 0.7360561490058899,
"learning_rate": 1.4620352592075405e-05,
"loss": 1.05,
"step": 3575
},
{
"epoch": 3.39,
"grad_norm": 0.616054356098175,
"learning_rate": 1.4489439692791064e-05,
"loss": 1.0508,
"step": 3600
},
{
"epoch": 3.42,
"grad_norm": 0.6187878847122192,
"learning_rate": 1.435852679350672e-05,
"loss": 1.0636,
"step": 3625
},
{
"epoch": 3.44,
"grad_norm": 0.7025036215782166,
"learning_rate": 1.4227613894222377e-05,
"loss": 1.061,
"step": 3650
},
{
"epoch": 3.46,
"grad_norm": 0.6583489179611206,
"learning_rate": 1.4096700994938036e-05,
"loss": 1.0788,
"step": 3675
},
{
"epoch": 3.49,
"grad_norm": 0.6254659295082092,
"learning_rate": 1.3965788095653692e-05,
"loss": 1.0755,
"step": 3700
},
{
"epoch": 3.51,
"grad_norm": 0.6398507356643677,
"learning_rate": 1.383487519636935e-05,
"loss": 1.0371,
"step": 3725
},
{
"epoch": 3.53,
"grad_norm": 0.684704065322876,
"learning_rate": 1.3703962297085006e-05,
"loss": 1.0261,
"step": 3750
},
{
"epoch": 3.56,
"grad_norm": 0.6592278480529785,
"learning_rate": 1.3573049397800665e-05,
"loss": 1.0256,
"step": 3775
},
{
"epoch": 3.58,
"grad_norm": 0.7980201244354248,
"learning_rate": 1.344213649851632e-05,
"loss": 1.0478,
"step": 3800
},
{
"epoch": 3.61,
"grad_norm": 0.6509789228439331,
"learning_rate": 1.3311223599231978e-05,
"loss": 1.0565,
"step": 3825
},
{
"epoch": 3.63,
"grad_norm": 0.6620888113975525,
"learning_rate": 1.3180310699947637e-05,
"loss": 1.0396,
"step": 3850
},
{
"epoch": 3.65,
"grad_norm": 0.9185547828674316,
"learning_rate": 1.3049397800663292e-05,
"loss": 1.0543,
"step": 3875
},
{
"epoch": 3.68,
"grad_norm": 0.6710821986198425,
"learning_rate": 1.291848490137895e-05,
"loss": 1.0762,
"step": 3900
},
{
"epoch": 3.7,
"grad_norm": 0.7180879712104797,
"learning_rate": 1.2787572002094607e-05,
"loss": 1.0178,
"step": 3925
},
{
"epoch": 3.72,
"grad_norm": 0.7070004343986511,
"learning_rate": 1.2656659102810264e-05,
"loss": 1.111,
"step": 3950
},
{
"epoch": 3.75,
"grad_norm": 0.6417202353477478,
"learning_rate": 1.2525746203525922e-05,
"loss": 1.0187,
"step": 3975
},
{
"epoch": 3.77,
"grad_norm": 0.6464525461196899,
"learning_rate": 1.2394833304241579e-05,
"loss": 1.0538,
"step": 4000
},
{
"epoch": 3.79,
"grad_norm": 0.7412322759628296,
"learning_rate": 1.2263920404957236e-05,
"loss": 0.9896,
"step": 4025
},
{
"epoch": 3.82,
"grad_norm": 0.6961266398429871,
"learning_rate": 1.2133007505672892e-05,
"loss": 1.038,
"step": 4050
},
{
"epoch": 3.84,
"grad_norm": 0.6745458245277405,
"learning_rate": 1.2002094606388551e-05,
"loss": 1.0348,
"step": 4075
},
{
"epoch": 3.86,
"grad_norm": 0.7007986307144165,
"learning_rate": 1.1871181707104206e-05,
"loss": 1.056,
"step": 4100
},
{
"epoch": 3.89,
"grad_norm": 0.6684093475341797,
"learning_rate": 1.1740268807819864e-05,
"loss": 1.0506,
"step": 4125
},
{
"epoch": 3.91,
"grad_norm": 0.6373914480209351,
"learning_rate": 1.1609355908535521e-05,
"loss": 1.0317,
"step": 4150
},
{
"epoch": 3.93,
"grad_norm": 0.6588457822799683,
"learning_rate": 1.1478443009251178e-05,
"loss": 1.0408,
"step": 4175
},
{
"epoch": 3.96,
"grad_norm": 0.6651216745376587,
"learning_rate": 1.1347530109966836e-05,
"loss": 1.0409,
"step": 4200
},
{
"epoch": 3.98,
"grad_norm": 0.6777089834213257,
"learning_rate": 1.1216617210682492e-05,
"loss": 1.0415,
"step": 4225
},
{
"epoch": 4.01,
"grad_norm": 0.5962642431259155,
"learning_rate": 1.108570431139815e-05,
"loss": 1.0276,
"step": 4250
},
{
"epoch": 4.03,
"grad_norm": 0.7103810906410217,
"learning_rate": 1.0954791412113807e-05,
"loss": 1.0398,
"step": 4275
},
{
"epoch": 4.05,
"grad_norm": 0.6657726764678955,
"learning_rate": 1.0823878512829464e-05,
"loss": 1.0295,
"step": 4300
},
{
"epoch": 4.08,
"grad_norm": 0.6180024147033691,
"learning_rate": 1.0692965613545122e-05,
"loss": 1.0673,
"step": 4325
},
{
"epoch": 4.1,
"grad_norm": 0.6927649974822998,
"learning_rate": 1.0562052714260779e-05,
"loss": 1.006,
"step": 4350
},
{
"epoch": 4.12,
"grad_norm": 0.6404564380645752,
"learning_rate": 1.0431139814976436e-05,
"loss": 1.0415,
"step": 4375
},
{
"epoch": 4.15,
"grad_norm": 0.7053262591362,
"learning_rate": 1.0300226915692092e-05,
"loss": 0.9987,
"step": 4400
},
{
"epoch": 4.17,
"grad_norm": 0.6685893535614014,
"learning_rate": 1.0169314016407751e-05,
"loss": 1.0407,
"step": 4425
},
{
"epoch": 4.19,
"grad_norm": 0.7120438814163208,
"learning_rate": 1.0038401117123408e-05,
"loss": 1.1023,
"step": 4450
},
{
"epoch": 4.22,
"grad_norm": 0.765691876411438,
"learning_rate": 9.907488217839064e-06,
"loss": 1.061,
"step": 4475
},
{
"epoch": 4.24,
"grad_norm": 0.7191650867462158,
"learning_rate": 9.776575318554723e-06,
"loss": 1.0052,
"step": 4500
},
{
"epoch": 4.26,
"grad_norm": 0.6924612522125244,
"learning_rate": 9.645662419270378e-06,
"loss": 1.028,
"step": 4525
},
{
"epoch": 4.29,
"grad_norm": 0.7597302198410034,
"learning_rate": 9.514749519986036e-06,
"loss": 1.0309,
"step": 4550
},
{
"epoch": 4.31,
"grad_norm": 0.7024239301681519,
"learning_rate": 9.383836620701693e-06,
"loss": 1.0261,
"step": 4575
},
{
"epoch": 4.34,
"grad_norm": 0.7462353706359863,
"learning_rate": 9.25292372141735e-06,
"loss": 1.0323,
"step": 4600
},
{
"epoch": 4.36,
"grad_norm": 0.6952032446861267,
"learning_rate": 9.122010822133008e-06,
"loss": 1.044,
"step": 4625
},
{
"epoch": 4.38,
"grad_norm": 0.7031837701797485,
"learning_rate": 8.991097922848665e-06,
"loss": 0.9975,
"step": 4650
},
{
"epoch": 4.41,
"grad_norm": 0.6907106041908264,
"learning_rate": 8.860185023564322e-06,
"loss": 1.0763,
"step": 4675
},
{
"epoch": 4.43,
"grad_norm": 0.6010780334472656,
"learning_rate": 8.729272124279979e-06,
"loss": 0.9934,
"step": 4700
},
{
"epoch": 4.45,
"grad_norm": 0.6455862522125244,
"learning_rate": 8.598359224995637e-06,
"loss": 1.051,
"step": 4725
},
{
"epoch": 4.48,
"grad_norm": 0.6011804342269897,
"learning_rate": 8.467446325711294e-06,
"loss": 1.0386,
"step": 4750
},
{
"epoch": 4.5,
"grad_norm": 0.7045350074768066,
"learning_rate": 8.33653342642695e-06,
"loss": 1.051,
"step": 4775
},
{
"epoch": 4.52,
"grad_norm": 0.6799806356430054,
"learning_rate": 8.20562052714261e-06,
"loss": 1.0509,
"step": 4800
},
{
"epoch": 4.55,
"grad_norm": 0.6759226322174072,
"learning_rate": 8.074707627858264e-06,
"loss": 0.983,
"step": 4825
},
{
"epoch": 4.57,
"grad_norm": 0.6579796671867371,
"learning_rate": 7.943794728573923e-06,
"loss": 1.0901,
"step": 4850
},
{
"epoch": 4.59,
"grad_norm": 0.6630005240440369,
"learning_rate": 7.812881829289578e-06,
"loss": 1.0501,
"step": 4875
},
{
"epoch": 4.62,
"grad_norm": 0.693554699420929,
"learning_rate": 7.681968930005236e-06,
"loss": 1.0399,
"step": 4900
},
{
"epoch": 4.64,
"grad_norm": 0.6797517538070679,
"learning_rate": 7.551056030720894e-06,
"loss": 1.062,
"step": 4925
},
{
"epoch": 4.67,
"grad_norm": 0.671291172504425,
"learning_rate": 7.420143131436551e-06,
"loss": 1.0045,
"step": 4950
},
{
"epoch": 4.69,
"grad_norm": 0.7400732040405273,
"learning_rate": 7.2892302321522084e-06,
"loss": 1.041,
"step": 4975
},
{
"epoch": 4.71,
"grad_norm": 0.5939465165138245,
"learning_rate": 7.158317332867866e-06,
"loss": 1.0551,
"step": 5000
},
{
"epoch": 4.74,
"grad_norm": 0.6042287349700928,
"learning_rate": 7.027404433583523e-06,
"loss": 1.0298,
"step": 5025
},
{
"epoch": 4.76,
"grad_norm": 0.6053622961044312,
"learning_rate": 6.89649153429918e-06,
"loss": 1.0218,
"step": 5050
},
{
"epoch": 4.78,
"grad_norm": 0.6911068558692932,
"learning_rate": 6.77081515098621e-06,
"loss": 1.0442,
"step": 5075
},
{
"epoch": 4.81,
"grad_norm": 0.6786155104637146,
"learning_rate": 6.639902251701868e-06,
"loss": 1.0321,
"step": 5100
},
{
"epoch": 4.83,
"grad_norm": 0.6624850034713745,
"learning_rate": 6.508989352417526e-06,
"loss": 1.0696,
"step": 5125
},
{
"epoch": 4.85,
"grad_norm": 0.850053071975708,
"learning_rate": 6.378076453133182e-06,
"loss": 0.9769,
"step": 5150
},
{
"epoch": 4.88,
"grad_norm": 0.8631001710891724,
"learning_rate": 6.247163553848839e-06,
"loss": 0.9791,
"step": 5175
},
{
"epoch": 4.9,
"grad_norm": 0.6453446745872498,
"learning_rate": 6.116250654564497e-06,
"loss": 1.035,
"step": 5200
},
{
"epoch": 4.92,
"grad_norm": 0.6186447739601135,
"learning_rate": 5.9853377552801536e-06,
"loss": 1.0209,
"step": 5225
},
{
"epoch": 4.95,
"grad_norm": 0.6838425397872925,
"learning_rate": 5.854424855995811e-06,
"loss": 0.9938,
"step": 5250
},
{
"epoch": 4.97,
"grad_norm": 0.6573156118392944,
"learning_rate": 5.723511956711468e-06,
"loss": 1.0532,
"step": 5275
},
{
"epoch": 5.0,
"grad_norm": 0.6593102216720581,
"learning_rate": 5.592599057427126e-06,
"loss": 0.9892,
"step": 5300
},
{
"epoch": 5.02,
"grad_norm": 0.6164034605026245,
"learning_rate": 5.461686158142782e-06,
"loss": 1.0142,
"step": 5325
},
{
"epoch": 5.04,
"grad_norm": 0.5685546398162842,
"learning_rate": 5.330773258858439e-06,
"loss": 1.0124,
"step": 5350
},
{
"epoch": 5.07,
"grad_norm": 0.718358039855957,
"learning_rate": 5.199860359574097e-06,
"loss": 1.036,
"step": 5375
},
{
"epoch": 5.09,
"grad_norm": 0.7039633989334106,
"learning_rate": 5.068947460289754e-06,
"loss": 0.9901,
"step": 5400
},
{
"epoch": 5.11,
"grad_norm": 0.7492026090621948,
"learning_rate": 4.938034561005411e-06,
"loss": 1.0949,
"step": 5425
},
{
"epoch": 5.14,
"grad_norm": 0.7225030064582825,
"learning_rate": 4.807121661721069e-06,
"loss": 0.9975,
"step": 5450
},
{
"epoch": 5.16,
"grad_norm": 0.7279338240623474,
"learning_rate": 4.6762087624367256e-06,
"loss": 1.0505,
"step": 5475
},
{
"epoch": 5.18,
"grad_norm": 0.5847637057304382,
"learning_rate": 4.545295863152382e-06,
"loss": 1.0111,
"step": 5500
},
{
"epoch": 5.21,
"grad_norm": 0.647482693195343,
"learning_rate": 4.41438296386804e-06,
"loss": 1.0477,
"step": 5525
},
{
"epoch": 5.23,
"grad_norm": 0.6671623587608337,
"learning_rate": 4.283470064583698e-06,
"loss": 1.0096,
"step": 5550
},
{
"epoch": 5.25,
"grad_norm": 0.7235545516014099,
"learning_rate": 4.152557165299354e-06,
"loss": 1.0466,
"step": 5575
},
{
"epoch": 5.28,
"grad_norm": 0.7705885767936707,
"learning_rate": 4.021644266015012e-06,
"loss": 1.005,
"step": 5600
},
{
"epoch": 5.3,
"grad_norm": 0.6789776682853699,
"learning_rate": 3.890731366730669e-06,
"loss": 1.0132,
"step": 5625
},
{
"epoch": 5.33,
"grad_norm": 0.6266222596168518,
"learning_rate": 3.7598184674463255e-06,
"loss": 1.053,
"step": 5650
},
{
"epoch": 5.35,
"grad_norm": 0.6509959697723389,
"learning_rate": 3.628905568161983e-06,
"loss": 1.0416,
"step": 5675
},
{
"epoch": 5.37,
"grad_norm": 0.6754451394081116,
"learning_rate": 3.4979926688776404e-06,
"loss": 1.0234,
"step": 5700
},
{
"epoch": 5.4,
"grad_norm": 0.6019960045814514,
"learning_rate": 3.367079769593297e-06,
"loss": 1.024,
"step": 5725
},
{
"epoch": 5.42,
"grad_norm": 0.6941302418708801,
"learning_rate": 3.2361668703089548e-06,
"loss": 1.0429,
"step": 5750
},
{
"epoch": 5.44,
"grad_norm": 0.7364933490753174,
"learning_rate": 3.1052539710246115e-06,
"loss": 0.9956,
"step": 5775
},
{
"epoch": 5.47,
"grad_norm": 0.7257634401321411,
"learning_rate": 2.9743410717402687e-06,
"loss": 1.0185,
"step": 5800
},
{
"epoch": 5.49,
"grad_norm": 0.7992274761199951,
"learning_rate": 2.8434281724559264e-06,
"loss": 1.0608,
"step": 5825
},
{
"epoch": 5.51,
"grad_norm": 0.6821667551994324,
"learning_rate": 2.7177517891429567e-06,
"loss": 0.98,
"step": 5850
},
{
"epoch": 5.54,
"grad_norm": 0.8012374043464661,
"learning_rate": 2.5868388898586143e-06,
"loss": 1.0294,
"step": 5875
},
{
"epoch": 5.56,
"grad_norm": 0.6576132774353027,
"learning_rate": 2.4559259905742715e-06,
"loss": 1.0476,
"step": 5900
},
{
"epoch": 5.58,
"grad_norm": 0.6078372597694397,
"learning_rate": 2.3250130912899283e-06,
"loss": 1.0589,
"step": 5925
},
{
"epoch": 5.61,
"grad_norm": 0.7334024906158447,
"learning_rate": 2.194100192005586e-06,
"loss": 1.0186,
"step": 5950
},
{
"epoch": 5.63,
"grad_norm": 0.6493911743164062,
"learning_rate": 2.0631872927212427e-06,
"loss": 1.0143,
"step": 5975
},
{
"epoch": 5.66,
"grad_norm": 0.8003960251808167,
"learning_rate": 1.9322743934369e-06,
"loss": 1.0243,
"step": 6000
},
{
"epoch": 5.68,
"grad_norm": 0.7148457169532776,
"learning_rate": 1.801361494152557e-06,
"loss": 1.0195,
"step": 6025
},
{
"epoch": 5.7,
"grad_norm": 0.6924374103546143,
"learning_rate": 1.6704485948682145e-06,
"loss": 1.0034,
"step": 6050
},
{
"epoch": 5.73,
"grad_norm": 0.7333653569221497,
"learning_rate": 1.5395356955838715e-06,
"loss": 1.0448,
"step": 6075
},
{
"epoch": 5.75,
"grad_norm": 0.7860874533653259,
"learning_rate": 1.4086227962995287e-06,
"loss": 1.0205,
"step": 6100
},
{
"epoch": 5.77,
"grad_norm": 0.6410927176475525,
"learning_rate": 1.2777098970151859e-06,
"loss": 0.9727,
"step": 6125
},
{
"epoch": 5.8,
"grad_norm": 0.6415342688560486,
"learning_rate": 1.146796997730843e-06,
"loss": 1.0137,
"step": 6150
},
{
"epoch": 5.82,
"grad_norm": 0.6379125118255615,
"learning_rate": 1.0158840984465003e-06,
"loss": 1.0076,
"step": 6175
},
{
"epoch": 5.84,
"grad_norm": 0.6775155663490295,
"learning_rate": 8.849711991621574e-07,
"loss": 1.0328,
"step": 6200
},
{
"epoch": 5.87,
"grad_norm": 0.614631712436676,
"learning_rate": 7.540582998778147e-07,
"loss": 0.9982,
"step": 6225
},
{
"epoch": 5.89,
"grad_norm": 0.7537350058555603,
"learning_rate": 6.231454005934718e-07,
"loss": 1.0465,
"step": 6250
},
{
"epoch": 5.91,
"grad_norm": 0.6336871385574341,
"learning_rate": 4.92232501309129e-07,
"loss": 1.0098,
"step": 6275
},
{
"epoch": 5.94,
"grad_norm": 0.7179773449897766,
"learning_rate": 3.613196020247862e-07,
"loss": 1.0218,
"step": 6300
},
{
"epoch": 5.96,
"grad_norm": 0.6881077289581299,
"learning_rate": 2.3040670274044336e-07,
"loss": 1.0088,
"step": 6325
},
{
"epoch": 5.98,
"grad_norm": 0.6044571399688721,
"learning_rate": 9.949380345610055e-08,
"loss": 1.0526,
"step": 6350
}
],
"logging_steps": 25,
"max_steps": 6366,
"num_input_tokens_seen": 0,
"num_train_epochs": 6,
"save_steps": 500,
"total_flos": 1.3676194390081536e+16,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}