{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 200.0,
"eval_steps": 500,
"global_step": 114600,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.8726003490401396,
"grad_norm": 0.931673526763916,
"learning_rate": 0.0009956369982547994,
"loss": 2.6507,
"step": 500
},
{
"epoch": 1.7452006980802792,
"grad_norm": 1.8975260257720947,
"learning_rate": 0.0009912739965095986,
"loss": 1.6969,
"step": 1000
},
{
"epoch": 2.6178010471204187,
"grad_norm": 0.5923702120780945,
"learning_rate": 0.000986910994764398,
"loss": 1.287,
"step": 1500
},
{
"epoch": 3.4904013961605584,
"grad_norm": 0.79058438539505,
"learning_rate": 0.000982547993019197,
"loss": 0.9398,
"step": 2000
},
{
"epoch": 4.363001745200698,
"grad_norm": 0.7330807447433472,
"learning_rate": 0.0009781849912739965,
"loss": 0.6968,
"step": 2500
},
{
"epoch": 5.2356020942408374,
"grad_norm": 0.6996338963508606,
"learning_rate": 0.0009738219895287959,
"loss": 0.4816,
"step": 3000
},
{
"epoch": 6.108202443280978,
"grad_norm": 0.5383062362670898,
"learning_rate": 0.0009694589877835951,
"loss": 0.3189,
"step": 3500
},
{
"epoch": 6.980802792321117,
"grad_norm": 0.6486771106719971,
"learning_rate": 0.0009650959860383944,
"loss": 0.2218,
"step": 4000
},
{
"epoch": 7.853403141361256,
"grad_norm": 0.507366955280304,
"learning_rate": 0.0009607329842931938,
"loss": 0.1542,
"step": 4500
},
{
"epoch": 8.726003490401396,
"grad_norm": 0.8452386260032654,
"learning_rate": 0.000956369982547993,
"loss": 0.1285,
"step": 5000
},
{
"epoch": 9.598603839441536,
"grad_norm": 0.43142977356910706,
"learning_rate": 0.0009520069808027923,
"loss": 0.1175,
"step": 5500
},
{
"epoch": 10.471204188481675,
"grad_norm": 0.2331060916185379,
"learning_rate": 0.0009476439790575916,
"loss": 0.1053,
"step": 6000
},
{
"epoch": 11.343804537521814,
"grad_norm": 0.4272315800189972,
"learning_rate": 0.000943280977312391,
"loss": 0.092,
"step": 6500
},
{
"epoch": 12.216404886561955,
"grad_norm": 0.2999955117702484,
"learning_rate": 0.0009389179755671902,
"loss": 0.0878,
"step": 7000
},
{
"epoch": 13.089005235602095,
"grad_norm": 2.55757474899292,
"learning_rate": 0.0009345549738219895,
"loss": 0.0884,
"step": 7500
},
{
"epoch": 13.961605584642234,
"grad_norm": 0.33913654088974,
"learning_rate": 0.0009301919720767889,
"loss": 0.0805,
"step": 8000
},
{
"epoch": 14.834205933682373,
"grad_norm": 0.3642922341823578,
"learning_rate": 0.0009258289703315882,
"loss": 0.0721,
"step": 8500
},
{
"epoch": 15.706806282722512,
"grad_norm": 0.7718423008918762,
"learning_rate": 0.0009214659685863874,
"loss": 0.0687,
"step": 9000
},
{
"epoch": 16.57940663176265,
"grad_norm": 0.5820666551589966,
"learning_rate": 0.0009171029668411868,
"loss": 0.0644,
"step": 9500
},
{
"epoch": 17.452006980802793,
"grad_norm": 0.2773011028766632,
"learning_rate": 0.000912739965095986,
"loss": 0.0598,
"step": 10000
},
{
"epoch": 18.324607329842934,
"grad_norm": 0.25250518321990967,
"learning_rate": 0.0009083769633507853,
"loss": 0.0653,
"step": 10500
},
{
"epoch": 19.19720767888307,
"grad_norm": 0.24120768904685974,
"learning_rate": 0.0009040139616055847,
"loss": 0.0632,
"step": 11000
},
{
"epoch": 20.069808027923212,
"grad_norm": 0.2897244989871979,
"learning_rate": 0.0008996509598603839,
"loss": 0.0594,
"step": 11500
},
{
"epoch": 20.94240837696335,
"grad_norm": 0.25145065784454346,
"learning_rate": 0.0008952879581151833,
"loss": 0.0556,
"step": 12000
},
{
"epoch": 21.81500872600349,
"grad_norm": 0.27175939083099365,
"learning_rate": 0.0008909249563699826,
"loss": 0.0481,
"step": 12500
},
{
"epoch": 22.68760907504363,
"grad_norm": 0.8626015782356262,
"learning_rate": 0.0008865619546247818,
"loss": 0.0466,
"step": 13000
},
{
"epoch": 23.56020942408377,
"grad_norm": 0.18672889471054077,
"learning_rate": 0.0008821989528795812,
"loss": 0.0512,
"step": 13500
},
{
"epoch": 24.43280977312391,
"grad_norm": 0.2387542873620987,
"learning_rate": 0.0008778359511343804,
"loss": 0.0512,
"step": 14000
},
{
"epoch": 25.305410122164048,
"grad_norm": 0.2665075957775116,
"learning_rate": 0.0008734729493891797,
"loss": 0.0483,
"step": 14500
},
{
"epoch": 26.17801047120419,
"grad_norm": 0.16715960204601288,
"learning_rate": 0.0008691099476439791,
"loss": 0.0441,
"step": 15000
},
{
"epoch": 27.050610820244327,
"grad_norm": 0.1875993311405182,
"learning_rate": 0.0008647469458987784,
"loss": 0.0397,
"step": 15500
},
{
"epoch": 27.923211169284468,
"grad_norm": 0.25451162457466125,
"learning_rate": 0.0008603839441535776,
"loss": 0.039,
"step": 16000
},
{
"epoch": 28.79581151832461,
"grad_norm": 0.307054728269577,
"learning_rate": 0.000856020942408377,
"loss": 0.0443,
"step": 16500
},
{
"epoch": 29.668411867364746,
"grad_norm": 0.2467741221189499,
"learning_rate": 0.0008516579406631763,
"loss": 0.038,
"step": 17000
},
{
"epoch": 30.541012216404887,
"grad_norm": 0.23178012669086456,
"learning_rate": 0.0008472949389179755,
"loss": 0.0351,
"step": 17500
},
{
"epoch": 31.413612565445025,
"grad_norm": 0.35061487555503845,
"learning_rate": 0.0008429319371727748,
"loss": 0.0323,
"step": 18000
},
{
"epoch": 32.28621291448517,
"grad_norm": 0.3363337218761444,
"learning_rate": 0.0008385689354275742,
"loss": 0.038,
"step": 18500
},
{
"epoch": 33.1588132635253,
"grad_norm": 0.24015972018241882,
"learning_rate": 0.0008342059336823735,
"loss": 0.0361,
"step": 19000
},
{
"epoch": 34.031413612565444,
"grad_norm": 0.5147364735603333,
"learning_rate": 0.0008298429319371727,
"loss": 0.0318,
"step": 19500
},
{
"epoch": 34.904013961605585,
"grad_norm": 0.17477251589298248,
"learning_rate": 0.0008254799301919721,
"loss": 0.0307,
"step": 20000
},
{
"epoch": 35.776614310645726,
"grad_norm": 0.2511900067329407,
"learning_rate": 0.0008211169284467714,
"loss": 0.0348,
"step": 20500
},
{
"epoch": 36.64921465968587,
"grad_norm": 0.2036200314760208,
"learning_rate": 0.0008167539267015707,
"loss": 0.0298,
"step": 21000
},
{
"epoch": 37.521815008726,
"grad_norm": 0.16981545090675354,
"learning_rate": 0.00081239092495637,
"loss": 0.0319,
"step": 21500
},
{
"epoch": 38.39441535776614,
"grad_norm": 0.22329097986221313,
"learning_rate": 0.0008080279232111692,
"loss": 0.0359,
"step": 22000
},
{
"epoch": 39.26701570680628,
"grad_norm": 0.13258185982704163,
"learning_rate": 0.0008036649214659686,
"loss": 0.0266,
"step": 22500
},
{
"epoch": 40.139616055846425,
"grad_norm": 0.12790647149085999,
"learning_rate": 0.000799301919720768,
"loss": 0.026,
"step": 23000
},
{
"epoch": 41.01221640488656,
"grad_norm": 0.23635344207286835,
"learning_rate": 0.0007949389179755671,
"loss": 0.0279,
"step": 23500
},
{
"epoch": 41.8848167539267,
"grad_norm": 0.11364254355430603,
"learning_rate": 0.0007905759162303665,
"loss": 0.0257,
"step": 24000
},
{
"epoch": 42.75741710296684,
"grad_norm": 0.2781168520450592,
"learning_rate": 0.0007862129144851659,
"loss": 0.0295,
"step": 24500
},
{
"epoch": 43.63001745200698,
"grad_norm": 0.106789730489254,
"learning_rate": 0.0007818499127399651,
"loss": 0.0308,
"step": 25000
},
{
"epoch": 44.50261780104712,
"grad_norm": 0.16404911875724792,
"learning_rate": 0.0007774869109947644,
"loss": 0.0222,
"step": 25500
},
{
"epoch": 45.37521815008726,
"grad_norm": 0.14249293506145477,
"learning_rate": 0.0007731239092495637,
"loss": 0.0225,
"step": 26000
},
{
"epoch": 46.2478184991274,
"grad_norm": 0.1853444129228592,
"learning_rate": 0.0007687609075043631,
"loss": 0.0261,
"step": 26500
},
{
"epoch": 47.12041884816754,
"grad_norm": 0.1456003040075302,
"learning_rate": 0.0007643979057591623,
"loss": 0.0252,
"step": 27000
},
{
"epoch": 47.99301919720768,
"grad_norm": 0.16386698186397552,
"learning_rate": 0.0007600349040139616,
"loss": 0.0259,
"step": 27500
},
{
"epoch": 48.86561954624782,
"grad_norm": 0.12221992015838623,
"learning_rate": 0.000755671902268761,
"loss": 0.0253,
"step": 28000
},
{
"epoch": 49.738219895287955,
"grad_norm": 0.14093224704265594,
"learning_rate": 0.0007513089005235602,
"loss": 0.0203,
"step": 28500
},
{
"epoch": 50.610820244328096,
"grad_norm": 0.1189383938908577,
"learning_rate": 0.0007469458987783595,
"loss": 0.0207,
"step": 29000
},
{
"epoch": 51.48342059336824,
"grad_norm": 0.1471104919910431,
"learning_rate": 0.0007425828970331589,
"loss": 0.0209,
"step": 29500
},
{
"epoch": 52.35602094240838,
"grad_norm": 0.08947575837373734,
"learning_rate": 0.0007382198952879581,
"loss": 0.0234,
"step": 30000
},
{
"epoch": 53.22862129144852,
"grad_norm": 0.18746259808540344,
"learning_rate": 0.0007338568935427574,
"loss": 0.0245,
"step": 30500
},
{
"epoch": 54.10122164048865,
"grad_norm": 0.1539311408996582,
"learning_rate": 0.0007294938917975568,
"loss": 0.0214,
"step": 31000
},
{
"epoch": 54.973821989528794,
"grad_norm": 0.11201947182416916,
"learning_rate": 0.000725130890052356,
"loss": 0.0194,
"step": 31500
},
{
"epoch": 55.846422338568935,
"grad_norm": 0.16618479788303375,
"learning_rate": 0.0007207678883071554,
"loss": 0.0185,
"step": 32000
},
{
"epoch": 56.719022687609076,
"grad_norm": 0.1569599211215973,
"learning_rate": 0.0007164048865619547,
"loss": 0.0234,
"step": 32500
},
{
"epoch": 57.59162303664922,
"grad_norm": 0.11062045395374298,
"learning_rate": 0.0007120418848167539,
"loss": 0.0187,
"step": 33000
},
{
"epoch": 58.46422338568935,
"grad_norm": 0.1617700457572937,
"learning_rate": 0.0007076788830715533,
"loss": 0.0176,
"step": 33500
},
{
"epoch": 59.33682373472949,
"grad_norm": 0.11750755459070206,
"learning_rate": 0.0007033158813263525,
"loss": 0.0188,
"step": 34000
},
{
"epoch": 60.20942408376963,
"grad_norm": 0.24937282502651215,
"learning_rate": 0.0006989528795811518,
"loss": 0.0225,
"step": 34500
},
{
"epoch": 61.082024432809774,
"grad_norm": 0.22410957515239716,
"learning_rate": 0.0006945898778359512,
"loss": 0.0192,
"step": 35000
},
{
"epoch": 61.954624781849915,
"grad_norm": 0.18029357492923737,
"learning_rate": 0.0006902268760907505,
"loss": 0.0169,
"step": 35500
},
{
"epoch": 62.82722513089005,
"grad_norm": 0.14077898859977722,
"learning_rate": 0.0006858638743455497,
"loss": 0.0167,
"step": 36000
},
{
"epoch": 63.69982547993019,
"grad_norm": 0.12804169952869415,
"learning_rate": 0.0006815008726003491,
"loss": 0.0167,
"step": 36500
},
{
"epoch": 64.57242582897034,
"grad_norm": 0.11356078833341599,
"learning_rate": 0.0006771378708551484,
"loss": 0.0183,
"step": 37000
},
{
"epoch": 65.44502617801047,
"grad_norm": 0.07214757055044174,
"learning_rate": 0.0006727748691099476,
"loss": 0.0167,
"step": 37500
},
{
"epoch": 66.3176265270506,
"grad_norm": 0.20497408509254456,
"learning_rate": 0.0006684118673647469,
"loss": 0.0186,
"step": 38000
},
{
"epoch": 67.19022687609075,
"grad_norm": 0.09408937394618988,
"learning_rate": 0.0006640488656195463,
"loss": 0.0169,
"step": 38500
},
{
"epoch": 68.06282722513089,
"grad_norm": 0.12423662841320038,
"learning_rate": 0.0006596858638743456,
"loss": 0.0179,
"step": 39000
},
{
"epoch": 68.93542757417103,
"grad_norm": 0.4406953752040863,
"learning_rate": 0.0006553228621291448,
"loss": 0.019,
"step": 39500
},
{
"epoch": 69.80802792321117,
"grad_norm": 0.11233725398778915,
"learning_rate": 0.0006509598603839442,
"loss": 0.0151,
"step": 40000
},
{
"epoch": 70.68062827225131,
"grad_norm": 0.08892803639173508,
"learning_rate": 0.0006465968586387435,
"loss": 0.0137,
"step": 40500
},
{
"epoch": 71.55322862129145,
"grad_norm": 0.11712398380041122,
"learning_rate": 0.0006422338568935428,
"loss": 0.0145,
"step": 41000
},
{
"epoch": 72.4258289703316,
"grad_norm": 0.11560557782649994,
"learning_rate": 0.0006378708551483421,
"loss": 0.0181,
"step": 41500
},
{
"epoch": 73.29842931937172,
"grad_norm": 0.10780952870845795,
"learning_rate": 0.0006335078534031413,
"loss": 0.0162,
"step": 42000
},
{
"epoch": 74.17102966841186,
"grad_norm": 0.06540343165397644,
"learning_rate": 0.0006291448516579407,
"loss": 0.0157,
"step": 42500
},
{
"epoch": 75.043630017452,
"grad_norm": 0.08087161183357239,
"learning_rate": 0.00062478184991274,
"loss": 0.0181,
"step": 43000
},
{
"epoch": 75.91623036649214,
"grad_norm": 0.08909798413515091,
"learning_rate": 0.0006204188481675392,
"loss": 0.0136,
"step": 43500
},
{
"epoch": 76.78883071553228,
"grad_norm": 0.09045905619859695,
"learning_rate": 0.0006160558464223386,
"loss": 0.0124,
"step": 44000
},
{
"epoch": 77.66143106457243,
"grad_norm": 0.1375885307788849,
"learning_rate": 0.000611692844677138,
"loss": 0.013,
"step": 44500
},
{
"epoch": 78.53403141361257,
"grad_norm": 0.06584478169679642,
"learning_rate": 0.0006073298429319371,
"loss": 0.0149,
"step": 45000
},
{
"epoch": 79.40663176265271,
"grad_norm": 0.10484465211629868,
"learning_rate": 0.0006029668411867365,
"loss": 0.0141,
"step": 45500
},
{
"epoch": 80.27923211169285,
"grad_norm": 0.11171326786279678,
"learning_rate": 0.0005986038394415358,
"loss": 0.0142,
"step": 46000
},
{
"epoch": 81.15183246073299,
"grad_norm": 0.12636296451091766,
"learning_rate": 0.000594240837696335,
"loss": 0.0154,
"step": 46500
},
{
"epoch": 82.02443280977312,
"grad_norm": 0.06967689841985703,
"learning_rate": 0.0005898778359511344,
"loss": 0.0144,
"step": 47000
},
{
"epoch": 82.89703315881326,
"grad_norm": 0.09147424250841141,
"learning_rate": 0.0005855148342059337,
"loss": 0.0127,
"step": 47500
},
{
"epoch": 83.7696335078534,
"grad_norm": 0.6871227025985718,
"learning_rate": 0.000581151832460733,
"loss": 0.0113,
"step": 48000
},
{
"epoch": 84.64223385689354,
"grad_norm": 0.07160250097513199,
"learning_rate": 0.0005767888307155323,
"loss": 0.014,
"step": 48500
},
{
"epoch": 85.51483420593368,
"grad_norm": 0.06839723885059357,
"learning_rate": 0.0005724258289703316,
"loss": 0.0121,
"step": 49000
},
{
"epoch": 86.38743455497382,
"grad_norm": 0.05615156516432762,
"learning_rate": 0.0005680628272251309,
"loss": 0.0136,
"step": 49500
},
{
"epoch": 87.26003490401396,
"grad_norm": 0.10079669952392578,
"learning_rate": 0.0005636998254799302,
"loss": 0.0116,
"step": 50000
},
{
"epoch": 88.1326352530541,
"grad_norm": 0.09019599854946136,
"learning_rate": 0.0005593368237347295,
"loss": 0.0122,
"step": 50500
},
{
"epoch": 89.00523560209425,
"grad_norm": 0.05459260195493698,
"learning_rate": 0.0005549738219895288,
"loss": 0.0113,
"step": 51000
},
{
"epoch": 89.87783595113439,
"grad_norm": 0.07002388685941696,
"learning_rate": 0.0005506108202443281,
"loss": 0.0112,
"step": 51500
},
{
"epoch": 90.75043630017451,
"grad_norm": 0.12079566717147827,
"learning_rate": 0.0005462478184991274,
"loss": 0.0115,
"step": 52000
},
{
"epoch": 91.62303664921465,
"grad_norm": 0.08160880208015442,
"learning_rate": 0.0005418848167539267,
"loss": 0.0135,
"step": 52500
},
{
"epoch": 92.4956369982548,
"grad_norm": 0.053758975118398666,
"learning_rate": 0.000537521815008726,
"loss": 0.0111,
"step": 53000
},
{
"epoch": 93.36823734729494,
"grad_norm": 0.08977492153644562,
"learning_rate": 0.0005331588132635254,
"loss": 0.0118,
"step": 53500
},
{
"epoch": 94.24083769633508,
"grad_norm": 0.14162831008434296,
"learning_rate": 0.0005287958115183245,
"loss": 0.0118,
"step": 54000
},
{
"epoch": 95.11343804537522,
"grad_norm": 0.09927529096603394,
"learning_rate": 0.0005244328097731239,
"loss": 0.011,
"step": 54500
},
{
"epoch": 95.98603839441536,
"grad_norm": 0.08714314550161362,
"learning_rate": 0.0005200698080279233,
"loss": 0.0101,
"step": 55000
},
{
"epoch": 96.8586387434555,
"grad_norm": 0.09934234619140625,
"learning_rate": 0.0005157068062827224,
"loss": 0.0109,
"step": 55500
},
{
"epoch": 97.73123909249564,
"grad_norm": 0.11947502195835114,
"learning_rate": 0.0005113438045375218,
"loss": 0.0114,
"step": 56000
},
{
"epoch": 98.60383944153578,
"grad_norm": 0.0667385384440422,
"learning_rate": 0.0005069808027923212,
"loss": 0.0099,
"step": 56500
},
{
"epoch": 99.47643979057591,
"grad_norm": 0.11594051122665405,
"learning_rate": 0.0005026178010471204,
"loss": 0.0117,
"step": 57000
},
{
"epoch": 100.34904013961605,
"grad_norm": 0.06558683514595032,
"learning_rate": 0.0004982547993019197,
"loss": 0.0104,
"step": 57500
},
{
"epoch": 101.22164048865619,
"grad_norm": 0.07644475996494293,
"learning_rate": 0.000493891797556719,
"loss": 0.0098,
"step": 58000
},
{
"epoch": 102.09424083769633,
"grad_norm": 0.06626095622777939,
"learning_rate": 0.0004895287958115183,
"loss": 0.0095,
"step": 58500
},
{
"epoch": 102.96684118673647,
"grad_norm": 0.10808754712343216,
"learning_rate": 0.0004851657940663176,
"loss": 0.0108,
"step": 59000
},
{
"epoch": 103.83944153577661,
"grad_norm": 0.05127561092376709,
"learning_rate": 0.000480802792321117,
"loss": 0.01,
"step": 59500
},
{
"epoch": 104.71204188481676,
"grad_norm": 0.12128196656703949,
"learning_rate": 0.00047643979057591625,
"loss": 0.0092,
"step": 60000
},
{
"epoch": 105.5846422338569,
"grad_norm": 0.06882330775260925,
"learning_rate": 0.0004720767888307155,
"loss": 0.0111,
"step": 60500
},
{
"epoch": 106.45724258289704,
"grad_norm": 0.05268734693527222,
"learning_rate": 0.00046771378708551485,
"loss": 0.0094,
"step": 61000
},
{
"epoch": 107.32984293193718,
"grad_norm": 0.046268824487924576,
"learning_rate": 0.00046335078534031417,
"loss": 0.0082,
"step": 61500
},
{
"epoch": 108.2024432809773,
"grad_norm": 0.04883289709687233,
"learning_rate": 0.00045898778359511344,
"loss": 0.0085,
"step": 62000
},
{
"epoch": 109.07504363001745,
"grad_norm": 0.0723048597574234,
"learning_rate": 0.00045462478184991276,
"loss": 0.0093,
"step": 62500
},
{
"epoch": 109.94764397905759,
"grad_norm": 0.06026541814208031,
"learning_rate": 0.00045026178010471203,
"loss": 0.0095,
"step": 63000
},
{
"epoch": 110.82024432809773,
"grad_norm": 0.058908674865961075,
"learning_rate": 0.00044589877835951136,
"loss": 0.0085,
"step": 63500
},
{
"epoch": 111.69284467713787,
"grad_norm": 0.05758107081055641,
"learning_rate": 0.0004415357766143107,
"loss": 0.01,
"step": 64000
},
{
"epoch": 112.56544502617801,
"grad_norm": 0.06559444963932037,
"learning_rate": 0.00043717277486910995,
"loss": 0.0088,
"step": 64500
},
{
"epoch": 113.43804537521815,
"grad_norm": 0.05080035701394081,
"learning_rate": 0.0004328097731239092,
"loss": 0.0083,
"step": 65000
},
{
"epoch": 114.3106457242583,
"grad_norm": 0.0523524135351181,
"learning_rate": 0.0004284467713787086,
"loss": 0.0078,
"step": 65500
},
{
"epoch": 115.18324607329843,
"grad_norm": 0.14169646799564362,
"learning_rate": 0.00042408376963350787,
"loss": 0.0085,
"step": 66000
},
{
"epoch": 116.05584642233858,
"grad_norm": 0.05305915325880051,
"learning_rate": 0.00041972076788830714,
"loss": 0.0091,
"step": 66500
},
{
"epoch": 116.9284467713787,
"grad_norm": 0.05915080010890961,
"learning_rate": 0.00041535776614310646,
"loss": 0.0085,
"step": 67000
},
{
"epoch": 117.80104712041884,
"grad_norm": 0.07950141280889511,
"learning_rate": 0.0004109947643979058,
"loss": 0.0075,
"step": 67500
},
{
"epoch": 118.67364746945898,
"grad_norm": 0.083484947681427,
"learning_rate": 0.00040663176265270506,
"loss": 0.0086,
"step": 68000
},
{
"epoch": 119.54624781849913,
"grad_norm": 0.1149265244603157,
"learning_rate": 0.0004022687609075044,
"loss": 0.0087,
"step": 68500
},
{
"epoch": 120.41884816753927,
"grad_norm": 0.10079418867826462,
"learning_rate": 0.00039790575916230365,
"loss": 0.0087,
"step": 69000
},
{
"epoch": 121.29144851657941,
"grad_norm": 0.04444717988371849,
"learning_rate": 0.000393542757417103,
"loss": 0.0083,
"step": 69500
},
{
"epoch": 122.16404886561955,
"grad_norm": 0.015783872455358505,
"learning_rate": 0.0003891797556719023,
"loss": 0.0074,
"step": 70000
},
{
"epoch": 123.03664921465969,
"grad_norm": 0.07311473041772842,
"learning_rate": 0.00038481675392670157,
"loss": 0.007,
"step": 70500
},
{
"epoch": 123.90924956369983,
"grad_norm": 0.03907659277319908,
"learning_rate": 0.00038045375218150084,
"loss": 0.0071,
"step": 71000
},
{
"epoch": 124.78184991273997,
"grad_norm": 0.031013870611786842,
"learning_rate": 0.0003760907504363002,
"loss": 0.0071,
"step": 71500
},
{
"epoch": 125.6544502617801,
"grad_norm": 0.05435263365507126,
"learning_rate": 0.0003717277486910995,
"loss": 0.0077,
"step": 72000
},
{
"epoch": 126.52705061082024,
"grad_norm": 0.015180529095232487,
"learning_rate": 0.00036736474694589876,
"loss": 0.0072,
"step": 72500
},
{
"epoch": 127.39965095986038,
"grad_norm": 0.02177223190665245,
"learning_rate": 0.0003630017452006981,
"loss": 0.0074,
"step": 73000
},
{
"epoch": 128.27225130890054,
"grad_norm": 0.04897777736186981,
"learning_rate": 0.0003586387434554974,
"loss": 0.0076,
"step": 73500
},
{
"epoch": 129.14485165794068,
"grad_norm": 0.02429993264377117,
"learning_rate": 0.0003542757417102967,
"loss": 0.0067,
"step": 74000
},
{
"epoch": 130.0174520069808,
"grad_norm": 0.019401997327804565,
"learning_rate": 0.000349912739965096,
"loss": 0.0071,
"step": 74500
},
{
"epoch": 130.89005235602093,
"grad_norm": 0.05895571410655975,
"learning_rate": 0.00034554973821989527,
"loss": 0.0067,
"step": 75000
},
{
"epoch": 131.76265270506107,
"grad_norm": 0.08564560860395432,
"learning_rate": 0.0003411867364746946,
"loss": 0.0066,
"step": 75500
},
{
"epoch": 132.6352530541012,
"grad_norm": 0.0574815534055233,
"learning_rate": 0.0003368237347294939,
"loss": 0.0063,
"step": 76000
},
{
"epoch": 133.50785340314135,
"grad_norm": 0.08703868091106415,
"learning_rate": 0.0003324607329842932,
"loss": 0.0064,
"step": 76500
},
{
"epoch": 134.3804537521815,
"grad_norm": 0.1667858362197876,
"learning_rate": 0.00032809773123909246,
"loss": 0.0074,
"step": 77000
},
{
"epoch": 135.25305410122164,
"grad_norm": 0.07352814823389053,
"learning_rate": 0.00032373472949389184,
"loss": 0.0067,
"step": 77500
},
{
"epoch": 136.12565445026178,
"grad_norm": 0.10321661829948425,
"learning_rate": 0.0003193717277486911,
"loss": 0.0065,
"step": 78000
},
{
"epoch": 136.99825479930192,
"grad_norm": 0.046309106051921844,
"learning_rate": 0.0003150087260034904,
"loss": 0.0065,
"step": 78500
},
{
"epoch": 137.87085514834206,
"grad_norm": 0.014806479215621948,
"learning_rate": 0.0003106457242582897,
"loss": 0.0061,
"step": 79000
},
{
"epoch": 138.7434554973822,
"grad_norm": 0.041382819414138794,
"learning_rate": 0.000306282722513089,
"loss": 0.0059,
"step": 79500
},
{
"epoch": 139.61605584642234,
"grad_norm": 0.07493896782398224,
"learning_rate": 0.0003019197207678883,
"loss": 0.0061,
"step": 80000
},
{
"epoch": 140.48865619546248,
"grad_norm": 0.052245959639549255,
"learning_rate": 0.0002975567190226876,
"loss": 0.006,
"step": 80500
},
{
"epoch": 141.36125654450262,
"grad_norm": 0.10472971946001053,
"learning_rate": 0.0002931937172774869,
"loss": 0.0061,
"step": 81000
},
{
"epoch": 142.23385689354276,
"grad_norm": 0.033921029418706894,
"learning_rate": 0.0002888307155322862,
"loss": 0.0059,
"step": 81500
},
{
"epoch": 143.1064572425829,
"grad_norm": 0.06276967376470566,
"learning_rate": 0.00028446771378708553,
"loss": 0.0059,
"step": 82000
},
{
"epoch": 143.97905759162305,
"grad_norm": 0.022356705740094185,
"learning_rate": 0.0002801047120418848,
"loss": 0.0056,
"step": 82500
},
{
"epoch": 144.8516579406632,
"grad_norm": 0.008057367987930775,
"learning_rate": 0.0002757417102966841,
"loss": 0.0054,
"step": 83000
},
{
"epoch": 145.72425828970333,
"grad_norm": 0.00805575679987669,
"learning_rate": 0.00027137870855148345,
"loss": 0.0054,
"step": 83500
},
{
"epoch": 146.59685863874344,
"grad_norm": 0.07681386172771454,
"learning_rate": 0.0002670157068062827,
"loss": 0.0053,
"step": 84000
},
{
"epoch": 147.46945898778358,
"grad_norm": 0.041651804000139236,
"learning_rate": 0.00026265270506108205,
"loss": 0.0054,
"step": 84500
},
{
"epoch": 148.34205933682372,
"grad_norm": 0.09435189515352249,
"learning_rate": 0.0002582897033158813,
"loss": 0.0057,
"step": 85000
},
{
"epoch": 149.21465968586386,
"grad_norm": 0.02968364767730236,
"learning_rate": 0.00025392670157068064,
"loss": 0.0053,
"step": 85500
},
{
"epoch": 150.087260034904,
"grad_norm": 0.04307426139712334,
"learning_rate": 0.00024956369982547996,
"loss": 0.0053,
"step": 86000
},
{
"epoch": 150.95986038394415,
"grad_norm": 0.08466316014528275,
"learning_rate": 0.00024520069808027923,
"loss": 0.0051,
"step": 86500
},
{
"epoch": 151.8324607329843,
"grad_norm": 0.14644889533519745,
"learning_rate": 0.00024083769633507853,
"loss": 0.005,
"step": 87000
},
{
"epoch": 152.70506108202443,
"grad_norm": 0.033381011337041855,
"learning_rate": 0.00023647469458987783,
"loss": 0.0052,
"step": 87500
},
{
"epoch": 153.57766143106457,
"grad_norm": 0.04635755345225334,
"learning_rate": 0.00023211169284467715,
"loss": 0.0055,
"step": 88000
},
{
"epoch": 154.4502617801047,
"grad_norm": 0.03803477808833122,
"learning_rate": 0.00022774869109947642,
"loss": 0.0051,
"step": 88500
},
{
"epoch": 155.32286212914485,
"grad_norm": 0.04374915733933449,
"learning_rate": 0.00022338568935427575,
"loss": 0.0052,
"step": 89000
},
{
"epoch": 156.195462478185,
"grad_norm": 0.018039803951978683,
"learning_rate": 0.00021902268760907504,
"loss": 0.0049,
"step": 89500
},
{
"epoch": 157.06806282722513,
"grad_norm": 0.017863890156149864,
"learning_rate": 0.00021465968586387437,
"loss": 0.0047,
"step": 90000
},
{
"epoch": 157.94066317626528,
"grad_norm": 0.04274868592619896,
"learning_rate": 0.00021029668411867364,
"loss": 0.0047,
"step": 90500
},
{
"epoch": 158.81326352530542,
"grad_norm": 0.01725621707737446,
"learning_rate": 0.00020593368237347296,
"loss": 0.0048,
"step": 91000
},
{
"epoch": 159.68586387434556,
"grad_norm": 0.027782520279288292,
"learning_rate": 0.00020157068062827226,
"loss": 0.0047,
"step": 91500
},
{
"epoch": 160.5584642233857,
"grad_norm": 0.031710293143987656,
"learning_rate": 0.00019720767888307156,
"loss": 0.0046,
"step": 92000
},
{
"epoch": 161.43106457242584,
"grad_norm": 0.014840440824627876,
"learning_rate": 0.00019284467713787085,
"loss": 0.0046,
"step": 92500
},
{
"epoch": 162.30366492146598,
"grad_norm": 0.019418081268668175,
"learning_rate": 0.00018848167539267018,
"loss": 0.0046,
"step": 93000
},
{
"epoch": 163.17626527050612,
"grad_norm": 0.029342737048864365,
"learning_rate": 0.00018411867364746945,
"loss": 0.0045,
"step": 93500
},
{
"epoch": 164.04886561954623,
"grad_norm": 0.012960207648575306,
"learning_rate": 0.00017975567190226877,
"loss": 0.0046,
"step": 94000
},
{
"epoch": 164.92146596858638,
"grad_norm": 0.059924036264419556,
"learning_rate": 0.00017539267015706807,
"loss": 0.0044,
"step": 94500
},
{
"epoch": 165.79406631762652,
"grad_norm": 0.01782877743244171,
"learning_rate": 0.00017102966841186736,
"loss": 0.0043,
"step": 95000
},
{
"epoch": 166.66666666666666,
"grad_norm": 0.007615593262016773,
"learning_rate": 0.00016666666666666666,
"loss": 0.0045,
"step": 95500
},
{
"epoch": 167.5392670157068,
"grad_norm": 0.019025860354304314,
"learning_rate": 0.00016230366492146599,
"loss": 0.0043,
"step": 96000
},
{
"epoch": 168.41186736474694,
"grad_norm": 0.0066482871770858765,
"learning_rate": 0.00015794066317626526,
"loss": 0.0045,
"step": 96500
},
{
"epoch": 169.28446771378708,
"grad_norm": 0.006196535658091307,
"learning_rate": 0.00015357766143106458,
"loss": 0.0044,
"step": 97000
},
{
"epoch": 170.15706806282722,
"grad_norm": 0.029925299808382988,
"learning_rate": 0.00014921465968586388,
"loss": 0.0041,
"step": 97500
},
{
"epoch": 171.02966841186736,
"grad_norm": 0.007536090444773436,
"learning_rate": 0.00014485165794066317,
"loss": 0.0042,
"step": 98000
},
{
"epoch": 171.9022687609075,
"grad_norm": 0.043489061295986176,
"learning_rate": 0.00014048865619546247,
"loss": 0.004,
"step": 98500
},
{
"epoch": 172.77486910994764,
"grad_norm": 0.012075472623109818,
"learning_rate": 0.0001361256544502618,
"loss": 0.004,
"step": 99000
},
{
"epoch": 173.64746945898779,
"grad_norm": 0.006986881606280804,
"learning_rate": 0.00013176265270506106,
"loss": 0.0042,
"step": 99500
},
{
"epoch": 174.52006980802793,
"grad_norm": 0.008854834362864494,
"learning_rate": 0.0001273996509598604,
"loss": 0.0041,
"step": 100000
},
{
"epoch": 175.39267015706807,
"grad_norm": 0.019908135756850243,
"learning_rate": 0.00012303664921465968,
"loss": 0.0039,
"step": 100500
},
{
"epoch": 176.2652705061082,
"grad_norm": 0.006664547137916088,
"learning_rate": 0.000118673647469459,
"loss": 0.004,
"step": 101000
},
{
"epoch": 177.13787085514835,
"grad_norm": 0.010044758208096027,
"learning_rate": 0.00011431064572425829,
"loss": 0.004,
"step": 101500
},
{
"epoch": 178.0104712041885,
"grad_norm": 0.006337651051580906,
"learning_rate": 0.0001099476439790576,
"loss": 0.0041,
"step": 102000
},
{
"epoch": 178.88307155322863,
"grad_norm": 0.007907305844128132,
"learning_rate": 0.0001055846422338569,
"loss": 0.0039,
"step": 102500
},
{
"epoch": 179.75567190226877,
"grad_norm": 0.011269304901361465,
"learning_rate": 0.0001012216404886562,
"loss": 0.0038,
"step": 103000
},
{
"epoch": 180.6282722513089,
"grad_norm": 0.024572385475039482,
"learning_rate": 9.685863874345551e-05,
"loss": 0.0039,
"step": 103500
},
{
"epoch": 181.50087260034903,
"grad_norm": 0.014140106737613678,
"learning_rate": 9.24956369982548e-05,
"loss": 0.0037,
"step": 104000
},
{
"epoch": 182.37347294938917,
"grad_norm": 0.007248425856232643,
"learning_rate": 8.813263525305411e-05,
"loss": 0.0038,
"step": 104500
},
{
"epoch": 183.2460732984293,
"grad_norm": 0.008632407523691654,
"learning_rate": 8.376963350785341e-05,
"loss": 0.0037,
"step": 105000
},
{
"epoch": 184.11867364746945,
"grad_norm": 0.004191929940134287,
"learning_rate": 7.940663176265271e-05,
"loss": 0.0037,
"step": 105500
},
{
"epoch": 184.9912739965096,
"grad_norm": 0.008341754786670208,
"learning_rate": 7.504363001745202e-05,
"loss": 0.0038,
"step": 106000
},
{
"epoch": 185.86387434554973,
"grad_norm": 0.006414365489035845,
"learning_rate": 7.068062827225132e-05,
"loss": 0.0037,
"step": 106500
},
{
"epoch": 186.73647469458987,
"grad_norm": 0.006369850132614374,
"learning_rate": 6.631762652705061e-05,
"loss": 0.0036,
"step": 107000
},
{
"epoch": 187.60907504363,
"grad_norm": 0.005431192461401224,
"learning_rate": 6.195462478184991e-05,
"loss": 0.0036,
"step": 107500
},
{
"epoch": 188.48167539267016,
"grad_norm": 0.0051409355364739895,
"learning_rate": 5.7591623036649214e-05,
"loss": 0.0036,
"step": 108000
},
{
"epoch": 189.3542757417103,
"grad_norm": 0.00500549515709281,
"learning_rate": 5.322862129144852e-05,
"loss": 0.0035,
"step": 108500
},
{
"epoch": 190.22687609075044,
"grad_norm": 0.007591998670250177,
"learning_rate": 4.886561954624782e-05,
"loss": 0.0036,
"step": 109000
},
{
"epoch": 191.09947643979058,
"grad_norm": 0.0033338970970362425,
"learning_rate": 4.450261780104712e-05,
"loss": 0.0035,
"step": 109500
},
{
"epoch": 191.97207678883072,
"grad_norm": 0.003059329465031624,
"learning_rate": 4.013961605584642e-05,
"loss": 0.0035,
"step": 110000
},
{
"epoch": 192.84467713787086,
"grad_norm": 0.007428302429616451,
"learning_rate": 3.5776614310645726e-05,
"loss": 0.0035,
"step": 110500
},
{
"epoch": 193.717277486911,
"grad_norm": 0.00737199978902936,
"learning_rate": 3.141361256544502e-05,
"loss": 0.0035,
"step": 111000
},
{
"epoch": 194.58987783595114,
"grad_norm": 0.006674023810774088,
"learning_rate": 2.7050610820244326e-05,
"loss": 0.0035,
"step": 111500
},
{
"epoch": 195.46247818499128,
"grad_norm": 0.004724672995507717,
"learning_rate": 2.268760907504363e-05,
"loss": 0.0035,
"step": 112000
},
{
"epoch": 196.33507853403142,
"grad_norm": 0.006876371800899506,
"learning_rate": 1.832460732984293e-05,
"loss": 0.0035,
"step": 112500
},
{
"epoch": 197.20767888307157,
"grad_norm": 0.006131891161203384,
"learning_rate": 1.3961605584642234e-05,
"loss": 0.0034,
"step": 113000
},
{
"epoch": 198.0802792321117,
"grad_norm": 0.008056155405938625,
"learning_rate": 9.598603839441536e-06,
"loss": 0.0034,
"step": 113500
},
{
"epoch": 198.95287958115182,
"grad_norm": 0.0045640296302735806,
"learning_rate": 5.235602094240838e-06,
"loss": 0.0034,
"step": 114000
},
{
"epoch": 199.82547993019196,
"grad_norm": 0.006915738806128502,
"learning_rate": 8.726003490401396e-07,
"loss": 0.0033,
"step": 114500
},
{
"epoch": 200.0,
"step": 114600,
"total_flos": 1.9933765828608e+18,
"train_loss": 0.0009824057427896462,
"train_runtime": 45478.3747,
"train_samples_per_second": 10.071,
"train_steps_per_second": 2.52
}
],
"logging_steps": 500,
"max_steps": 114600,
"num_input_tokens_seen": 0,
"num_train_epochs": 200,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.9933765828608e+18,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}