OpenELM-1_1B-SFT / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.9995619798510732,
"eval_steps": 500,
"global_step": 1141,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0008760402978537013,
"grad_norm": 247.023226046774,
"learning_rate": 1.7391304347826088e-07,
"loss": 8.8281,
"step": 1
},
{
"epoch": 0.004380201489268506,
"grad_norm": 232.10560021267386,
"learning_rate": 8.695652173913044e-07,
"loss": 8.7637,
"step": 5
},
{
"epoch": 0.008760402978537012,
"grad_norm": 197.45293542519394,
"learning_rate": 1.7391304347826088e-06,
"loss": 8.5789,
"step": 10
},
{
"epoch": 0.013140604467805518,
"grad_norm": 69.72380158032517,
"learning_rate": 2.6086956521739132e-06,
"loss": 7.607,
"step": 15
},
{
"epoch": 0.017520805957074025,
"grad_norm": 40.79010585762893,
"learning_rate": 3.4782608695652175e-06,
"loss": 6.5969,
"step": 20
},
{
"epoch": 0.021901007446342532,
"grad_norm": 31.021399931633685,
"learning_rate": 4.347826086956522e-06,
"loss": 5.3516,
"step": 25
},
{
"epoch": 0.026281208935611037,
"grad_norm": 15.829514000742273,
"learning_rate": 5.2173913043478265e-06,
"loss": 4.018,
"step": 30
},
{
"epoch": 0.030661410424879545,
"grad_norm": 12.017912967930332,
"learning_rate": 6.086956521739132e-06,
"loss": 2.9813,
"step": 35
},
{
"epoch": 0.03504161191414805,
"grad_norm": 2.8893117110423487,
"learning_rate": 6.956521739130435e-06,
"loss": 2.1303,
"step": 40
},
{
"epoch": 0.03942181340341656,
"grad_norm": 1.4949499944110431,
"learning_rate": 7.82608695652174e-06,
"loss": 1.8031,
"step": 45
},
{
"epoch": 0.043802014892685065,
"grad_norm": 0.7508052500953509,
"learning_rate": 8.695652173913044e-06,
"loss": 1.5938,
"step": 50
},
{
"epoch": 0.04818221638195357,
"grad_norm": 0.4902022246315334,
"learning_rate": 9.565217391304349e-06,
"loss": 1.5084,
"step": 55
},
{
"epoch": 0.052562417871222074,
"grad_norm": 0.42532974187985395,
"learning_rate": 1.0434782608695653e-05,
"loss": 1.4203,
"step": 60
},
{
"epoch": 0.05694261936049058,
"grad_norm": 0.3463140884611265,
"learning_rate": 1.1304347826086957e-05,
"loss": 1.3941,
"step": 65
},
{
"epoch": 0.06132282084975909,
"grad_norm": 0.3189213985873913,
"learning_rate": 1.2173913043478263e-05,
"loss": 1.3566,
"step": 70
},
{
"epoch": 0.0657030223390276,
"grad_norm": 0.2996468490222568,
"learning_rate": 1.3043478260869566e-05,
"loss": 1.3418,
"step": 75
},
{
"epoch": 0.0700832238282961,
"grad_norm": 0.26712973341018265,
"learning_rate": 1.391304347826087e-05,
"loss": 1.3201,
"step": 80
},
{
"epoch": 0.07446342531756461,
"grad_norm": 0.2668394062417099,
"learning_rate": 1.4782608695652174e-05,
"loss": 1.3143,
"step": 85
},
{
"epoch": 0.07884362680683311,
"grad_norm": 0.26274351296407206,
"learning_rate": 1.565217391304348e-05,
"loss": 1.3031,
"step": 90
},
{
"epoch": 0.08322382829610162,
"grad_norm": 0.25741452098888334,
"learning_rate": 1.6521739130434785e-05,
"loss": 1.2848,
"step": 95
},
{
"epoch": 0.08760402978537013,
"grad_norm": 0.2441325116229632,
"learning_rate": 1.739130434782609e-05,
"loss": 1.2732,
"step": 100
},
{
"epoch": 0.09198423127463863,
"grad_norm": 0.24162625924392253,
"learning_rate": 1.8260869565217393e-05,
"loss": 1.2715,
"step": 105
},
{
"epoch": 0.09636443276390715,
"grad_norm": 0.2519915389487053,
"learning_rate": 1.9130434782608697e-05,
"loss": 1.2711,
"step": 110
},
{
"epoch": 0.10074463425317565,
"grad_norm": 0.2465793202977791,
"learning_rate": 2e-05,
"loss": 1.2459,
"step": 115
},
{
"epoch": 0.10512483574244415,
"grad_norm": 0.23497052771629895,
"learning_rate": 1.99988280568259e-05,
"loss": 1.248,
"step": 120
},
{
"epoch": 0.10950503723171266,
"grad_norm": 0.24553176964100193,
"learning_rate": 1.9995312501993765e-05,
"loss": 1.2238,
"step": 125
},
{
"epoch": 0.11388523872098116,
"grad_norm": 0.23370960048474732,
"learning_rate": 1.998945415950969e-05,
"loss": 1.248,
"step": 130
},
{
"epoch": 0.11826544021024968,
"grad_norm": 0.24397346786614016,
"learning_rate": 1.9981254402502568e-05,
"loss": 1.2063,
"step": 135
},
{
"epoch": 0.12264564169951818,
"grad_norm": 0.27495355893049633,
"learning_rate": 1.9970715152902257e-05,
"loss": 1.2381,
"step": 140
},
{
"epoch": 0.12702584318878668,
"grad_norm": 0.25282538712904246,
"learning_rate": 1.9957838880989076e-05,
"loss": 1.2281,
"step": 145
},
{
"epoch": 0.1314060446780552,
"grad_norm": 0.22995731363863653,
"learning_rate": 1.9942628604814827e-05,
"loss": 1.2051,
"step": 150
},
{
"epoch": 0.1357862461673237,
"grad_norm": 0.23386474611286825,
"learning_rate": 1.9925087889495374e-05,
"loss": 1.2141,
"step": 155
},
{
"epoch": 0.1401664476565922,
"grad_norm": 0.2309649578398331,
"learning_rate": 1.990522084637503e-05,
"loss": 1.1822,
"step": 160
},
{
"epoch": 0.1445466491458607,
"grad_norm": 0.24003805632416436,
"learning_rate": 1.9883032132062926e-05,
"loss": 1.2063,
"step": 165
},
{
"epoch": 0.14892685063512923,
"grad_norm": 0.24338387371805312,
"learning_rate": 1.98585269473415e-05,
"loss": 1.1879,
"step": 170
},
{
"epoch": 0.1533070521243977,
"grad_norm": 0.23476112374060998,
"learning_rate": 1.9831711035947552e-05,
"loss": 1.2031,
"step": 175
},
{
"epoch": 0.15768725361366623,
"grad_norm": 0.22915392045241134,
"learning_rate": 1.9802590683225945e-05,
"loss": 1.1943,
"step": 180
},
{
"epoch": 0.16206745510293474,
"grad_norm": 0.24585515208967154,
"learning_rate": 1.97711727146564e-05,
"loss": 1.1961,
"step": 185
},
{
"epoch": 0.16644765659220323,
"grad_norm": 0.21471505934692195,
"learning_rate": 1.973746449425368e-05,
"loss": 1.1883,
"step": 190
},
{
"epoch": 0.17082785808147175,
"grad_norm": 0.22942379356513737,
"learning_rate": 1.970147392284154e-05,
"loss": 1.2193,
"step": 195
},
{
"epoch": 0.17520805957074026,
"grad_norm": 0.23244007409565076,
"learning_rate": 1.9663209436200887e-05,
"loss": 1.207,
"step": 200
},
{
"epoch": 0.17958826106000875,
"grad_norm": 0.22422374010740104,
"learning_rate": 1.9622680003092503e-05,
"loss": 1.174,
"step": 205
},
{
"epoch": 0.18396846254927726,
"grad_norm": 0.2239852274530115,
"learning_rate": 1.957989512315489e-05,
"loss": 1.1766,
"step": 210
},
{
"epoch": 0.18834866403854578,
"grad_norm": 0.22417592332949882,
"learning_rate": 1.953486482467764e-05,
"loss": 1.2016,
"step": 215
},
{
"epoch": 0.1927288655278143,
"grad_norm": 0.23718560975504316,
"learning_rate": 1.9487599662250945e-05,
"loss": 1.1836,
"step": 220
},
{
"epoch": 0.19710906701708278,
"grad_norm": 0.24831579992264016,
"learning_rate": 1.9438110714291697e-05,
"loss": 1.1885,
"step": 225
},
{
"epoch": 0.2014892685063513,
"grad_norm": 0.22705440610322433,
"learning_rate": 1.9386409580446846e-05,
"loss": 1.1768,
"step": 230
},
{
"epoch": 0.2058694699956198,
"grad_norm": 0.22232082846727982,
"learning_rate": 1.933250837887457e-05,
"loss": 1.1695,
"step": 235
},
{
"epoch": 0.2102496714848883,
"grad_norm": 0.20849807972480366,
"learning_rate": 1.9276419743403934e-05,
"loss": 1.1664,
"step": 240
},
{
"epoch": 0.2146298729741568,
"grad_norm": 0.22884959889218245,
"learning_rate": 1.9218156820573618e-05,
"loss": 1.1715,
"step": 245
},
{
"epoch": 0.21901007446342532,
"grad_norm": 0.22251029198025443,
"learning_rate": 1.9157733266550577e-05,
"loss": 1.1664,
"step": 250
},
{
"epoch": 0.2233902759526938,
"grad_norm": 0.21607317255636946,
"learning_rate": 1.9095163243929143e-05,
"loss": 1.1758,
"step": 255
},
{
"epoch": 0.22777047744196233,
"grad_norm": 0.2275376818247485,
"learning_rate": 1.9030461418411498e-05,
"loss": 1.173,
"step": 260
},
{
"epoch": 0.23215067893123084,
"grad_norm": 0.2289840446661591,
"learning_rate": 1.8963642955370203e-05,
"loss": 1.1852,
"step": 265
},
{
"epoch": 0.23653088042049936,
"grad_norm": 0.23768451520157288,
"learning_rate": 1.889472351629358e-05,
"loss": 1.1551,
"step": 270
},
{
"epoch": 0.24091108190976784,
"grad_norm": 0.22459484609481822,
"learning_rate": 1.882371925511488e-05,
"loss": 1.1564,
"step": 275
},
{
"epoch": 0.24529128339903636,
"grad_norm": 0.22828454066610995,
"learning_rate": 1.875064681442594e-05,
"loss": 1.1838,
"step": 280
},
{
"epoch": 0.24967148488830487,
"grad_norm": 0.23814414474493634,
"learning_rate": 1.867552332157637e-05,
"loss": 1.165,
"step": 285
},
{
"epoch": 0.25405168637757336,
"grad_norm": 0.23695582890788727,
"learning_rate": 1.8598366384659113e-05,
"loss": 1.1496,
"step": 290
},
{
"epoch": 0.25843188786684185,
"grad_norm": 0.21285197719762172,
"learning_rate": 1.851919408838327e-05,
"loss": 1.1671,
"step": 295
},
{
"epoch": 0.2628120893561104,
"grad_norm": 0.23402098820515907,
"learning_rate": 1.843802498983529e-05,
"loss": 1.1662,
"step": 300
},
{
"epoch": 0.2671922908453789,
"grad_norm": 0.21951316193224032,
"learning_rate": 1.8354878114129368e-05,
"loss": 1.1623,
"step": 305
},
{
"epoch": 0.2715724923346474,
"grad_norm": 0.21226630310736813,
"learning_rate": 1.8269772949948185e-05,
"loss": 1.1571,
"step": 310
},
{
"epoch": 0.2759526938239159,
"grad_norm": 0.21300490616956957,
"learning_rate": 1.8182729444974993e-05,
"loss": 1.1729,
"step": 315
},
{
"epoch": 0.2803328953131844,
"grad_norm": 0.2298100232081548,
"learning_rate": 1.8093768001218096e-05,
"loss": 1.1648,
"step": 320
},
{
"epoch": 0.28471309680245294,
"grad_norm": 0.20939381292392953,
"learning_rate": 1.800290947022884e-05,
"loss": 1.1633,
"step": 325
},
{
"epoch": 0.2890932982917214,
"grad_norm": 0.21652397528724918,
"learning_rate": 1.7910175148214274e-05,
"loss": 1.1703,
"step": 330
},
{
"epoch": 0.2934734997809899,
"grad_norm": 0.23693799112996852,
"learning_rate": 1.7815586771045535e-05,
"loss": 1.1557,
"step": 335
},
{
"epoch": 0.29785370127025845,
"grad_norm": 0.23875037210600733,
"learning_rate": 1.771916650916321e-05,
"loss": 1.1546,
"step": 340
},
{
"epoch": 0.30223390275952694,
"grad_norm": 0.2086246080725189,
"learning_rate": 1.762093696238086e-05,
"loss": 1.1414,
"step": 345
},
{
"epoch": 0.3066141042487954,
"grad_norm": 0.2032809446181252,
"learning_rate": 1.752092115458784e-05,
"loss": 1.1557,
"step": 350
},
{
"epoch": 0.31099430573806397,
"grad_norm": 0.2299671919872682,
"learning_rate": 1.7419142528352815e-05,
"loss": 1.1584,
"step": 355
},
{
"epoch": 0.31537450722733246,
"grad_norm": 0.2208475255216932,
"learning_rate": 1.731562493942904e-05,
"loss": 1.1436,
"step": 360
},
{
"epoch": 0.31975470871660094,
"grad_norm": 0.21527409406555012,
"learning_rate": 1.721039265116285e-05,
"loss": 1.1592,
"step": 365
},
{
"epoch": 0.3241349102058695,
"grad_norm": 0.2075937240152717,
"learning_rate": 1.710347032880664e-05,
"loss": 1.1332,
"step": 370
},
{
"epoch": 0.328515111695138,
"grad_norm": 0.2155228439936621,
"learning_rate": 1.6994883033737582e-05,
"loss": 1.1395,
"step": 375
},
{
"epoch": 0.33289531318440646,
"grad_norm": 0.24834287218683154,
"learning_rate": 1.688465621758352e-05,
"loss": 1.1566,
"step": 380
},
{
"epoch": 0.337275514673675,
"grad_norm": 0.2401994560404927,
"learning_rate": 1.6772815716257414e-05,
"loss": 1.1582,
"step": 385
},
{
"epoch": 0.3416557161629435,
"grad_norm": 0.22010411419127637,
"learning_rate": 1.6659387743901688e-05,
"loss": 1.1479,
"step": 390
},
{
"epoch": 0.346035917652212,
"grad_norm": 0.2141467823787157,
"learning_rate": 1.6544398886743934e-05,
"loss": 1.1424,
"step": 395
},
{
"epoch": 0.3504161191414805,
"grad_norm": 0.20902762397068814,
"learning_rate": 1.6427876096865394e-05,
"loss": 1.1401,
"step": 400
},
{
"epoch": 0.354796320630749,
"grad_norm": 0.20864014440590625,
"learning_rate": 1.6309846685883726e-05,
"loss": 1.1404,
"step": 405
},
{
"epoch": 0.3591765221200175,
"grad_norm": 0.22432181453053213,
"learning_rate": 1.6190338318551426e-05,
"loss": 1.1539,
"step": 410
},
{
"epoch": 0.36355672360928604,
"grad_norm": 0.20427723583571802,
"learning_rate": 1.606937900627157e-05,
"loss": 1.1402,
"step": 415
},
{
"epoch": 0.3679369250985545,
"grad_norm": 0.20345261931074785,
"learning_rate": 1.594699710053223e-05,
"loss": 1.1454,
"step": 420
},
{
"epoch": 0.37231712658782307,
"grad_norm": 0.20907937476304458,
"learning_rate": 1.5823221286261217e-05,
"loss": 1.1282,
"step": 425
},
{
"epoch": 0.37669732807709155,
"grad_norm": 0.2065934957421514,
"learning_rate": 1.5698080575102662e-05,
"loss": 1.1334,
"step": 430
},
{
"epoch": 0.38107752956636004,
"grad_norm": 0.2161916996533843,
"learning_rate": 1.557160429861702e-05,
"loss": 1.152,
"step": 435
},
{
"epoch": 0.3854577310556286,
"grad_norm": 0.21804609517909665,
"learning_rate": 1.5443822101406066e-05,
"loss": 1.1379,
"step": 440
},
{
"epoch": 0.38983793254489707,
"grad_norm": 0.20636895330872523,
"learning_rate": 1.531476393416456e-05,
"loss": 1.1592,
"step": 445
},
{
"epoch": 0.39421813403416556,
"grad_norm": 0.20437911994063385,
"learning_rate": 1.5184460046660139e-05,
"loss": 1.1277,
"step": 450
},
{
"epoch": 0.3985983355234341,
"grad_norm": 0.2060999428500134,
"learning_rate": 1.50529409806431e-05,
"loss": 1.1453,
"step": 455
},
{
"epoch": 0.4029785370127026,
"grad_norm": 0.22533191644861675,
"learning_rate": 1.4920237562687784e-05,
"loss": 1.1502,
"step": 460
},
{
"epoch": 0.4073587385019711,
"grad_norm": 0.23260594557785327,
"learning_rate": 1.478638089696716e-05,
"loss": 1.1248,
"step": 465
},
{
"epoch": 0.4117389399912396,
"grad_norm": 0.22947787094553646,
"learning_rate": 1.4651402357962368e-05,
"loss": 1.1318,
"step": 470
},
{
"epoch": 0.4161191414805081,
"grad_norm": 0.2009264544016685,
"learning_rate": 1.4515333583108896e-05,
"loss": 1.1543,
"step": 475
},
{
"epoch": 0.4204993429697766,
"grad_norm": 0.20022135464643717,
"learning_rate": 1.4378206465381122e-05,
"loss": 1.1405,
"step": 480
},
{
"epoch": 0.42487954445904513,
"grad_norm": 0.20457278775552865,
"learning_rate": 1.4240053145816968e-05,
"loss": 1.1434,
"step": 485
},
{
"epoch": 0.4292597459483136,
"grad_norm": 0.20324616627736056,
"learning_rate": 1.4100906005984404e-05,
"loss": 1.1479,
"step": 490
},
{
"epoch": 0.4336399474375821,
"grad_norm": 0.21908018025065346,
"learning_rate": 1.396079766039157e-05,
"loss": 1.1449,
"step": 495
},
{
"epoch": 0.43802014892685065,
"grad_norm": 0.210033035128553,
"learning_rate": 1.381976094884232e-05,
"loss": 1.132,
"step": 500
},
{
"epoch": 0.44240035041611914,
"grad_norm": 0.2013972756453919,
"learning_rate": 1.3677828928738934e-05,
"loss": 1.1473,
"step": 505
},
{
"epoch": 0.4467805519053876,
"grad_norm": 0.20820144402446275,
"learning_rate": 1.3535034867333838e-05,
"loss": 1.1398,
"step": 510
},
{
"epoch": 0.45116075339465617,
"grad_norm": 0.21960485445473304,
"learning_rate": 1.3391412233932148e-05,
"loss": 1.1527,
"step": 515
},
{
"epoch": 0.45554095488392465,
"grad_norm": 0.21395914429214383,
"learning_rate": 1.3246994692046837e-05,
"loss": 1.1486,
"step": 520
},
{
"epoch": 0.45992115637319314,
"grad_norm": 0.21230485993764248,
"learning_rate": 1.3101816091508389e-05,
"loss": 1.1537,
"step": 525
},
{
"epoch": 0.4643013578624617,
"grad_norm": 0.21651837542900804,
"learning_rate": 1.2955910460530787e-05,
"loss": 1.1496,
"step": 530
},
{
"epoch": 0.46868155935173017,
"grad_norm": 0.20750629246921734,
"learning_rate": 1.2809311997735697e-05,
"loss": 1.1355,
"step": 535
},
{
"epoch": 0.4730617608409987,
"grad_norm": 0.2039566928608635,
"learning_rate": 1.266205506413667e-05,
"loss": 1.1375,
"step": 540
},
{
"epoch": 0.4774419623302672,
"grad_norm": 0.20670703932671342,
"learning_rate": 1.2514174175085346e-05,
"loss": 1.1441,
"step": 545
},
{
"epoch": 0.4818221638195357,
"grad_norm": 0.20394796072197643,
"learning_rate": 1.2365703992181425e-05,
"loss": 1.1192,
"step": 550
},
{
"epoch": 0.48620236530880423,
"grad_norm": 0.19785902077082673,
"learning_rate": 1.2216679315148388e-05,
"loss": 1.1303,
"step": 555
},
{
"epoch": 0.4905825667980727,
"grad_norm": 0.20446955932426367,
"learning_rate": 1.2067135073676841e-05,
"loss": 1.1327,
"step": 560
},
{
"epoch": 0.4949627682873412,
"grad_norm": 0.20819551033939337,
"learning_rate": 1.1917106319237386e-05,
"loss": 1.1262,
"step": 565
},
{
"epoch": 0.49934296977660975,
"grad_norm": 0.202514436311041,
"learning_rate": 1.1766628216864961e-05,
"loss": 1.1451,
"step": 570
},
{
"epoch": 0.5037231712658782,
"grad_norm": 0.20270248880014877,
"learning_rate": 1.161573603691655e-05,
"loss": 1.1096,
"step": 575
},
{
"epoch": 0.5081033727551467,
"grad_norm": 0.19902608440066913,
"learning_rate": 1.1464465146804218e-05,
"loss": 1.1429,
"step": 580
},
{
"epoch": 0.5124835742444153,
"grad_norm": 0.21488103670405032,
"learning_rate": 1.1312851002705383e-05,
"loss": 1.1258,
"step": 585
},
{
"epoch": 0.5168637757336837,
"grad_norm": 0.2079533279978145,
"learning_rate": 1.1160929141252303e-05,
"loss": 1.1326,
"step": 590
},
{
"epoch": 0.5212439772229522,
"grad_norm": 0.2024904670673594,
"learning_rate": 1.1008735171202685e-05,
"loss": 1.1516,
"step": 595
},
{
"epoch": 0.5256241787122208,
"grad_norm": 0.2009798683547318,
"learning_rate": 1.0856304765093391e-05,
"loss": 1.126,
"step": 600
},
{
"epoch": 0.5300043802014893,
"grad_norm": 0.20247591682210167,
"learning_rate": 1.0703673650879219e-05,
"loss": 1.1182,
"step": 605
},
{
"epoch": 0.5343845816907578,
"grad_norm": 0.2040201994565877,
"learning_rate": 1.0550877603558656e-05,
"loss": 1.1406,
"step": 610
},
{
"epoch": 0.5387647831800263,
"grad_norm": 0.2015454994775207,
"learning_rate": 1.0397952436788643e-05,
"loss": 1.1166,
"step": 615
},
{
"epoch": 0.5431449846692948,
"grad_norm": 0.19828586396121736,
"learning_rate": 1.024493399449025e-05,
"loss": 1.1271,
"step": 620
},
{
"epoch": 0.5475251861585633,
"grad_norm": 0.202990185981543,
"learning_rate": 1.0091858142447266e-05,
"loss": 1.1404,
"step": 625
},
{
"epoch": 0.5519053876478318,
"grad_norm": 0.1953396887915411,
"learning_rate": 9.938760759899674e-06,
"loss": 1.1282,
"step": 630
},
{
"epoch": 0.5562855891371004,
"grad_norm": 0.19897584069638927,
"learning_rate": 9.785677731133972e-06,
"loss": 1.116,
"step": 635
},
{
"epoch": 0.5606657906263688,
"grad_norm": 0.19579937491086502,
"learning_rate": 9.632644937072277e-06,
"loss": 1.1211,
"step": 640
},
{
"epoch": 0.5650459921156373,
"grad_norm": 0.19292547383921327,
"learning_rate": 9.479698246862277e-06,
"loss": 1.1249,
"step": 645
},
{
"epoch": 0.5694261936049059,
"grad_norm": 0.2035897790907176,
"learning_rate": 9.326873509469887e-06,
"loss": 1.1222,
"step": 650
},
{
"epoch": 0.5738063950941743,
"grad_norm": 0.20245065608346366,
"learning_rate": 9.174206545276678e-06,
"loss": 1.1354,
"step": 655
},
{
"epoch": 0.5781865965834428,
"grad_norm": 0.19934269310500388,
"learning_rate": 9.021733137683963e-06,
"loss": 1.1443,
"step": 660
},
{
"epoch": 0.5825667980727114,
"grad_norm": 0.19598319523008984,
"learning_rate": 8.869489024725595e-06,
"loss": 1.1326,
"step": 665
},
{
"epoch": 0.5869469995619798,
"grad_norm": 0.1945281080977394,
"learning_rate": 8.717509890691369e-06,
"loss": 1.1239,
"step": 670
},
{
"epoch": 0.5913272010512484,
"grad_norm": 0.20806507918308362,
"learning_rate": 8.565831357763039e-06,
"loss": 1.1294,
"step": 675
},
{
"epoch": 0.5957074025405169,
"grad_norm": 0.20251893056570133,
"learning_rate": 8.414488977664858e-06,
"loss": 1.1239,
"step": 680
},
{
"epoch": 0.6000876040297853,
"grad_norm": 0.1991936009015444,
"learning_rate": 8.263518223330698e-06,
"loss": 1.1322,
"step": 685
},
{
"epoch": 0.6044678055190539,
"grad_norm": 0.1961163554964441,
"learning_rate": 8.112954480589558e-06,
"loss": 1.127,
"step": 690
},
{
"epoch": 0.6088480070083224,
"grad_norm": 0.1939932601906298,
"learning_rate": 7.962833039871562e-06,
"loss": 1.1275,
"step": 695
},
{
"epoch": 0.6132282084975909,
"grad_norm": 0.19487292794675937,
"learning_rate": 7.813189087936243e-06,
"loss": 1.1222,
"step": 700
},
{
"epoch": 0.6176084099868594,
"grad_norm": 0.19843142347965212,
"learning_rate": 7.664057699625215e-06,
"loss": 1.1509,
"step": 705
},
{
"epoch": 0.6219886114761279,
"grad_norm": 0.20098071367810383,
"learning_rate": 7.515473829640987e-06,
"loss": 1.1228,
"step": 710
},
{
"epoch": 0.6263688129653964,
"grad_norm": 0.1972827327140634,
"learning_rate": 7.367472304354011e-06,
"loss": 1.1268,
"step": 715
},
{
"epoch": 0.6307490144546649,
"grad_norm": 0.20576577812522315,
"learning_rate": 7.2200878136397355e-06,
"loss": 1.115,
"step": 720
},
{
"epoch": 0.6351292159439335,
"grad_norm": 0.1972932881390162,
"learning_rate": 7.073354902747742e-06,
"loss": 1.1104,
"step": 725
},
{
"epoch": 0.6395094174332019,
"grad_norm": 0.20208492611746368,
"learning_rate": 6.927307964204695e-06,
"loss": 1.1356,
"step": 730
},
{
"epoch": 0.6438896189224704,
"grad_norm": 0.19600730585362314,
"learning_rate": 6.781981229753145e-06,
"loss": 1.1225,
"step": 735
},
{
"epoch": 0.648269820411739,
"grad_norm": 0.19661439451248647,
"learning_rate": 6.637408762327972e-06,
"loss": 1.1175,
"step": 740
},
{
"epoch": 0.6526500219010074,
"grad_norm": 0.20253732148210266,
"learning_rate": 6.4936244480724575e-06,
"loss": 1.1224,
"step": 745
},
{
"epoch": 0.657030223390276,
"grad_norm": 0.197391894393827,
"learning_rate": 6.350661988395723e-06,
"loss": 1.1369,
"step": 750
},
{
"epoch": 0.6614104248795445,
"grad_norm": 0.19396216640485134,
"learning_rate": 6.208554892073528e-06,
"loss": 1.1332,
"step": 755
},
{
"epoch": 0.6657906263688129,
"grad_norm": 0.19950066877821537,
"learning_rate": 6.067336467394169e-06,
"loss": 1.1199,
"step": 760
},
{
"epoch": 0.6701708278580815,
"grad_norm": 0.20943729884482393,
"learning_rate": 5.927039814351426e-06,
"loss": 1.1201,
"step": 765
},
{
"epoch": 0.67455102934735,
"grad_norm": 0.19237438150257924,
"learning_rate": 5.787697816886273e-06,
"loss": 1.1267,
"step": 770
},
{
"epoch": 0.6789312308366184,
"grad_norm": 0.19252386131062685,
"learning_rate": 5.649343135179271e-06,
"loss": 1.11,
"step": 775
},
{
"epoch": 0.683311432325887,
"grad_norm": 0.19638246872011564,
"learning_rate": 5.512008197995379e-06,
"loss": 1.1164,
"step": 780
},
{
"epoch": 0.6876916338151555,
"grad_norm": 0.1877681729558122,
"learning_rate": 5.375725195083046e-06,
"loss": 1.1257,
"step": 785
},
{
"epoch": 0.692071835304424,
"grad_norm": 0.19133651997309503,
"learning_rate": 5.240526069629265e-06,
"loss": 1.134,
"step": 790
},
{
"epoch": 0.6964520367936925,
"grad_norm": 0.19745307442946453,
"learning_rate": 5.106442510772489e-06,
"loss": 1.1412,
"step": 795
},
{
"epoch": 0.700832238282961,
"grad_norm": 0.18826987382961585,
"learning_rate": 4.97350594617502e-06,
"loss": 1.12,
"step": 800
},
{
"epoch": 0.7052124397722295,
"grad_norm": 0.20198435756216934,
"learning_rate": 4.8417475346567635e-06,
"loss": 1.1343,
"step": 805
},
{
"epoch": 0.709592641261498,
"grad_norm": 0.19941557715707345,
"learning_rate": 4.711198158891909e-06,
"loss": 1.1229,
"step": 810
},
{
"epoch": 0.7139728427507666,
"grad_norm": 0.20442006319949774,
"learning_rate": 4.581888418170429e-06,
"loss": 1.1353,
"step": 815
},
{
"epoch": 0.718353044240035,
"grad_norm": 0.2001055609078045,
"learning_rate": 4.453848621225913e-06,
"loss": 1.1149,
"step": 820
},
{
"epoch": 0.7227332457293035,
"grad_norm": 0.18925787454519752,
"learning_rate": 4.327108779131573e-06,
"loss": 1.133,
"step": 825
},
{
"epoch": 0.7271134472185721,
"grad_norm": 0.1863143936478909,
"learning_rate": 4.201698598265973e-06,
"loss": 1.1162,
"step": 830
},
{
"epoch": 0.7314936487078406,
"grad_norm": 0.1879694577872159,
"learning_rate": 4.077647473350201e-06,
"loss": 1.126,
"step": 835
},
{
"epoch": 0.735873850197109,
"grad_norm": 0.1919436380192233,
"learning_rate": 3.954984480558071e-06,
"loss": 1.1195,
"step": 840
},
{
"epoch": 0.7402540516863776,
"grad_norm": 0.20358611131495935,
"learning_rate": 3.83373837070101e-06,
"loss": 1.1255,
"step": 845
},
{
"epoch": 0.7446342531756461,
"grad_norm": 0.20008378915370192,
"learning_rate": 3.7139375624891795e-06,
"loss": 1.1412,
"step": 850
},
{
"epoch": 0.7490144546649146,
"grad_norm": 0.19290096180375577,
"learning_rate": 3.595610135870472e-06,
"loss": 1.1207,
"step": 855
},
{
"epoch": 0.7533946561541831,
"grad_norm": 0.196072459641094,
"learning_rate": 3.478783825448869e-06,
"loss": 1.1125,
"step": 860
},
{
"epoch": 0.7577748576434516,
"grad_norm": 0.1928642029524879,
"learning_rate": 3.3634860139837877e-06,
"loss": 1.1229,
"step": 865
},
{
"epoch": 0.7621550591327201,
"grad_norm": 0.20130571120198174,
"learning_rate": 3.249743725971849e-06,
"loss": 1.1312,
"step": 870
},
{
"epoch": 0.7665352606219886,
"grad_norm": 0.20961643180726597,
"learning_rate": 3.1375836213126653e-06,
"loss": 1.1232,
"step": 875
},
{
"epoch": 0.7709154621112572,
"grad_norm": 0.18941668671789638,
"learning_rate": 3.0270319890600465e-06,
"loss": 1.1434,
"step": 880
},
{
"epoch": 0.7752956636005256,
"grad_norm": 0.19381237131051324,
"learning_rate": 2.918114741260156e-06,
"loss": 1.1439,
"step": 885
},
{
"epoch": 0.7796758650897941,
"grad_norm": 0.1945291975485229,
"learning_rate": 2.8108574068780093e-06,
"loss": 1.1158,
"step": 890
},
{
"epoch": 0.7840560665790627,
"grad_norm": 0.18927206178983724,
"learning_rate": 2.7052851258137936e-06,
"loss": 1.1241,
"step": 895
},
{
"epoch": 0.7884362680683311,
"grad_norm": 0.19223522737541443,
"learning_rate": 2.601422643010335e-06,
"loss": 1.1056,
"step": 900
},
{
"epoch": 0.7928164695575997,
"grad_norm": 0.20045874340313333,
"learning_rate": 2.4992943026531935e-06,
"loss": 1.1239,
"step": 905
},
{
"epoch": 0.7971966710468682,
"grad_norm": 0.19018510655063894,
"learning_rate": 2.3989240424646355e-06,
"loss": 1.1189,
"step": 910
},
{
"epoch": 0.8015768725361366,
"grad_norm": 0.19034937418838804,
"learning_rate": 2.300335388092929e-06,
"loss": 1.1152,
"step": 915
},
{
"epoch": 0.8059570740254052,
"grad_norm": 0.19645900089237364,
"learning_rate": 2.2035514475981756e-06,
"loss": 1.1313,
"step": 920
},
{
"epoch": 0.8103372755146737,
"grad_norm": 0.18847516841511264,
"learning_rate": 2.1085949060360654e-06,
"loss": 1.1135,
"step": 925
},
{
"epoch": 0.8147174770039421,
"grad_norm": 0.19132781030558177,
"learning_rate": 2.015488020140737e-06,
"loss": 1.1045,
"step": 930
},
{
"epoch": 0.8190976784932107,
"grad_norm": 0.18904026009165054,
"learning_rate": 1.924252613108073e-06,
"loss": 1.1318,
"step": 935
},
{
"epoch": 0.8234778799824792,
"grad_norm": 0.19251546970491693,
"learning_rate": 1.8349100694805711e-06,
"loss": 1.1243,
"step": 940
},
{
"epoch": 0.8278580814717477,
"grad_norm": 0.19006518497007893,
"learning_rate": 1.7474813301350668e-06,
"loss": 1.123,
"step": 945
},
{
"epoch": 0.8322382829610162,
"grad_norm": 0.19325212813265874,
"learning_rate": 1.661986887374415e-06,
"loss": 1.1129,
"step": 950
},
{
"epoch": 0.8366184844502847,
"grad_norm": 0.1889419224748978,
"learning_rate": 1.578446780124344e-06,
"loss": 1.1103,
"step": 955
},
{
"epoch": 0.8409986859395532,
"grad_norm": 0.19033279856504365,
"learning_rate": 1.49688058923654e-06,
"loss": 1.1098,
"step": 960
},
{
"epoch": 0.8453788874288217,
"grad_norm": 0.18633846336335902,
"learning_rate": 1.4173074328991376e-06,
"loss": 1.1287,
"step": 965
},
{
"epoch": 0.8497590889180903,
"grad_norm": 0.19006030021409406,
"learning_rate": 1.339745962155613e-06,
"loss": 1.1329,
"step": 970
},
{
"epoch": 0.8541392904073587,
"grad_norm": 0.18903779583859964,
"learning_rate": 1.2642143565332154e-06,
"loss": 1.1316,
"step": 975
},
{
"epoch": 0.8585194918966272,
"grad_norm": 0.19272564813632415,
"learning_rate": 1.1907303197818665e-06,
"loss": 1.1303,
"step": 980
},
{
"epoch": 0.8628996933858958,
"grad_norm": 0.18878052764179085,
"learning_rate": 1.1193110757246251e-06,
"loss": 1.1205,
"step": 985
},
{
"epoch": 0.8672798948751642,
"grad_norm": 0.19218035241765347,
"learning_rate": 1.0499733642206034e-06,
"loss": 1.124,
"step": 990
},
{
"epoch": 0.8716600963644328,
"grad_norm": 0.18852107182367117,
"learning_rate": 9.827334372413444e-07,
"loss": 1.1301,
"step": 995
},
{
"epoch": 0.8760402978537013,
"grad_norm": 0.19264462588904008,
"learning_rate": 9.176070550615379e-07,
"loss": 1.1254,
"step": 1000
},
{
"epoch": 0.8804204993429697,
"grad_norm": 0.18440224651228512,
"learning_rate": 8.546094825649909e-07,
"loss": 1.1189,
"step": 1005
},
{
"epoch": 0.8848007008322383,
"grad_norm": 0.18637643466062778,
"learning_rate": 7.937554856667196e-07,
"loss": 1.1386,
"step": 1010
},
{
"epoch": 0.8891809023215068,
"grad_norm": 0.19217636415097236,
"learning_rate": 7.350593278519824e-07,
"loss": 1.1512,
"step": 1015
},
{
"epoch": 0.8935611038107752,
"grad_norm": 0.19868031904477793,
"learning_rate": 6.785347668330777e-07,
"loss": 1.1385,
"step": 1020
},
{
"epoch": 0.8979413053000438,
"grad_norm": 0.18822804076907482,
"learning_rate": 6.241950513246931e-07,
"loss": 1.1236,
"step": 1025
},
{
"epoch": 0.9023215067893123,
"grad_norm": 0.18768750481368643,
"learning_rate": 5.720529179385659e-07,
"loss": 1.1096,
"step": 1030
},
{
"epoch": 0.9067017082785808,
"grad_norm": 0.18565141772239874,
"learning_rate": 5.221205881981594e-07,
"loss": 1.1197,
"step": 1035
},
{
"epoch": 0.9110819097678493,
"grad_norm": 0.2034853308480849,
"learning_rate": 4.7440976567407096e-07,
"loss": 1.1371,
"step": 1040
},
{
"epoch": 0.9154621112571178,
"grad_norm": 0.1998255782368389,
"learning_rate": 4.2893163324085886e-07,
"loss": 1.1115,
"step": 1045
},
{
"epoch": 0.9198423127463863,
"grad_norm": 0.19439905516574385,
"learning_rate": 3.856968504558989e-07,
"loss": 1.1156,
"step": 1050
},
{
"epoch": 0.9242225142356548,
"grad_norm": 0.18518716914861474,
"learning_rate": 3.4471555106090573e-07,
"loss": 1.1316,
"step": 1055
},
{
"epoch": 0.9286027157249234,
"grad_norm": 0.18819573805967685,
"learning_rate": 3.059973406066963e-07,
"loss": 1.1215,
"step": 1060
},
{
"epoch": 0.9329829172141918,
"grad_norm": 0.18672133869392946,
"learning_rate": 2.6955129420176193e-07,
"loss": 1.1264,
"step": 1065
},
{
"epoch": 0.9373631187034603,
"grad_norm": 0.18848502790832367,
"learning_rate": 2.3538595438516442e-07,
"loss": 1.1215,
"step": 1070
},
{
"epoch": 0.9417433201927289,
"grad_norm": 0.18909475354670285,
"learning_rate": 2.035093291242607e-07,
"loss": 1.1324,
"step": 1075
},
{
"epoch": 0.9461235216819974,
"grad_norm": 0.18392034150187955,
"learning_rate": 1.7392888993773005e-07,
"loss": 1.0989,
"step": 1080
},
{
"epoch": 0.9505037231712659,
"grad_norm": 0.18422655351430084,
"learning_rate": 1.466515701443294e-07,
"loss": 1.149,
"step": 1085
},
{
"epoch": 0.9548839246605344,
"grad_norm": 0.18472721089921113,
"learning_rate": 1.2168376323780652e-07,
"loss": 1.1338,
"step": 1090
},
{
"epoch": 0.9592641261498029,
"grad_norm": 0.18618589859950402,
"learning_rate": 9.90313213883376e-08,
"loss": 1.1488,
"step": 1095
},
{
"epoch": 0.9636443276390714,
"grad_norm": 0.1841956829600993,
"learning_rate": 7.86995540708424e-08,
"loss": 1.1058,
"step": 1100
},
{
"epoch": 0.9680245291283399,
"grad_norm": 0.18737860582967023,
"learning_rate": 6.069322682050516e-08,
"loss": 1.1347,
"step": 1105
},
{
"epoch": 0.9724047306176085,
"grad_norm": 0.18327423894477116,
"learning_rate": 4.501656011579037e-08,
"loss": 1.1223,
"step": 1110
},
{
"epoch": 0.9767849321068769,
"grad_norm": 0.18664413828439025,
"learning_rate": 3.167322838920406e-08,
"loss": 1.1036,
"step": 1115
},
{
"epoch": 0.9811651335961454,
"grad_norm": 0.18891847587451832,
"learning_rate": 2.066635916605386e-08,
"loss": 1.1229,
"step": 1120
},
{
"epoch": 0.985545335085414,
"grad_norm": 0.19664758693417972,
"learning_rate": 1.1998532331389812e-08,
"loss": 1.156,
"step": 1125
},
{
"epoch": 0.9899255365746824,
"grad_norm": 0.18716549597123566,
"learning_rate": 5.671779525311394e-09,
"loss": 1.1146,
"step": 1130
},
{
"epoch": 0.994305738063951,
"grad_norm": 0.1847881495731737,
"learning_rate": 1.6875836667729073e-09,
"loss": 1.1207,
"step": 1135
},
{
"epoch": 0.9986859395532195,
"grad_norm": 0.18567353384044155,
"learning_rate": 4.687860599927874e-11,
"loss": 1.1244,
"step": 1140
},
{
"epoch": 0.9995619798510732,
"step": 1141,
"total_flos": 215836199485440.0,
"train_loss": 1.3199829850460123,
"train_runtime": 8951.0623,
"train_samples_per_second": 16.317,
"train_steps_per_second": 0.127
}
],
"logging_steps": 5,
"max_steps": 1141,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 100,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 215836199485440.0,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}
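A minimal sketch of how this log can be read back (assumption: the JSON above is saved locally as "trainer_state.json"): each entry of log_history carries the per-step metrics logged every 5 steps (step, epoch, loss, learning_rate, grad_norm), while the final entry holds the run summary (train_loss, train_runtime, and so on).

```python
# Sketch: load trainer_state.json and print the logged training-loss curve.
# Assumption: the file path "trainer_state.json" points at the JSON shown above.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step records have a "loss" key; the last record is the run summary instead.
for record in state["log_history"]:
    if "loss" in record:
        print(f"step {record['step']:>5}  epoch {record['epoch']:.4f}  loss {record['loss']:.4f}")

summary = state["log_history"][-1]
print("final train_loss:", summary.get("train_loss"))
print("train_runtime (s):", summary.get("train_runtime"))
```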