{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 30.0,
"eval_steps": 500,
"global_step": 150420,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0997207818109294,
"grad_norm": 0.9261826276779175,
"learning_rate": 4.975069804547268e-05,
"loss": 9.3488,
"step": 500
},
{
"epoch": 0.1994415636218588,
"grad_norm": 0.5372676253318787,
"learning_rate": 4.9501396090945354e-05,
"loss": 8.0556,
"step": 1000
},
{
"epoch": 0.2991623454327882,
"grad_norm": 0.5985580086708069,
"learning_rate": 4.925209413641803e-05,
"loss": 7.7905,
"step": 1500
},
{
"epoch": 0.3988831272437176,
"grad_norm": 0.7738422751426697,
"learning_rate": 4.900279218189071e-05,
"loss": 7.5996,
"step": 2000
},
{
"epoch": 0.49860390905464697,
"grad_norm": 0.8470643162727356,
"learning_rate": 4.875349022736339e-05,
"loss": 7.4439,
"step": 2500
},
{
"epoch": 0.5983246908655764,
"grad_norm": 0.9632411003112793,
"learning_rate": 4.850418827283606e-05,
"loss": 7.2826,
"step": 3000
},
{
"epoch": 0.6980454726765057,
"grad_norm": 0.9540568590164185,
"learning_rate": 4.825488631830874e-05,
"loss": 7.1712,
"step": 3500
},
{
"epoch": 0.7977662544874352,
"grad_norm": 0.9605555534362793,
"learning_rate": 4.8005584363781416e-05,
"loss": 7.0881,
"step": 4000
},
{
"epoch": 0.8974870362983646,
"grad_norm": 1.101860523223877,
"learning_rate": 4.775678101316314e-05,
"loss": 7.0052,
"step": 4500
},
{
"epoch": 0.9972078181092939,
"grad_norm": 1.8117992877960205,
"learning_rate": 4.750747905863582e-05,
"loss": 6.9103,
"step": 5000
},
{
"epoch": 1.0969285999202234,
"grad_norm": 1.2837625741958618,
"learning_rate": 4.72581771041085e-05,
"loss": 6.8231,
"step": 5500
},
{
"epoch": 1.1966493817311528,
"grad_norm": 1.3095475435256958,
"learning_rate": 4.7008875149581175e-05,
"loss": 6.7796,
"step": 6000
},
{
"epoch": 1.2963701635420821,
"grad_norm": 1.4652228355407715,
"learning_rate": 4.6760071798962906e-05,
"loss": 6.707,
"step": 6500
},
{
"epoch": 1.3960909453530115,
"grad_norm": 1.357987403869629,
"learning_rate": 4.651076984443558e-05,
"loss": 6.6568,
"step": 7000
},
{
"epoch": 1.4958117271639408,
"grad_norm": 1.31229829788208,
"learning_rate": 4.626146788990826e-05,
"loss": 6.6144,
"step": 7500
},
{
"epoch": 1.5955325089748702,
"grad_norm": 1.4246258735656738,
"learning_rate": 4.6012165935380934e-05,
"loss": 6.5669,
"step": 8000
},
{
"epoch": 1.6952532907857998,
"grad_norm": 1.3963290452957153,
"learning_rate": 4.576286398085361e-05,
"loss": 6.508,
"step": 8500
},
{
"epoch": 1.7949740725967291,
"grad_norm": 1.5991405248641968,
"learning_rate": 4.551406063023534e-05,
"loss": 6.4651,
"step": 9000
},
{
"epoch": 1.8946948544076585,
"grad_norm": 1.6727421283721924,
"learning_rate": 4.526475867570802e-05,
"loss": 6.4129,
"step": 9500
},
{
"epoch": 1.994415636218588,
"grad_norm": 1.5657908916473389,
"learning_rate": 4.50154567211807e-05,
"loss": 6.3859,
"step": 10000
},
{
"epoch": 2.0941364180295174,
"grad_norm": 1.4079279899597168,
"learning_rate": 4.4766154766653375e-05,
"loss": 6.3136,
"step": 10500
},
{
"epoch": 2.193857199840447,
"grad_norm": 1.4724199771881104,
"learning_rate": 4.45173514160351e-05,
"loss": 6.2935,
"step": 11000
},
{
"epoch": 2.293577981651376,
"grad_norm": 1.7075014114379883,
"learning_rate": 4.426804946150778e-05,
"loss": 6.2724,
"step": 11500
},
{
"epoch": 2.3932987634623055,
"grad_norm": 1.550031304359436,
"learning_rate": 4.401874750698046e-05,
"loss": 6.2526,
"step": 12000
},
{
"epoch": 2.493019545273235,
"grad_norm": 1.5938421487808228,
"learning_rate": 4.376944555245313e-05,
"loss": 6.2218,
"step": 12500
},
{
"epoch": 2.5927403270841642,
"grad_norm": 1.853521704673767,
"learning_rate": 4.3520642201834866e-05,
"loss": 6.1679,
"step": 13000
},
{
"epoch": 2.6924611088950936,
"grad_norm": 1.7296770811080933,
"learning_rate": 4.327134024730754e-05,
"loss": 6.1442,
"step": 13500
},
{
"epoch": 2.792181890706023,
"grad_norm": 1.752852201461792,
"learning_rate": 4.302203829278022e-05,
"loss": 6.1241,
"step": 14000
},
{
"epoch": 2.8919026725169523,
"grad_norm": 1.6265596151351929,
"learning_rate": 4.2772736338252893e-05,
"loss": 6.0973,
"step": 14500
},
{
"epoch": 2.9916234543278817,
"grad_norm": 1.7537871599197388,
"learning_rate": 4.2523932987634625e-05,
"loss": 6.0858,
"step": 15000
},
{
"epoch": 3.0913442361388115,
"grad_norm": 1.5949361324310303,
"learning_rate": 4.22746310331073e-05,
"loss": 6.0188,
"step": 15500
},
{
"epoch": 3.191065017949741,
"grad_norm": 1.907575011253357,
"learning_rate": 4.202532907857998e-05,
"loss": 6.0118,
"step": 16000
},
{
"epoch": 3.29078579976067,
"grad_norm": 1.8919939994812012,
"learning_rate": 4.177602712405265e-05,
"loss": 5.9883,
"step": 16500
},
{
"epoch": 3.3905065815715996,
"grad_norm": 1.8701094388961792,
"learning_rate": 4.1527223773434384e-05,
"loss": 5.9725,
"step": 17000
},
{
"epoch": 3.490227363382529,
"grad_norm": 2.043443202972412,
"learning_rate": 4.127792181890706e-05,
"loss": 5.9474,
"step": 17500
},
{
"epoch": 3.5899481451934583,
"grad_norm": 1.852910041809082,
"learning_rate": 4.102861986437974e-05,
"loss": 5.9103,
"step": 18000
},
{
"epoch": 3.6896689270043876,
"grad_norm": 1.7579346895217896,
"learning_rate": 4.077931790985242e-05,
"loss": 5.9047,
"step": 18500
},
{
"epoch": 3.789389708815317,
"grad_norm": 1.8655468225479126,
"learning_rate": 4.053051455923414e-05,
"loss": 5.9043,
"step": 19000
},
{
"epoch": 3.8891104906262464,
"grad_norm": 2.0585408210754395,
"learning_rate": 4.0281212604706826e-05,
"loss": 5.8705,
"step": 19500
},
{
"epoch": 3.988831272437176,
"grad_norm": 2.088595151901245,
"learning_rate": 4.00319106501795e-05,
"loss": 5.8608,
"step": 20000
},
{
"epoch": 4.0885520542481055,
"grad_norm": 1.8989760875701904,
"learning_rate": 3.978260869565217e-05,
"loss": 5.8265,
"step": 20500
},
{
"epoch": 4.188272836059035,
"grad_norm": 1.8559260368347168,
"learning_rate": 3.953330674112485e-05,
"loss": 5.8052,
"step": 21000
},
{
"epoch": 4.287993617869964,
"grad_norm": 2.0016090869903564,
"learning_rate": 3.9284503390506585e-05,
"loss": 5.7947,
"step": 21500
},
{
"epoch": 4.387714399680894,
"grad_norm": 2.0224192142486572,
"learning_rate": 3.903520143597926e-05,
"loss": 5.7605,
"step": 22000
},
{
"epoch": 4.487435181491823,
"grad_norm": 1.9289922714233398,
"learning_rate": 3.8785899481451936e-05,
"loss": 5.7315,
"step": 22500
},
{
"epoch": 4.587155963302752,
"grad_norm": 2.1070337295532227,
"learning_rate": 3.853659752692461e-05,
"loss": 5.7517,
"step": 23000
},
{
"epoch": 4.686876745113682,
"grad_norm": 2.091681718826294,
"learning_rate": 3.8287794176306343e-05,
"loss": 5.7206,
"step": 23500
},
{
"epoch": 4.786597526924611,
"grad_norm": 2.0619523525238037,
"learning_rate": 3.803849222177902e-05,
"loss": 5.6829,
"step": 24000
},
{
"epoch": 4.88631830873554,
"grad_norm": 2.1252663135528564,
"learning_rate": 3.7789190267251695e-05,
"loss": 5.7028,
"step": 24500
},
{
"epoch": 4.98603909054647,
"grad_norm": 2.179452657699585,
"learning_rate": 3.753988831272438e-05,
"loss": 5.6975,
"step": 25000
},
{
"epoch": 5.085759872357399,
"grad_norm": 2.054488182067871,
"learning_rate": 3.72910849621061e-05,
"loss": 5.6527,
"step": 25500
},
{
"epoch": 5.1854806541683285,
"grad_norm": 2.3839542865753174,
"learning_rate": 3.704178300757878e-05,
"loss": 5.6498,
"step": 26000
},
{
"epoch": 5.285201435979258,
"grad_norm": 1.9893797636032104,
"learning_rate": 3.679248105305146e-05,
"loss": 5.6103,
"step": 26500
},
{
"epoch": 5.384922217790187,
"grad_norm": 2.089535713195801,
"learning_rate": 3.654317909852413e-05,
"loss": 5.6011,
"step": 27000
},
{
"epoch": 5.484642999601117,
"grad_norm": 1.9748643636703491,
"learning_rate": 3.629387714399681e-05,
"loss": 5.6206,
"step": 27500
},
{
"epoch": 5.584363781412046,
"grad_norm": 2.1696887016296387,
"learning_rate": 3.6045073793378544e-05,
"loss": 5.607,
"step": 28000
},
{
"epoch": 5.684084563222975,
"grad_norm": 2.2261533737182617,
"learning_rate": 3.579577183885121e-05,
"loss": 5.5895,
"step": 28500
},
{
"epoch": 5.783805345033905,
"grad_norm": 1.9739435911178589,
"learning_rate": 3.5546469884323896e-05,
"loss": 5.5654,
"step": 29000
},
{
"epoch": 5.883526126844835,
"grad_norm": 2.3373613357543945,
"learning_rate": 3.529716792979657e-05,
"loss": 5.554,
"step": 29500
},
{
"epoch": 5.983246908655763,
"grad_norm": 2.0227203369140625,
"learning_rate": 3.50483645791783e-05,
"loss": 5.569,
"step": 30000
},
{
"epoch": 6.082967690466694,
"grad_norm": 2.1894445419311523,
"learning_rate": 3.479906262465098e-05,
"loss": 5.5268,
"step": 30500
},
{
"epoch": 6.182688472277623,
"grad_norm": 2.3545119762420654,
"learning_rate": 3.4549760670123655e-05,
"loss": 5.5102,
"step": 31000
},
{
"epoch": 6.282409254088552,
"grad_norm": 2.380277156829834,
"learning_rate": 3.430045871559634e-05,
"loss": 5.5301,
"step": 31500
},
{
"epoch": 6.382130035899482,
"grad_norm": 2.288188934326172,
"learning_rate": 3.405165536497806e-05,
"loss": 5.4927,
"step": 32000
},
{
"epoch": 6.481850817710411,
"grad_norm": 2.2211456298828125,
"learning_rate": 3.380235341045074e-05,
"loss": 5.4786,
"step": 32500
},
{
"epoch": 6.58157159952134,
"grad_norm": 2.5629711151123047,
"learning_rate": 3.355305145592342e-05,
"loss": 5.4932,
"step": 33000
},
{
"epoch": 6.68129238133227,
"grad_norm": 2.385563611984253,
"learning_rate": 3.330374950139609e-05,
"loss": 5.4715,
"step": 33500
},
{
"epoch": 6.781013163143199,
"grad_norm": 2.284985303878784,
"learning_rate": 3.305494615077782e-05,
"loss": 5.4599,
"step": 34000
},
{
"epoch": 6.8807339449541285,
"grad_norm": 2.3653366565704346,
"learning_rate": 3.2805644196250504e-05,
"loss": 5.4498,
"step": 34500
},
{
"epoch": 6.980454726765058,
"grad_norm": 2.311102867126465,
"learning_rate": 3.255634224172317e-05,
"loss": 5.4315,
"step": 35000
},
{
"epoch": 7.080175508575987,
"grad_norm": 2.4815216064453125,
"learning_rate": 3.2307040287195855e-05,
"loss": 5.4282,
"step": 35500
},
{
"epoch": 7.179896290386917,
"grad_norm": 2.228046178817749,
"learning_rate": 3.205823693657759e-05,
"loss": 5.4321,
"step": 36000
},
{
"epoch": 7.279617072197846,
"grad_norm": 2.459022283554077,
"learning_rate": 3.1808934982050256e-05,
"loss": 5.3886,
"step": 36500
},
{
"epoch": 7.379337854008775,
"grad_norm": 2.217167615890503,
"learning_rate": 3.155963302752294e-05,
"loss": 5.3924,
"step": 37000
},
{
"epoch": 7.479058635819705,
"grad_norm": 2.3231680393218994,
"learning_rate": 3.1310331072995614e-05,
"loss": 5.389,
"step": 37500
},
{
"epoch": 7.578779417630634,
"grad_norm": 2.220628261566162,
"learning_rate": 3.1061527722377346e-05,
"loss": 5.3856,
"step": 38000
},
{
"epoch": 7.678500199441563,
"grad_norm": 2.612741708755493,
"learning_rate": 3.081222576785002e-05,
"loss": 5.3851,
"step": 38500
},
{
"epoch": 7.778220981252493,
"grad_norm": 2.194031000137329,
"learning_rate": 3.05629238133227e-05,
"loss": 5.3744,
"step": 39000
},
{
"epoch": 7.877941763063422,
"grad_norm": 2.342750310897827,
"learning_rate": 3.0313621858795377e-05,
"loss": 5.3644,
"step": 39500
},
{
"epoch": 7.9776625448743514,
"grad_norm": 2.273401975631714,
"learning_rate": 3.0064818508177105e-05,
"loss": 5.3828,
"step": 40000
},
{
"epoch": 8.07738332668528,
"grad_norm": 2.5998456478118896,
"learning_rate": 2.981551655364978e-05,
"loss": 5.3399,
"step": 40500
},
{
"epoch": 8.177104108496211,
"grad_norm": 2.4312164783477783,
"learning_rate": 2.956621459912246e-05,
"loss": 5.3491,
"step": 41000
},
{
"epoch": 8.27682489030714,
"grad_norm": 2.1767194271087646,
"learning_rate": 2.9316912644595136e-05,
"loss": 5.318,
"step": 41500
},
{
"epoch": 8.37654567211807,
"grad_norm": 2.546261787414551,
"learning_rate": 2.9068109293976864e-05,
"loss": 5.3169,
"step": 42000
},
{
"epoch": 8.476266453928998,
"grad_norm": 2.5187346935272217,
"learning_rate": 2.8818807339449543e-05,
"loss": 5.3051,
"step": 42500
},
{
"epoch": 8.575987235739928,
"grad_norm": 2.4358792304992676,
"learning_rate": 2.856950538492222e-05,
"loss": 5.3063,
"step": 43000
},
{
"epoch": 8.675708017550857,
"grad_norm": 2.22619891166687,
"learning_rate": 2.8320203430394898e-05,
"loss": 5.3241,
"step": 43500
},
{
"epoch": 8.775428799361787,
"grad_norm": 2.6035451889038086,
"learning_rate": 2.8071400079776626e-05,
"loss": 5.2964,
"step": 44000
},
{
"epoch": 8.875149581172716,
"grad_norm": 2.5391156673431396,
"learning_rate": 2.7822098125249302e-05,
"loss": 5.2914,
"step": 44500
},
{
"epoch": 8.974870362983646,
"grad_norm": 2.4130935668945312,
"learning_rate": 2.757279617072198e-05,
"loss": 5.2883,
"step": 45000
},
{
"epoch": 9.074591144794574,
"grad_norm": 2.411205530166626,
"learning_rate": 2.7323494216194657e-05,
"loss": 5.2893,
"step": 45500
},
{
"epoch": 9.174311926605505,
"grad_norm": 2.484266757965088,
"learning_rate": 2.7074690865576385e-05,
"loss": 5.2649,
"step": 46000
},
{
"epoch": 9.274032708416435,
"grad_norm": 2.446840524673462,
"learning_rate": 2.6825388911049064e-05,
"loss": 5.2579,
"step": 46500
},
{
"epoch": 9.373753490227363,
"grad_norm": 2.2476446628570557,
"learning_rate": 2.657608695652174e-05,
"loss": 5.2616,
"step": 47000
},
{
"epoch": 9.473474272038294,
"grad_norm": 2.36161732673645,
"learning_rate": 2.632678500199442e-05,
"loss": 5.2458,
"step": 47500
},
{
"epoch": 9.573195053849222,
"grad_norm": 2.4564807415008545,
"learning_rate": 2.6077981651376147e-05,
"loss": 5.2522,
"step": 48000
},
{
"epoch": 9.672915835660152,
"grad_norm": 2.477536678314209,
"learning_rate": 2.5828679696848823e-05,
"loss": 5.249,
"step": 48500
},
{
"epoch": 9.77263661747108,
"grad_norm": 2.8510327339172363,
"learning_rate": 2.5579377742321503e-05,
"loss": 5.2397,
"step": 49000
},
{
"epoch": 9.872357399282011,
"grad_norm": 2.4770243167877197,
"learning_rate": 2.533007578779418e-05,
"loss": 5.2172,
"step": 49500
},
{
"epoch": 9.97207818109294,
"grad_norm": 2.492191791534424,
"learning_rate": 2.5081272437175906e-05,
"loss": 5.236,
"step": 50000
},
{
"epoch": 10.07179896290387,
"grad_norm": 2.5560014247894287,
"learning_rate": 2.4831970482648582e-05,
"loss": 5.2159,
"step": 50500
},
{
"epoch": 10.171519744714798,
"grad_norm": 2.550168752670288,
"learning_rate": 2.458266852812126e-05,
"loss": 5.2026,
"step": 51000
},
{
"epoch": 10.271240526525728,
"grad_norm": 2.562626600265503,
"learning_rate": 2.4333366573593937e-05,
"loss": 5.2121,
"step": 51500
},
{
"epoch": 10.370961308336657,
"grad_norm": 2.389833927154541,
"learning_rate": 2.408456322297567e-05,
"loss": 5.1963,
"step": 52000
},
{
"epoch": 10.470682090147587,
"grad_norm": 2.617138385772705,
"learning_rate": 2.3835261268448345e-05,
"loss": 5.1838,
"step": 52500
},
{
"epoch": 10.570402871958516,
"grad_norm": 2.6732029914855957,
"learning_rate": 2.358595931392102e-05,
"loss": 5.2135,
"step": 53000
},
{
"epoch": 10.670123653769446,
"grad_norm": 2.509752035140991,
"learning_rate": 2.33366573593937e-05,
"loss": 5.1937,
"step": 53500
},
{
"epoch": 10.769844435580374,
"grad_norm": 2.732623815536499,
"learning_rate": 2.3087355404866376e-05,
"loss": 5.2013,
"step": 54000
},
{
"epoch": 10.869565217391305,
"grad_norm": 2.7967655658721924,
"learning_rate": 2.2838552054248104e-05,
"loss": 5.1751,
"step": 54500
},
{
"epoch": 10.969285999202233,
"grad_norm": 2.6768581867218018,
"learning_rate": 2.2589250099720783e-05,
"loss": 5.1728,
"step": 55000
},
{
"epoch": 11.069006781013163,
"grad_norm": 2.3465123176574707,
"learning_rate": 2.233994814519346e-05,
"loss": 5.1869,
"step": 55500
},
{
"epoch": 11.168727562824092,
"grad_norm": 2.3460209369659424,
"learning_rate": 2.2090646190666138e-05,
"loss": 5.1704,
"step": 56000
},
{
"epoch": 11.268448344635022,
"grad_norm": 2.7022573947906494,
"learning_rate": 2.1841842840047866e-05,
"loss": 5.1508,
"step": 56500
},
{
"epoch": 11.36816912644595,
"grad_norm": 2.5259013175964355,
"learning_rate": 2.1592540885520542e-05,
"loss": 5.1441,
"step": 57000
},
{
"epoch": 11.46788990825688,
"grad_norm": 2.6938321590423584,
"learning_rate": 2.134323893099322e-05,
"loss": 5.1628,
"step": 57500
},
{
"epoch": 11.56761069006781,
"grad_norm": 2.874973773956299,
"learning_rate": 2.1093936976465897e-05,
"loss": 5.1405,
"step": 58000
},
{
"epoch": 11.66733147187874,
"grad_norm": 2.762739896774292,
"learning_rate": 2.0845133625847625e-05,
"loss": 5.1264,
"step": 58500
},
{
"epoch": 11.76705225368967,
"grad_norm": 2.617100954055786,
"learning_rate": 2.0595831671320304e-05,
"loss": 5.1297,
"step": 59000
},
{
"epoch": 11.866773035500598,
"grad_norm": 2.754258632659912,
"learning_rate": 2.034652971679298e-05,
"loss": 5.1349,
"step": 59500
},
{
"epoch": 11.966493817311529,
"grad_norm": 2.57446551322937,
"learning_rate": 2.009722776226566e-05,
"loss": 5.1401,
"step": 60000
},
{
"epoch": 12.066214599122457,
"grad_norm": 2.7069363594055176,
"learning_rate": 1.9848424411647387e-05,
"loss": 5.1093,
"step": 60500
},
{
"epoch": 12.165935380933387,
"grad_norm": 2.5617587566375732,
"learning_rate": 1.9599122457120063e-05,
"loss": 5.1071,
"step": 61000
},
{
"epoch": 12.265656162744316,
"grad_norm": 2.7849984169006348,
"learning_rate": 1.9349820502592742e-05,
"loss": 5.1298,
"step": 61500
},
{
"epoch": 12.365376944555246,
"grad_norm": 2.6736953258514404,
"learning_rate": 1.910051854806542e-05,
"loss": 5.0828,
"step": 62000
},
{
"epoch": 12.465097726366174,
"grad_norm": 2.623760938644409,
"learning_rate": 1.8851715197447146e-05,
"loss": 5.1166,
"step": 62500
},
{
"epoch": 12.564818508177105,
"grad_norm": 3.202988624572754,
"learning_rate": 1.8602413242919826e-05,
"loss": 5.1128,
"step": 63000
},
{
"epoch": 12.664539289988033,
"grad_norm": 2.4918911457061768,
"learning_rate": 1.83531112883925e-05,
"loss": 5.1181,
"step": 63500
},
{
"epoch": 12.764260071798963,
"grad_norm": 2.7274303436279297,
"learning_rate": 1.810380933386518e-05,
"loss": 5.1089,
"step": 64000
},
{
"epoch": 12.863980853609892,
"grad_norm": 2.760390520095825,
"learning_rate": 1.785500598324691e-05,
"loss": 5.1175,
"step": 64500
},
{
"epoch": 12.963701635420822,
"grad_norm": 2.7950050830841064,
"learning_rate": 1.7605704028719585e-05,
"loss": 5.1173,
"step": 65000
},
{
"epoch": 13.06342241723175,
"grad_norm": 2.5247349739074707,
"learning_rate": 1.7356402074192264e-05,
"loss": 5.0765,
"step": 65500
},
{
"epoch": 13.16314319904268,
"grad_norm": 2.5236001014709473,
"learning_rate": 1.710710011966494e-05,
"loss": 5.0699,
"step": 66000
},
{
"epoch": 13.26286398085361,
"grad_norm": 2.407404661178589,
"learning_rate": 1.6858296769046668e-05,
"loss": 5.0999,
"step": 66500
},
{
"epoch": 13.36258476266454,
"grad_norm": 2.665024518966675,
"learning_rate": 1.6608994814519347e-05,
"loss": 5.0852,
"step": 67000
},
{
"epoch": 13.462305544475468,
"grad_norm": 2.7694313526153564,
"learning_rate": 1.6359692859992023e-05,
"loss": 5.0785,
"step": 67500
},
{
"epoch": 13.562026326286398,
"grad_norm": 2.839297294616699,
"learning_rate": 1.6110390905464702e-05,
"loss": 5.0826,
"step": 68000
},
{
"epoch": 13.661747108097327,
"grad_norm": 2.831908941268921,
"learning_rate": 1.586158755484643e-05,
"loss": 5.0877,
"step": 68500
},
{
"epoch": 13.761467889908257,
"grad_norm": 2.9526407718658447,
"learning_rate": 1.5612285600319106e-05,
"loss": 5.0692,
"step": 69000
},
{
"epoch": 13.861188671719185,
"grad_norm": 2.832224130630493,
"learning_rate": 1.5362983645791785e-05,
"loss": 5.0687,
"step": 69500
},
{
"epoch": 13.960909453530116,
"grad_norm": 2.622544050216675,
"learning_rate": 1.5113681691264461e-05,
"loss": 5.065,
"step": 70000
},
{
"epoch": 14.060630235341046,
"grad_norm": 2.9850549697875977,
"learning_rate": 1.486487834064619e-05,
"loss": 5.0665,
"step": 70500
},
{
"epoch": 14.160351017151974,
"grad_norm": 2.7051777839660645,
"learning_rate": 1.4615576386118868e-05,
"loss": 5.0509,
"step": 71000
},
{
"epoch": 14.260071798962905,
"grad_norm": 2.6535110473632812,
"learning_rate": 1.4366274431591544e-05,
"loss": 5.0426,
"step": 71500
},
{
"epoch": 14.359792580773833,
"grad_norm": 2.8298914432525635,
"learning_rate": 1.4116972477064222e-05,
"loss": 5.0348,
"step": 72000
},
{
"epoch": 14.459513362584763,
"grad_norm": 2.776466131210327,
"learning_rate": 1.3868169126445951e-05,
"loss": 5.0486,
"step": 72500
},
{
"epoch": 14.559234144395692,
"grad_norm": 2.762392044067383,
"learning_rate": 1.3618867171918629e-05,
"loss": 5.0478,
"step": 73000
},
{
"epoch": 14.658954926206622,
"grad_norm": 2.7879889011383057,
"learning_rate": 1.3369565217391305e-05,
"loss": 5.0453,
"step": 73500
},
{
"epoch": 14.75867570801755,
"grad_norm": 2.7736377716064453,
"learning_rate": 1.3120263262863982e-05,
"loss": 5.0538,
"step": 74000
},
{
"epoch": 14.85839648982848,
"grad_norm": 3.0703177452087402,
"learning_rate": 1.2871459912245712e-05,
"loss": 5.0649,
"step": 74500
},
{
"epoch": 14.95811727163941,
"grad_norm": 2.8057234287261963,
"learning_rate": 1.262215795771839e-05,
"loss": 5.0558,
"step": 75000
},
{
"epoch": 15.05783805345034,
"grad_norm": 2.9843761920928955,
"learning_rate": 1.2372856003191066e-05,
"loss": 5.0367,
"step": 75500
},
{
"epoch": 15.157558835261268,
"grad_norm": 2.723043918609619,
"learning_rate": 1.2123554048663741e-05,
"loss": 5.0407,
"step": 76000
},
{
"epoch": 15.257279617072198,
"grad_norm": 2.729093551635742,
"learning_rate": 1.1874750698045473e-05,
"loss": 5.0261,
"step": 76500
},
{
"epoch": 15.357000398883127,
"grad_norm": 2.7686829566955566,
"learning_rate": 1.162544874351815e-05,
"loss": 5.0189,
"step": 77000
},
{
"epoch": 15.456721180694057,
"grad_norm": 2.7151553630828857,
"learning_rate": 1.1376146788990826e-05,
"loss": 5.0186,
"step": 77500
},
{
"epoch": 15.556441962504985,
"grad_norm": 2.6453444957733154,
"learning_rate": 1.1126844834463502e-05,
"loss": 5.0252,
"step": 78000
},
{
"epoch": 15.656162744315916,
"grad_norm": 2.6896181106567383,
"learning_rate": 1.0878041483845234e-05,
"loss": 5.0129,
"step": 78500
},
{
"epoch": 15.755883526126844,
"grad_norm": 2.6944217681884766,
"learning_rate": 1.0628739529317911e-05,
"loss": 5.0443,
"step": 79000
},
{
"epoch": 15.855604307937774,
"grad_norm": 2.782818555831909,
"learning_rate": 1.0379437574790587e-05,
"loss": 5.0325,
"step": 79500
},
{
"epoch": 15.955325089748703,
"grad_norm": 2.8319053649902344,
"learning_rate": 1.0130135620263263e-05,
"loss": 5.0277,
"step": 80000
},
{
"epoch": 16.05504587155963,
"grad_norm": 2.616605758666992,
"learning_rate": 9.881332269644994e-06,
"loss": 5.0127,
"step": 80500
},
{
"epoch": 16.15476665337056,
"grad_norm": 2.8563239574432373,
"learning_rate": 9.632030315117672e-06,
"loss": 5.0094,
"step": 81000
},
{
"epoch": 16.254487435181492,
"grad_norm": 2.536868095397949,
"learning_rate": 9.382728360590348e-06,
"loss": 4.9974,
"step": 81500
},
{
"epoch": 16.354208216992422,
"grad_norm": 2.8117527961730957,
"learning_rate": 9.133426406063023e-06,
"loss": 5.0044,
"step": 82000
},
{
"epoch": 16.453928998803352,
"grad_norm": 2.776693344116211,
"learning_rate": 8.884623055444755e-06,
"loss": 4.994,
"step": 82500
},
{
"epoch": 16.55364978061428,
"grad_norm": 2.7791900634765625,
"learning_rate": 8.635321100917432e-06,
"loss": 5.0239,
"step": 83000
},
{
"epoch": 16.65337056242521,
"grad_norm": 2.8289597034454346,
"learning_rate": 8.386019146390108e-06,
"loss": 5.0059,
"step": 83500
},
{
"epoch": 16.75309134423614,
"grad_norm": 2.89103102684021,
"learning_rate": 8.136717191862784e-06,
"loss": 5.0237,
"step": 84000
},
{
"epoch": 16.85281212604707,
"grad_norm": 2.7034354209899902,
"learning_rate": 7.887913841244516e-06,
"loss": 5.0107,
"step": 84500
},
{
"epoch": 16.952532907857996,
"grad_norm": 3.348228693008423,
"learning_rate": 7.638611886717193e-06,
"loss": 5.0128,
"step": 85000
},
{
"epoch": 17.052253689668927,
"grad_norm": 2.9482028484344482,
"learning_rate": 7.389309932189868e-06,
"loss": 5.0143,
"step": 85500
},
{
"epoch": 17.151974471479857,
"grad_norm": 2.8442418575286865,
"learning_rate": 7.140007977662546e-06,
"loss": 4.9986,
"step": 86000
},
{
"epoch": 17.251695253290787,
"grad_norm": 2.7483792304992676,
"learning_rate": 6.891204627044276e-06,
"loss": 5.0003,
"step": 86500
},
{
"epoch": 17.351416035101714,
"grad_norm": 2.8354785442352295,
"learning_rate": 6.641902672516953e-06,
"loss": 5.0106,
"step": 87000
},
{
"epoch": 17.451136816912644,
"grad_norm": 3.0782690048217773,
"learning_rate": 6.392600717989629e-06,
"loss": 4.9894,
"step": 87500
},
{
"epoch": 17.550857598723574,
"grad_norm": 2.572624444961548,
"learning_rate": 6.143298763462306e-06,
"loss": 4.9901,
"step": 88000
},
{
"epoch": 17.650578380534505,
"grad_norm": 2.726686477661133,
"learning_rate": 5.894495412844037e-06,
"loss": 5.0045,
"step": 88500
},
{
"epoch": 17.75029916234543,
"grad_norm": 2.79811429977417,
"learning_rate": 5.645193458316714e-06,
"loss": 4.9927,
"step": 89000
},
{
"epoch": 17.85001994415636,
"grad_norm": 2.7528791427612305,
"learning_rate": 5.39589150378939e-06,
"loss": 4.9834,
"step": 89500
},
{
"epoch": 17.949740725967292,
"grad_norm": 2.85099196434021,
"learning_rate": 5.146589549262067e-06,
"loss": 4.9967,
"step": 90000
},
{
"epoch": 18.049461507778222,
"grad_norm": 2.686501979827881,
"learning_rate": 4.897786198643798e-06,
"loss": 4.9948,
"step": 90500
},
{
"epoch": 18.14918228958915,
"grad_norm": 3.057145357131958,
"learning_rate": 4.648484244116474e-06,
"loss": 4.9885,
"step": 91000
},
{
"epoch": 18.24890307140008,
"grad_norm": 2.915149211883545,
"learning_rate": 4.399182289589151e-06,
"loss": 4.9876,
"step": 91500
},
{
"epoch": 18.34862385321101,
"grad_norm": 2.907449960708618,
"learning_rate": 4.149880335061828e-06,
"loss": 5.0049,
"step": 92000
},
{
"epoch": 18.44834463502194,
"grad_norm": 2.798488140106201,
"learning_rate": 3.901076984443558e-06,
"loss": 4.9643,
"step": 92500
},
{
"epoch": 18.54806541683287,
"grad_norm": 2.8539681434631348,
"learning_rate": 3.6517750299162346e-06,
"loss": 4.9917,
"step": 93000
},
{
"epoch": 18.647786198643796,
"grad_norm": 2.7598862648010254,
"learning_rate": 3.4024730753889117e-06,
"loss": 4.9894,
"step": 93500
},
{
"epoch": 18.747506980454727,
"grad_norm": 2.577714443206787,
"learning_rate": 3.153171120861588e-06,
"loss": 4.9607,
"step": 94000
},
{
"epoch": 18.847227762265657,
"grad_norm": 2.8153915405273438,
"learning_rate": 2.904367770243319e-06,
"loss": 4.9905,
"step": 94500
},
{
"epoch": 18.946948544076587,
"grad_norm": 2.7925517559051514,
"learning_rate": 2.6550658157159952e-06,
"loss": 5.0036,
"step": 95000
},
{
"epoch": 19.046669325887514,
"grad_norm": 2.60158371925354,
"learning_rate": 2.405763861188672e-06,
"loss": 4.9877,
"step": 95500
},
{
"epoch": 19.146390107698444,
"grad_norm": 2.7144620418548584,
"learning_rate": 2.156461906661348e-06,
"loss": 5.0011,
"step": 96000
},
{
"epoch": 19.246110889509374,
"grad_norm": 2.779771327972412,
"learning_rate": 1.9076585560430792e-06,
"loss": 4.9869,
"step": 96500
},
{
"epoch": 19.345831671320305,
"grad_norm": 2.8586442470550537,
"learning_rate": 1.6583566015157561e-06,
"loss": 4.9858,
"step": 97000
},
{
"epoch": 19.44555245313123,
"grad_norm": 2.572789430618286,
"learning_rate": 1.4090546469884324e-06,
"loss": 4.9934,
"step": 97500
},
{
"epoch": 19.54527323494216,
"grad_norm": 2.7267684936523438,
"learning_rate": 1.159752692461109e-06,
"loss": 4.9714,
"step": 98000
},
{
"epoch": 19.644994016753092,
"grad_norm": 2.7596216201782227,
"learning_rate": 9.109493418428401e-07,
"loss": 4.9698,
"step": 98500
},
{
"epoch": 19.744714798564022,
"grad_norm": 2.850459575653076,
"learning_rate": 6.616473873155166e-07,
"loss": 4.9653,
"step": 99000
},
{
"epoch": 19.84443558037495,
"grad_norm": 3.2066328525543213,
"learning_rate": 4.1234543278819307e-07,
"loss": 4.964,
"step": 99500
},
{
"epoch": 19.94415636218588,
"grad_norm": 3.170915365219116,
"learning_rate": 1.6304347826086955e-07,
"loss": 4.9803,
"step": 100000
},
{
"epoch": 20.0,
"step": 100280,
"total_flos": 488922611712000.0,
"train_loss": 5.48259629872295,
"train_runtime": 4495.3437,
"train_samples_per_second": 356.898,
"train_steps_per_second": 22.308
},
{
"epoch": 20.04387714399681,
"grad_norm": 2.977564573287964,
"learning_rate": 4.992687142667199e-05,
"loss": 5.0172,
"step": 100500
},
{
"epoch": 20.14359792580774,
"grad_norm": 2.9825730323791504,
"learning_rate": 4.976067012365378e-05,
"loss": 5.0479,
"step": 101000
},
{
"epoch": 20.243318707618666,
"grad_norm": 3.019160747528076,
"learning_rate": 4.959446882063555e-05,
"loss": 5.0469,
"step": 101500
},
{
"epoch": 20.343039489429597,
"grad_norm": 3.1838183403015137,
"learning_rate": 4.942826751761734e-05,
"loss": 5.0334,
"step": 102000
},
{
"epoch": 20.442760271240527,
"grad_norm": 3.0762367248535156,
"learning_rate": 4.926206621459912e-05,
"loss": 5.0445,
"step": 102500
},
{
"epoch": 20.542481053051457,
"grad_norm": 3.038895606994629,
"learning_rate": 4.909586491158091e-05,
"loss": 4.9971,
"step": 103000
},
{
"epoch": 20.642201834862384,
"grad_norm": 3.1863033771514893,
"learning_rate": 4.892966360856269e-05,
"loss": 5.0347,
"step": 103500
},
{
"epoch": 20.741922616673314,
"grad_norm": 2.8989017009735107,
"learning_rate": 4.876346230554448e-05,
"loss": 5.0353,
"step": 104000
},
{
"epoch": 20.841643398484244,
"grad_norm": 2.9284589290618896,
"learning_rate": 4.85975934051323e-05,
"loss": 5.025,
"step": 104500
},
{
"epoch": 20.941364180295174,
"grad_norm": 2.8213396072387695,
"learning_rate": 4.843139210211408e-05,
"loss": 5.0041,
"step": 105000
},
{
"epoch": 21.041084962106105,
"grad_norm": 3.0717594623565674,
"learning_rate": 4.826519079909587e-05,
"loss": 4.9886,
"step": 105500
},
{
"epoch": 21.14080574391703,
"grad_norm": 2.702904224395752,
"learning_rate": 4.809898949607765e-05,
"loss": 4.956,
"step": 106000
},
{
"epoch": 21.24052652572796,
"grad_norm": 2.5885391235351562,
"learning_rate": 4.793312059566547e-05,
"loss": 4.9718,
"step": 106500
},
{
"epoch": 21.340247307538892,
"grad_norm": 2.9367082118988037,
"learning_rate": 4.776691929264726e-05,
"loss": 4.9589,
"step": 107000
},
{
"epoch": 21.439968089349822,
"grad_norm": 2.8302552700042725,
"learning_rate": 4.760071798962904e-05,
"loss": 4.9731,
"step": 107500
},
{
"epoch": 21.53968887116075,
"grad_norm": 3.23287296295166,
"learning_rate": 4.743451668661083e-05,
"loss": 4.9309,
"step": 108000
},
{
"epoch": 21.63940965297168,
"grad_norm": 2.868462562561035,
"learning_rate": 4.7268647786198647e-05,
"loss": 4.9436,
"step": 108500
},
{
"epoch": 21.73913043478261,
"grad_norm": 2.8602261543273926,
"learning_rate": 4.710244648318043e-05,
"loss": 4.9688,
"step": 109000
},
{
"epoch": 21.83885121659354,
"grad_norm": 3.121124505996704,
"learning_rate": 4.6936245180162217e-05,
"loss": 4.9444,
"step": 109500
},
{
"epoch": 21.938571998404466,
"grad_norm": 2.777409553527832,
"learning_rate": 4.6770043877144e-05,
"loss": 4.9165,
"step": 110000
},
{
"epoch": 22.038292780215397,
"grad_norm": 2.9804909229278564,
"learning_rate": 4.660417497673182e-05,
"loss": 4.9534,
"step": 110500
},
{
"epoch": 22.138013562026327,
"grad_norm": 3.034639596939087,
"learning_rate": 4.6437973673713605e-05,
"loss": 4.9069,
"step": 111000
},
{
"epoch": 22.237734343837257,
"grad_norm": 2.7168800830841064,
"learning_rate": 4.627177237069539e-05,
"loss": 4.8861,
"step": 111500
},
{
"epoch": 22.337455125648184,
"grad_norm": 2.8833560943603516,
"learning_rate": 4.6105571067677175e-05,
"loss": 4.884,
"step": 112000
},
{
"epoch": 22.437175907459114,
"grad_norm": 2.8463797569274902,
"learning_rate": 4.5939702167264994e-05,
"loss": 4.9167,
"step": 112500
},
{
"epoch": 22.536896689270044,
"grad_norm": 2.765068531036377,
"learning_rate": 4.5773500864246776e-05,
"loss": 4.8929,
"step": 113000
},
{
"epoch": 22.636617471080974,
"grad_norm": 2.7801401615142822,
"learning_rate": 4.5607299561228564e-05,
"loss": 4.8934,
"step": 113500
},
{
"epoch": 22.7363382528919,
"grad_norm": 2.883640766143799,
"learning_rate": 4.5441098258210346e-05,
"loss": 4.8918,
"step": 114000
},
{
"epoch": 22.83605903470283,
"grad_norm": 3.0070436000823975,
"learning_rate": 4.5275229357798165e-05,
"loss": 4.894,
"step": 114500
},
{
"epoch": 22.93577981651376,
"grad_norm": 3.1484322547912598,
"learning_rate": 4.510902805477995e-05,
"loss": 4.8752,
"step": 115000
},
{
"epoch": 23.035500598324692,
"grad_norm": 3.016380786895752,
"learning_rate": 4.4942826751761735e-05,
"loss": 4.8612,
"step": 115500
},
{
"epoch": 23.13522138013562,
"grad_norm": 3.0375137329101562,
"learning_rate": 4.477662544874352e-05,
"loss": 4.8517,
"step": 116000
},
{
"epoch": 23.23494216194655,
"grad_norm": 2.926248073577881,
"learning_rate": 4.461075654833134e-05,
"loss": 4.834,
"step": 116500
},
{
"epoch": 23.33466294375748,
"grad_norm": 2.898101806640625,
"learning_rate": 4.444455524531312e-05,
"loss": 4.8456,
"step": 117000
},
{
"epoch": 23.43438372556841,
"grad_norm": 2.9906890392303467,
"learning_rate": 4.427835394229491e-05,
"loss": 4.8431,
"step": 117500
},
{
"epoch": 23.53410450737934,
"grad_norm": 2.9021828174591064,
"learning_rate": 4.4112152639276693e-05,
"loss": 4.8362,
"step": 118000
},
{
"epoch": 23.633825289190266,
"grad_norm": 2.9854063987731934,
"learning_rate": 4.394628373886451e-05,
"loss": 4.8508,
"step": 118500
},
{
"epoch": 23.733546071001197,
"grad_norm": 2.959423780441284,
"learning_rate": 4.37800824358463e-05,
"loss": 4.8395,
"step": 119000
},
{
"epoch": 23.833266852812127,
"grad_norm": 3.267308235168457,
"learning_rate": 4.361388113282808e-05,
"loss": 4.8467,
"step": 119500
},
{
"epoch": 23.932987634623057,
"grad_norm": 2.9600274562835693,
"learning_rate": 4.344767982980987e-05,
"loss": 4.8405,
"step": 120000
},
{
"epoch": 24.032708416433984,
"grad_norm": 3.0417428016662598,
"learning_rate": 4.328181092939769e-05,
"loss": 4.8078,
"step": 120500
},
{
"epoch": 24.132429198244914,
"grad_norm": 3.029172897338867,
"learning_rate": 4.311560962637947e-05,
"loss": 4.8216,
"step": 121000
},
{
"epoch": 24.232149980055844,
"grad_norm": 2.846696376800537,
"learning_rate": 4.294940832336126e-05,
"loss": 4.814,
"step": 121500
},
{
"epoch": 24.331870761866774,
"grad_norm": 3.2993550300598145,
"learning_rate": 4.278320702034304e-05,
"loss": 4.7863,
"step": 122000
},
{
"epoch": 24.4315915436777,
"grad_norm": 3.039426803588867,
"learning_rate": 4.261733811993086e-05,
"loss": 4.7851,
"step": 122500
},
{
"epoch": 24.53131232548863,
"grad_norm": 2.8034543991088867,
"learning_rate": 4.245113681691265e-05,
"loss": 4.7997,
"step": 123000
},
{
"epoch": 24.63103310729956,
"grad_norm": 3.0070390701293945,
"learning_rate": 4.228493551389443e-05,
"loss": 4.8009,
"step": 123500
},
{
"epoch": 24.730753889110492,
"grad_norm": 2.9534358978271484,
"learning_rate": 4.211873421087622e-05,
"loss": 4.8079,
"step": 124000
},
{
"epoch": 24.83047467092142,
"grad_norm": 3.184213638305664,
"learning_rate": 4.195286531046404e-05,
"loss": 4.7826,
"step": 124500
},
{
"epoch": 24.93019545273235,
"grad_norm": 2.946760416030884,
"learning_rate": 4.178666400744582e-05,
"loss": 4.7941,
"step": 125000
},
{
"epoch": 25.02991623454328,
"grad_norm": 2.929389238357544,
"learning_rate": 4.162046270442761e-05,
"loss": 4.7783,
"step": 125500
},
{
"epoch": 25.12963701635421,
"grad_norm": 2.9876906871795654,
"learning_rate": 4.145426140140939e-05,
"loss": 4.7433,
"step": 126000
},
{
"epoch": 25.229357798165136,
"grad_norm": 2.9121735095977783,
"learning_rate": 4.128839250099721e-05,
"loss": 4.7545,
"step": 126500
},
{
"epoch": 25.329078579976066,
"grad_norm": 2.848165273666382,
"learning_rate": 4.1122191197978996e-05,
"loss": 4.7756,
"step": 127000
},
{
"epoch": 25.428799361786997,
"grad_norm": 2.955857515335083,
"learning_rate": 4.095598989496078e-05,
"loss": 4.7686,
"step": 127500
},
{
"epoch": 25.528520143597927,
"grad_norm": 3.084696054458618,
"learning_rate": 4.0789788591942566e-05,
"loss": 4.756,
"step": 128000
},
{
"epoch": 25.628240925408853,
"grad_norm": 2.993539571762085,
"learning_rate": 4.0623919691530384e-05,
"loss": 4.7699,
"step": 128500
},
{
"epoch": 25.727961707219784,
"grad_norm": 3.0663325786590576,
"learning_rate": 4.0457718388512166e-05,
"loss": 4.7594,
"step": 129000
},
{
"epoch": 25.827682489030714,
"grad_norm": 3.0915310382843018,
"learning_rate": 4.0291517085493954e-05,
"loss": 4.768,
"step": 129500
},
{
"epoch": 25.927403270841644,
"grad_norm": 2.790329933166504,
"learning_rate": 4.0125315782475736e-05,
"loss": 4.7519,
"step": 130000
},
{
"epoch": 26.027124052652574,
"grad_norm": 3.1589112281799316,
"learning_rate": 3.9959446882063555e-05,
"loss": 4.7383,
"step": 130500
},
{
"epoch": 26.1268448344635,
"grad_norm": 2.9991183280944824,
"learning_rate": 3.979324557904534e-05,
"loss": 4.7297,
"step": 131000
},
{
"epoch": 26.22656561627443,
"grad_norm": 2.959322452545166,
"learning_rate": 3.9627044276027125e-05,
"loss": 4.723,
"step": 131500
},
{
"epoch": 26.32628639808536,
"grad_norm": 2.9168314933776855,
"learning_rate": 3.946084297300891e-05,
"loss": 4.7143,
"step": 132000
},
{
"epoch": 26.426007179896292,
"grad_norm": 2.9729034900665283,
"learning_rate": 3.929497407259673e-05,
"loss": 4.7176,
"step": 132500
},
{
"epoch": 26.52572796170722,
"grad_norm": 2.762373685836792,
"learning_rate": 3.9128772769578514e-05,
"loss": 4.7353,
"step": 133000
},
{
"epoch": 26.62544874351815,
"grad_norm": 3.2931153774261475,
"learning_rate": 3.89625714665603e-05,
"loss": 4.7069,
"step": 133500
},
{
"epoch": 26.72516952532908,
"grad_norm": 3.129920482635498,
"learning_rate": 3.8796370163542084e-05,
"loss": 4.7253,
"step": 134000
},
{
"epoch": 26.82489030714001,
"grad_norm": 3.0690855979919434,
"learning_rate": 3.86305012631299e-05,
"loss": 4.744,
"step": 134500
},
{
"epoch": 26.924611088950936,
"grad_norm": 2.957228183746338,
"learning_rate": 3.846429996011169e-05,
"loss": 4.7287,
"step": 135000
},
{
"epoch": 27.024331870761866,
"grad_norm": 2.922133445739746,
"learning_rate": 3.829809865709347e-05,
"loss": 4.7231,
"step": 135500
},
{
"epoch": 27.124052652572797,
"grad_norm": 3.0305354595184326,
"learning_rate": 3.813189735407526e-05,
"loss": 4.6755,
"step": 136000
},
{
"epoch": 27.223773434383727,
"grad_norm": 2.9898860454559326,
"learning_rate": 3.796602845366308e-05,
"loss": 4.6737,
"step": 136500
},
{
"epoch": 27.323494216194653,
"grad_norm": 3.0518152713775635,
"learning_rate": 3.779982715064486e-05,
"loss": 4.7161,
"step": 137000
},
{
"epoch": 27.423214998005584,
"grad_norm": 3.14530086517334,
"learning_rate": 3.763362584762665e-05,
"loss": 4.6827,
"step": 137500
},
{
"epoch": 27.522935779816514,
"grad_norm": 2.9844906330108643,
"learning_rate": 3.746742454460843e-05,
"loss": 4.7015,
"step": 138000
},
{
"epoch": 27.622656561627444,
"grad_norm": 3.1187822818756104,
"learning_rate": 3.730155564419625e-05,
"loss": 4.6907,
"step": 138500
},
{
"epoch": 27.72237734343837,
"grad_norm": 3.1447060108184814,
"learning_rate": 3.713535434117804e-05,
"loss": 4.6979,
"step": 139000
},
{
"epoch": 27.8220981252493,
"grad_norm": 3.2830941677093506,
"learning_rate": 3.696915303815982e-05,
"loss": 4.7023,
"step": 139500
},
{
"epoch": 27.92181890706023,
"grad_norm": 2.969634532928467,
"learning_rate": 3.680295173514161e-05,
"loss": 4.7053,
"step": 140000
},
{
"epoch": 28.02153968887116,
"grad_norm": 3.082902431488037,
"learning_rate": 3.663708283472943e-05,
"loss": 4.6914,
"step": 140500
},
{
"epoch": 28.121260470682092,
"grad_norm": 3.165813446044922,
"learning_rate": 3.647088153171121e-05,
"loss": 4.6726,
"step": 141000
},
{
"epoch": 28.22098125249302,
"grad_norm": 3.1427435874938965,
"learning_rate": 3.6304680228693e-05,
"loss": 4.6586,
"step": 141500
},
{
"epoch": 28.32070203430395,
"grad_norm": 3.179264545440674,
"learning_rate": 3.613847892567478e-05,
"loss": 4.6576,
"step": 142000
},
{
"epoch": 28.42042281611488,
"grad_norm": 3.1044764518737793,
"learning_rate": 3.59726100252626e-05,
"loss": 4.6713,
"step": 142500
},
{
"epoch": 28.52014359792581,
"grad_norm": 3.049412488937378,
"learning_rate": 3.5806408722244386e-05,
"loss": 4.6702,
"step": 143000
},
{
"epoch": 28.619864379736736,
"grad_norm": 3.128653049468994,
"learning_rate": 3.564020741922617e-05,
"loss": 4.6872,
"step": 143500
},
{
"epoch": 28.719585161547666,
"grad_norm": 3.13429856300354,
"learning_rate": 3.5474006116207956e-05,
"loss": 4.6487,
"step": 144000
},
{
"epoch": 28.819305943358597,
"grad_norm": 3.0185248851776123,
"learning_rate": 3.530780481318974e-05,
"loss": 4.68,
"step": 144500
},
{
"epoch": 28.919026725169527,
"grad_norm": 2.990931749343872,
"learning_rate": 3.5141935912777556e-05,
"loss": 4.669,
"step": 145000
},
{
"epoch": 29.018747506980453,
"grad_norm": 2.9707412719726562,
"learning_rate": 3.4975734609759345e-05,
"loss": 4.663,
"step": 145500
},
{
"epoch": 29.118468288791384,
"grad_norm": 3.247962713241577,
"learning_rate": 3.4809533306741126e-05,
"loss": 4.6391,
"step": 146000
},
{
"epoch": 29.218189070602314,
"grad_norm": 3.135483503341675,
"learning_rate": 3.4643332003722915e-05,
"loss": 4.6368,
"step": 146500
},
{
"epoch": 29.317909852413244,
"grad_norm": 3.4479868412017822,
"learning_rate": 3.4477463103310734e-05,
"loss": 4.6437,
"step": 147000
},
{
"epoch": 29.41763063422417,
"grad_norm": 3.3987677097320557,
"learning_rate": 3.4311261800292515e-05,
"loss": 4.6635,
"step": 147500
},
{
"epoch": 29.5173514160351,
"grad_norm": 3.153754234313965,
"learning_rate": 3.4145060497274304e-05,
"loss": 4.6128,
"step": 148000
},
{
"epoch": 29.61707219784603,
"grad_norm": 3.29654860496521,
"learning_rate": 3.3978859194256085e-05,
"loss": 4.664,
"step": 148500
},
{
"epoch": 29.71679297965696,
"grad_norm": 3.0110297203063965,
"learning_rate": 3.3812990293843904e-05,
"loss": 4.6438,
"step": 149000
},
{
"epoch": 29.81651376146789,
"grad_norm": 3.0456008911132812,
"learning_rate": 3.364678899082569e-05,
"loss": 4.6476,
"step": 149500
},
{
"epoch": 29.91623454327882,
"grad_norm": 3.3188984394073486,
"learning_rate": 3.3480587687807474e-05,
"loss": 4.6508,
"step": 150000
},
{
"epoch": 30.0,
"step": 150420,
"total_flos": 733383917568000.0,
"train_loss": 1.6002090492649228,
"train_runtime": 2253.0761,
"train_samples_per_second": 1068.126,
"train_steps_per_second": 66.762
}
],
"logging_steps": 500,
"max_steps": 150420,
"num_input_tokens_seen": 0,
"num_train_epochs": 30,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 733383917568000.0,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}