125k_refsom / trainer_state.json
Commit 1c2cd29 (verified): "Upload folder using huggingface_hub" by cjfcsjt
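For orientation, here is a minimal sketch (not part of the uploaded file) of how a log like this can be inspected offline. It assumes the standard Hugging Face Trainer trainer_state.json layout reproduced below, i.e. a top-level "log_history" list whose entries carry "step", "loss", "learning_rate", and "grad_norm"; the local filename is an assumption.

# Minimal sketch: summarize the training curve stored in a trainer_state.json
# file. Field names follow the standard Hugging Face Trainer state layout;
# the local path "trainer_state.json" is assumed.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only entries that logged a training loss (evaluation entries may lack it).
history = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in history]
losses = [e["loss"] for e in history]

print(f"global_step={state['global_step']}, epoch={state['epoch']:.4f}")
print(f"logged points: {len(steps)}")
print(f"first loss={losses[0]:.4f} (step {steps[0]}), last loss={losses[-1]:.4f} (step {steps[-1]})")

The raw file contents follow.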
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.9999666544399614,
"eval_steps": 500,
"global_step": 7497,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.001333822401547234,
"grad_norm": 72.53171450734499,
"learning_rate": 4e-06,
"loss": 1.0439,
"step": 10
},
{
"epoch": 0.002667644803094468,
"grad_norm": 7.72170877312484,
"learning_rate": 8e-06,
"loss": 1.0385,
"step": 20
},
{
"epoch": 0.004001467204641702,
"grad_norm": 28.037294890647996,
"learning_rate": 1.2e-05,
"loss": 1.0328,
"step": 30
},
{
"epoch": 0.005335289606188936,
"grad_norm": 10.980963099753343,
"learning_rate": 1.6e-05,
"loss": 0.9608,
"step": 40
},
{
"epoch": 0.00666911200773617,
"grad_norm": 9.853684997615185,
"learning_rate": 1.9999999999999998e-05,
"loss": 0.8941,
"step": 50
},
{
"epoch": 0.008002934409283404,
"grad_norm": 33.396994669126855,
"learning_rate": 2.4e-05,
"loss": 0.9174,
"step": 60
},
{
"epoch": 0.009336756810830639,
"grad_norm": 5.621190624979587,
"learning_rate": 2.8e-05,
"loss": 0.8146,
"step": 70
},
{
"epoch": 0.010670579212377872,
"grad_norm": 2.4646688601781843,
"learning_rate": 2.9999966406213696e-05,
"loss": 0.7774,
"step": 80
},
{
"epoch": 0.012004401613925107,
"grad_norm": 2.1272107013027215,
"learning_rate": 2.9999697656826056e-05,
"loss": 0.7426,
"step": 90
},
{
"epoch": 0.01333822401547234,
"grad_norm": 2.0001927521328575,
"learning_rate": 2.9999160162865885e-05,
"loss": 0.7251,
"step": 100
},
{
"epoch": 0.014672046417019574,
"grad_norm": 4.278494734910313,
"learning_rate": 2.9998353933963273e-05,
"loss": 0.7283,
"step": 110
},
{
"epoch": 0.016005868818566808,
"grad_norm": 84.74138622339441,
"learning_rate": 2.999727898456315e-05,
"loss": 0.721,
"step": 120
},
{
"epoch": 0.017339691220114042,
"grad_norm": 2.5596285292704533,
"learning_rate": 2.999593533392503e-05,
"loss": 0.7476,
"step": 130
},
{
"epoch": 0.018673513621661277,
"grad_norm": 3.167163073798878,
"learning_rate": 2.9994323006122654e-05,
"loss": 0.7701,
"step": 140
},
{
"epoch": 0.02000733602320851,
"grad_norm": 12.240132620099228,
"learning_rate": 2.9992442030043557e-05,
"loss": 0.7042,
"step": 150
},
{
"epoch": 0.021341158424755743,
"grad_norm": 2.266079140233148,
"learning_rate": 2.9990292439388565e-05,
"loss": 0.6945,
"step": 160
},
{
"epoch": 0.022674980826302978,
"grad_norm": 4.493189214061043,
"learning_rate": 2.9987874272671168e-05,
"loss": 0.7811,
"step": 170
},
{
"epoch": 0.024008803227850213,
"grad_norm": 3.8513915234854132,
"learning_rate": 2.9985187573216855e-05,
"loss": 0.7229,
"step": 180
},
{
"epoch": 0.025342625629397444,
"grad_norm": 2.1061250870336896,
"learning_rate": 2.998223238916232e-05,
"loss": 0.7253,
"step": 190
},
{
"epoch": 0.02667644803094468,
"grad_norm": 2.8326879963181373,
"learning_rate": 2.9979008773454618e-05,
"loss": 0.7576,
"step": 200
},
{
"epoch": 0.028010270432491914,
"grad_norm": 3.0294324007099402,
"learning_rate": 2.997551678385019e-05,
"loss": 0.7362,
"step": 210
},
{
"epoch": 0.02934409283403915,
"grad_norm": 1.5691302199679913,
"learning_rate": 2.997175648291384e-05,
"loss": 0.7212,
"step": 220
},
{
"epoch": 0.03067791523558638,
"grad_norm": 4.379709379404857,
"learning_rate": 2.996772793801763e-05,
"loss": 0.7382,
"step": 230
},
{
"epoch": 0.032011737637133615,
"grad_norm": 1.899348727085658,
"learning_rate": 2.996343122133965e-05,
"loss": 0.7056,
"step": 240
},
{
"epoch": 0.033345560038680847,
"grad_norm": 21.481584335801955,
"learning_rate": 2.9958866409862745e-05,
"loss": 0.697,
"step": 250
},
{
"epoch": 0.034679382440228085,
"grad_norm": 3.022034785435926,
"learning_rate": 2.9954033585373108e-05,
"loss": 0.69,
"step": 260
},
{
"epoch": 0.036013204841775316,
"grad_norm": 2.869274575136443,
"learning_rate": 2.994893283445885e-05,
"loss": 0.7026,
"step": 270
},
{
"epoch": 0.037347027243322554,
"grad_norm": 7.8785627268474,
"learning_rate": 2.9943564248508415e-05,
"loss": 0.7021,
"step": 280
},
{
"epoch": 0.038680849644869786,
"grad_norm": 4.450751706186503,
"learning_rate": 2.9937927923708966e-05,
"loss": 0.6814,
"step": 290
},
{
"epoch": 0.04001467204641702,
"grad_norm": 11.613541400971123,
"learning_rate": 2.993202396104465e-05,
"loss": 0.7043,
"step": 300
},
{
"epoch": 0.041348494447964255,
"grad_norm": 2.0602161594051536,
"learning_rate": 2.9925852466294795e-05,
"loss": 0.7073,
"step": 310
},
{
"epoch": 0.04268231684951149,
"grad_norm": 19.383133688390824,
"learning_rate": 2.9919413550032014e-05,
"loss": 0.6965,
"step": 320
},
{
"epoch": 0.04401613925105872,
"grad_norm": 7.9603948136759906,
"learning_rate": 2.991270732762022e-05,
"loss": 0.7029,
"step": 330
},
{
"epoch": 0.045349961652605957,
"grad_norm": 2.810319267000959,
"learning_rate": 2.990573391921255e-05,
"loss": 0.7412,
"step": 340
},
{
"epoch": 0.04668378405415319,
"grad_norm": 6.082487526538632,
"learning_rate": 2.989849344974924e-05,
"loss": 0.6913,
"step": 350
},
{
"epoch": 0.048017606455700426,
"grad_norm": 1.3651265332328906,
"learning_rate": 2.9890986048955368e-05,
"loss": 0.6802,
"step": 360
},
{
"epoch": 0.04935142885724766,
"grad_norm": 2.7766095141894023,
"learning_rate": 2.9883211851338516e-05,
"loss": 0.7254,
"step": 370
},
{
"epoch": 0.05068525125879489,
"grad_norm": 2.2284551774435033,
"learning_rate": 2.9875170996186392e-05,
"loss": 0.7026,
"step": 380
},
{
"epoch": 0.05201907366034213,
"grad_norm": 2.1609680967998957,
"learning_rate": 2.986686362756431e-05,
"loss": 0.7415,
"step": 390
},
{
"epoch": 0.05335289606188936,
"grad_norm": 2.071743222471935,
"learning_rate": 2.9858289894312617e-05,
"loss": 0.7081,
"step": 400
},
{
"epoch": 0.0546867184634366,
"grad_norm": 7.349693198515479,
"learning_rate": 2.9849449950044036e-05,
"loss": 0.7212,
"step": 410
},
{
"epoch": 0.05602054086498383,
"grad_norm": 12.990346405367537,
"learning_rate": 2.984034395314088e-05,
"loss": 0.7187,
"step": 420
},
{
"epoch": 0.05735436326653106,
"grad_norm": 2.4106321189642723,
"learning_rate": 2.983097206675227e-05,
"loss": 0.6881,
"step": 430
},
{
"epoch": 0.0586881856680783,
"grad_norm": 3.1647778114563163,
"learning_rate": 2.9821334458791156e-05,
"loss": 0.7529,
"step": 440
},
{
"epoch": 0.06002200806962553,
"grad_norm": 2.7769936556882713,
"learning_rate": 2.9811431301931344e-05,
"loss": 0.7073,
"step": 450
},
{
"epoch": 0.06135583047117276,
"grad_norm": 2.834916196253906,
"learning_rate": 2.9801262773604377e-05,
"loss": 0.6649,
"step": 460
},
{
"epoch": 0.06268965287271999,
"grad_norm": 2.431783869557187,
"learning_rate": 2.9790829055996398e-05,
"loss": 0.6889,
"step": 470
},
{
"epoch": 0.06402347527426723,
"grad_norm": 3.7325728788625825,
"learning_rate": 2.978013033604483e-05,
"loss": 0.6771,
"step": 480
},
{
"epoch": 0.06535729767581447,
"grad_norm": 10.194734403509758,
"learning_rate": 2.976916680543506e-05,
"loss": 0.6917,
"step": 490
},
{
"epoch": 0.06669112007736169,
"grad_norm": 6.4699962355353655,
"learning_rate": 2.975793866059701e-05,
"loss": 0.6656,
"step": 500
},
{
"epoch": 0.06802494247890893,
"grad_norm": 7.664735661456941,
"learning_rate": 2.9746446102701606e-05,
"loss": 0.7268,
"step": 510
},
{
"epoch": 0.06935876488045617,
"grad_norm": 2.926294421836929,
"learning_rate": 2.9734689337657157e-05,
"loss": 0.7498,
"step": 520
},
{
"epoch": 0.07069258728200341,
"grad_norm": 8.95556138792813,
"learning_rate": 2.9722668576105703e-05,
"loss": 0.7237,
"step": 530
},
{
"epoch": 0.07202640968355063,
"grad_norm": 2.725769364825004,
"learning_rate": 2.971038403341921e-05,
"loss": 0.701,
"step": 540
},
{
"epoch": 0.07336023208509787,
"grad_norm": 2.458130413236654,
"learning_rate": 2.9697835929695727e-05,
"loss": 0.6775,
"step": 550
},
{
"epoch": 0.07469405448664511,
"grad_norm": 1.908510150241819,
"learning_rate": 2.968502448975544e-05,
"loss": 0.7042,
"step": 560
},
{
"epoch": 0.07602787688819233,
"grad_norm": 26.340984665432142,
"learning_rate": 2.967194994313663e-05,
"loss": 0.7234,
"step": 570
},
{
"epoch": 0.07736169928973957,
"grad_norm": 5.677162249188698,
"learning_rate": 2.9658612524091594e-05,
"loss": 0.7159,
"step": 580
},
{
"epoch": 0.07869552169128681,
"grad_norm": 1.7975075976512287,
"learning_rate": 2.9645012471582406e-05,
"loss": 0.7268,
"step": 590
},
{
"epoch": 0.08002934409283403,
"grad_norm": 3.031655038731278,
"learning_rate": 2.9631150029276662e-05,
"loss": 0.6904,
"step": 600
},
{
"epoch": 0.08136316649438127,
"grad_norm": 13.019769162242698,
"learning_rate": 2.9617025445543114e-05,
"loss": 0.683,
"step": 610
},
{
"epoch": 0.08269698889592851,
"grad_norm": 5.982097939523946,
"learning_rate": 2.9602638973447218e-05,
"loss": 0.7511,
"step": 620
},
{
"epoch": 0.08403081129747574,
"grad_norm": 2.7343884277014054,
"learning_rate": 2.9587990870746574e-05,
"loss": 0.7114,
"step": 630
},
{
"epoch": 0.08536463369902297,
"grad_norm": 5.586409371459215,
"learning_rate": 2.9573081399886356e-05,
"loss": 0.6907,
"step": 640
},
{
"epoch": 0.08669845610057021,
"grad_norm": 9.87255558712134,
"learning_rate": 2.9557910827994568e-05,
"loss": 0.7319,
"step": 650
},
{
"epoch": 0.08803227850211744,
"grad_norm": 3.9867937712251664,
"learning_rate": 2.9542479426877283e-05,
"loss": 0.6759,
"step": 660
},
{
"epoch": 0.08936610090366467,
"grad_norm": 3.111751395898768,
"learning_rate": 2.9526787473013753e-05,
"loss": 0.7086,
"step": 670
},
{
"epoch": 0.09069992330521191,
"grad_norm": 3.0578210081080535,
"learning_rate": 2.9510835247551485e-05,
"loss": 0.7209,
"step": 680
},
{
"epoch": 0.09203374570675915,
"grad_norm": 16.575220829800145,
"learning_rate": 2.949462303630116e-05,
"loss": 0.7125,
"step": 690
},
{
"epoch": 0.09336756810830638,
"grad_norm": 1.8236956939037314,
"learning_rate": 2.9478151129731567e-05,
"loss": 0.6591,
"step": 700
},
{
"epoch": 0.09470139050985361,
"grad_norm": 1.9746052093914703,
"learning_rate": 2.9461419822964348e-05,
"loss": 0.7048,
"step": 710
},
{
"epoch": 0.09603521291140085,
"grad_norm": 5.9710918845264,
"learning_rate": 2.9444429415768726e-05,
"loss": 0.725,
"step": 720
},
{
"epoch": 0.09736903531294808,
"grad_norm": 9.961068856364284,
"learning_rate": 2.942718021255617e-05,
"loss": 0.7155,
"step": 730
},
{
"epoch": 0.09870285771449532,
"grad_norm": 9.195914431100427,
"learning_rate": 2.940967252237488e-05,
"loss": 0.7171,
"step": 740
},
{
"epoch": 0.10003668011604255,
"grad_norm": 5.972259927477824,
"learning_rate": 2.9391906658904296e-05,
"loss": 0.7088,
"step": 750
},
{
"epoch": 0.10137050251758978,
"grad_norm": 3.0581670197906226,
"learning_rate": 2.937388294044946e-05,
"loss": 0.6645,
"step": 760
},
{
"epoch": 0.10270432491913702,
"grad_norm": 3.2614276687575945,
"learning_rate": 2.9355601689935315e-05,
"loss": 0.6997,
"step": 770
},
{
"epoch": 0.10403814732068425,
"grad_norm": 3.212661762840432,
"learning_rate": 2.933706323490092e-05,
"loss": 0.7005,
"step": 780
},
{
"epoch": 0.10537196972223148,
"grad_norm": 10.732038885056795,
"learning_rate": 2.9318267907493583e-05,
"loss": 0.6588,
"step": 790
},
{
"epoch": 0.10670579212377872,
"grad_norm": 2.495380329507228,
"learning_rate": 2.9299216044462903e-05,
"loss": 0.6965,
"step": 800
},
{
"epoch": 0.10803961452532596,
"grad_norm": 4.60295956813534,
"learning_rate": 2.927990798715475e-05,
"loss": 0.6959,
"step": 810
},
{
"epoch": 0.1093734369268732,
"grad_norm": 5.95010483593304,
"learning_rate": 2.926034408150513e-05,
"loss": 0.7083,
"step": 820
},
{
"epoch": 0.11070725932842042,
"grad_norm": 10.48663715000287,
"learning_rate": 2.9240524678034016e-05,
"loss": 0.6977,
"step": 830
},
{
"epoch": 0.11204108172996766,
"grad_norm": 3.159424497856185,
"learning_rate": 2.9220450131839037e-05,
"loss": 0.7129,
"step": 840
},
{
"epoch": 0.1133749041315149,
"grad_norm": 3.984530217756339,
"learning_rate": 2.920012080258912e-05,
"loss": 0.6618,
"step": 850
},
{
"epoch": 0.11470872653306212,
"grad_norm": 4.212436954015185,
"learning_rate": 2.9179537054518085e-05,
"loss": 0.6958,
"step": 860
},
{
"epoch": 0.11604254893460936,
"grad_norm": 2.5397356754848657,
"learning_rate": 2.9158699256418056e-05,
"loss": 0.6593,
"step": 870
},
{
"epoch": 0.1173763713361566,
"grad_norm": 7.8192219616904115,
"learning_rate": 2.9137607781632913e-05,
"loss": 0.6932,
"step": 880
},
{
"epoch": 0.11871019373770382,
"grad_norm": 3.890974772100691,
"learning_rate": 2.911626300805155e-05,
"loss": 0.674,
"step": 890
},
{
"epoch": 0.12004401613925106,
"grad_norm": 2.241784461612369,
"learning_rate": 2.9094665318101155e-05,
"loss": 0.6808,
"step": 900
},
{
"epoch": 0.1213778385407983,
"grad_norm": 2.8295407624221456,
"learning_rate": 2.9072815098740326e-05,
"loss": 0.7074,
"step": 910
},
{
"epoch": 0.12271166094234552,
"grad_norm": 2.8191065019379455,
"learning_rate": 2.9050712741452136e-05,
"loss": 0.7218,
"step": 920
},
{
"epoch": 0.12404548334389276,
"grad_norm": 2.1348105213857362,
"learning_rate": 2.902835864223715e-05,
"loss": 0.6901,
"step": 930
},
{
"epoch": 0.12537930574543998,
"grad_norm": 2.547712785034466,
"learning_rate": 2.9005753201606287e-05,
"loss": 0.7024,
"step": 940
},
{
"epoch": 0.12671312814698724,
"grad_norm": 6.0714243848770195,
"learning_rate": 2.8982896824573678e-05,
"loss": 0.665,
"step": 950
},
{
"epoch": 0.12804695054853446,
"grad_norm": 2.928297684665459,
"learning_rate": 2.8959789920649394e-05,
"loss": 0.7178,
"step": 960
},
{
"epoch": 0.12938077295008168,
"grad_norm": 9.456759373673547,
"learning_rate": 2.893643290383212e-05,
"loss": 0.6759,
"step": 970
},
{
"epoch": 0.13071459535162894,
"grad_norm": 3.9700721280634648,
"learning_rate": 2.891282619260172e-05,
"loss": 0.6701,
"step": 980
},
{
"epoch": 0.13204841775317616,
"grad_norm": 1.9216628539066036,
"learning_rate": 2.8888970209911754e-05,
"loss": 0.6725,
"step": 990
},
{
"epoch": 0.13338224015472339,
"grad_norm": 6.51104150647371,
"learning_rate": 2.8864865383181893e-05,
"loss": 0.6811,
"step": 1000
},
{
"epoch": 0.13471606255627064,
"grad_norm": 7.1765693144596705,
"learning_rate": 2.8840512144290273e-05,
"loss": 0.7085,
"step": 1010
},
{
"epoch": 0.13604988495781786,
"grad_norm": 4.136786758396937,
"learning_rate": 2.8815910929565734e-05,
"loss": 0.7128,
"step": 1020
},
{
"epoch": 0.1373837073593651,
"grad_norm": 3.407893493053672,
"learning_rate": 2.879106217978002e-05,
"loss": 0.6782,
"step": 1030
},
{
"epoch": 0.13871752976091234,
"grad_norm": 2.5759617833329544,
"learning_rate": 2.8765966340139892e-05,
"loss": 0.7387,
"step": 1040
},
{
"epoch": 0.14005135216245956,
"grad_norm": 8.496713788438253,
"learning_rate": 2.8740623860279116e-05,
"loss": 0.6988,
"step": 1050
},
{
"epoch": 0.14138517456400682,
"grad_norm": 1.438197837288145,
"learning_rate": 2.871503519425044e-05,
"loss": 0.6797,
"step": 1060
},
{
"epoch": 0.14271899696555404,
"grad_norm": 3.305947931404651,
"learning_rate": 2.8689200800517448e-05,
"loss": 0.727,
"step": 1070
},
{
"epoch": 0.14405281936710126,
"grad_norm": 4.384984204254061,
"learning_rate": 2.866312114194634e-05,
"loss": 0.7073,
"step": 1080
},
{
"epoch": 0.14538664176864852,
"grad_norm": 3.0424755710445455,
"learning_rate": 2.8636796685797657e-05,
"loss": 0.6712,
"step": 1090
},
{
"epoch": 0.14672046417019574,
"grad_norm": 3.7562374375017415,
"learning_rate": 2.8610227903717876e-05,
"loss": 0.6811,
"step": 1100
},
{
"epoch": 0.14805428657174297,
"grad_norm": 18.148320631618972,
"learning_rate": 2.8583415271730994e-05,
"loss": 0.669,
"step": 1110
},
{
"epoch": 0.14938810897329022,
"grad_norm": 10.095266883651389,
"learning_rate": 2.855635927022998e-05,
"loss": 0.7034,
"step": 1120
},
{
"epoch": 0.15072193137483744,
"grad_norm": 6.298167661594497,
"learning_rate": 2.8529060383968175e-05,
"loss": 0.6736,
"step": 1130
},
{
"epoch": 0.15205575377638467,
"grad_norm": 1.7112633381805116,
"learning_rate": 2.850151910205061e-05,
"loss": 0.7072,
"step": 1140
},
{
"epoch": 0.15338957617793192,
"grad_norm": 4.478017946548926,
"learning_rate": 2.847373591792523e-05,
"loss": 0.679,
"step": 1150
},
{
"epoch": 0.15472339857947914,
"grad_norm": 6.208223721690807,
"learning_rate": 2.844571132937407e-05,
"loss": 0.688,
"step": 1160
},
{
"epoch": 0.15605722098102637,
"grad_norm": 3.5734211349612974,
"learning_rate": 2.841744583850431e-05,
"loss": 0.7054,
"step": 1170
},
{
"epoch": 0.15739104338257362,
"grad_norm": 2.9418941665585874,
"learning_rate": 2.838893995173932e-05,
"loss": 0.7051,
"step": 1180
},
{
"epoch": 0.15872486578412084,
"grad_norm": 2.8796543096657055,
"learning_rate": 2.836019417980955e-05,
"loss": 0.6856,
"step": 1190
},
{
"epoch": 0.16005868818566807,
"grad_norm": 1.3218998593276705,
"learning_rate": 2.8331209037743387e-05,
"loss": 0.69,
"step": 1200
},
{
"epoch": 0.16139251058721532,
"grad_norm": 1.566163155963456,
"learning_rate": 2.8301985044857947e-05,
"loss": 0.6384,
"step": 1210
},
{
"epoch": 0.16272633298876255,
"grad_norm": 6.731577281026319,
"learning_rate": 2.8272522724749743e-05,
"loss": 0.7241,
"step": 1220
},
{
"epoch": 0.16406015539030977,
"grad_norm": 3.3521245738531813,
"learning_rate": 2.8242822605285323e-05,
"loss": 0.6931,
"step": 1230
},
{
"epoch": 0.16539397779185702,
"grad_norm": 11.826633616347152,
"learning_rate": 2.8212885218591812e-05,
"loss": 0.702,
"step": 1240
},
{
"epoch": 0.16672780019340425,
"grad_norm": 1.826627133990473,
"learning_rate": 2.8182711101047362e-05,
"loss": 0.7061,
"step": 1250
},
{
"epoch": 0.16806162259495147,
"grad_norm": 2.573810458184169,
"learning_rate": 2.815230079327156e-05,
"loss": 0.7369,
"step": 1260
},
{
"epoch": 0.16939544499649872,
"grad_norm": 3.2839708945300665,
"learning_rate": 2.8121654840115734e-05,
"loss": 0.6966,
"step": 1270
},
{
"epoch": 0.17072926739804595,
"grad_norm": 2.3799212816187136,
"learning_rate": 2.809077379065319e-05,
"loss": 0.6586,
"step": 1280
},
{
"epoch": 0.17206308979959317,
"grad_norm": 8.449506341181594,
"learning_rate": 2.805965819816937e-05,
"loss": 0.6759,
"step": 1290
},
{
"epoch": 0.17339691220114042,
"grad_norm": 19.59186920922371,
"learning_rate": 2.802830862015196e-05,
"loss": 0.6537,
"step": 1300
},
{
"epoch": 0.17473073460268765,
"grad_norm": 1.9822479021376653,
"learning_rate": 2.799672561828087e-05,
"loss": 0.6854,
"step": 1310
},
{
"epoch": 0.17606455700423487,
"grad_norm": 34.5695029919346,
"learning_rate": 2.79649097584182e-05,
"loss": 0.696,
"step": 1320
},
{
"epoch": 0.17739837940578213,
"grad_norm": 1.886275024045834,
"learning_rate": 2.7932861610598077e-05,
"loss": 0.7308,
"step": 1330
},
{
"epoch": 0.17873220180732935,
"grad_norm": 12.128310547971918,
"learning_rate": 2.7900581749016466e-05,
"loss": 0.6775,
"step": 1340
},
{
"epoch": 0.1800660242088766,
"grad_norm": 5.164562576250181,
"learning_rate": 2.7868070752020865e-05,
"loss": 0.7096,
"step": 1350
},
{
"epoch": 0.18139984661042383,
"grad_norm": 10.603161437547913,
"learning_rate": 2.7835329202099944e-05,
"loss": 0.7128,
"step": 1360
},
{
"epoch": 0.18273366901197105,
"grad_norm": 4.184026615605208,
"learning_rate": 2.7802357685873117e-05,
"loss": 0.6958,
"step": 1370
},
{
"epoch": 0.1840674914135183,
"grad_norm": 4.135408851494439,
"learning_rate": 2.7769156794080033e-05,
"loss": 0.6933,
"step": 1380
},
{
"epoch": 0.18540131381506553,
"grad_norm": 1.7535535971850658,
"learning_rate": 2.7735727121569967e-05,
"loss": 0.7088,
"step": 1390
},
{
"epoch": 0.18673513621661275,
"grad_norm": 5.354882317397714,
"learning_rate": 2.770206926729121e-05,
"loss": 0.6777,
"step": 1400
},
{
"epoch": 0.18806895861816,
"grad_norm": 3.2593978573810523,
"learning_rate": 2.7668183834280284e-05,
"loss": 0.6535,
"step": 1410
},
{
"epoch": 0.18940278101970723,
"grad_norm": 2.48929554913285,
"learning_rate": 2.763407142965117e-05,
"loss": 0.6853,
"step": 1420
},
{
"epoch": 0.19073660342125445,
"grad_norm": 7.550246976134938,
"learning_rate": 2.759973266458444e-05,
"loss": 0.679,
"step": 1430
},
{
"epoch": 0.1920704258228017,
"grad_norm": 5.033109901639584,
"learning_rate": 2.756516815431627e-05,
"loss": 0.6875,
"step": 1440
},
{
"epoch": 0.19340424822434893,
"grad_norm": 6.1146492204468545,
"learning_rate": 2.7530378518127445e-05,
"loss": 0.6762,
"step": 1450
},
{
"epoch": 0.19473807062589615,
"grad_norm": 72.48946246157955,
"learning_rate": 2.7495364379332256e-05,
"loss": 0.6766,
"step": 1460
},
{
"epoch": 0.1960718930274434,
"grad_norm": 2.9676182245937426,
"learning_rate": 2.7460126365267335e-05,
"loss": 0.6731,
"step": 1470
},
{
"epoch": 0.19740571542899063,
"grad_norm": 2.31300235137622,
"learning_rate": 2.7424665107280402e-05,
"loss": 0.6936,
"step": 1480
},
{
"epoch": 0.19873953783053785,
"grad_norm": 2.594823775876976,
"learning_rate": 2.738898124071898e-05,
"loss": 0.6703,
"step": 1490
},
{
"epoch": 0.2000733602320851,
"grad_norm": 2.8354629666044575,
"learning_rate": 2.735307540491898e-05,
"loss": 0.7022,
"step": 1500
},
{
"epoch": 0.20140718263363233,
"grad_norm": 4.553033073507926,
"learning_rate": 2.7316948243193273e-05,
"loss": 0.6948,
"step": 1510
},
{
"epoch": 0.20274100503517956,
"grad_norm": 10.052746707722667,
"learning_rate": 2.7280600402820146e-05,
"loss": 0.7216,
"step": 1520
},
{
"epoch": 0.2040748274367268,
"grad_norm": 2.3783106567256977,
"learning_rate": 2.724403253503171e-05,
"loss": 0.6955,
"step": 1530
},
{
"epoch": 0.20540864983827403,
"grad_norm": 2.26171864458522,
"learning_rate": 2.7207245295002242e-05,
"loss": 0.7062,
"step": 1540
},
{
"epoch": 0.20674247223982126,
"grad_norm": 3.223919319241339,
"learning_rate": 2.7170239341836436e-05,
"loss": 0.6659,
"step": 1550
},
{
"epoch": 0.2080762946413685,
"grad_norm": 1.6810934171118763,
"learning_rate": 2.7133015338557585e-05,
"loss": 0.7069,
"step": 1560
},
{
"epoch": 0.20941011704291573,
"grad_norm": 2.1432477935562515,
"learning_rate": 2.7095573952095727e-05,
"loss": 0.703,
"step": 1570
},
{
"epoch": 0.21074393944446296,
"grad_norm": 1.5654642597457342,
"learning_rate": 2.705791585327568e-05,
"loss": 0.7037,
"step": 1580
},
{
"epoch": 0.2120777618460102,
"grad_norm": 2.5797486712147992,
"learning_rate": 2.7020041716805014e-05,
"loss": 0.6945,
"step": 1590
},
{
"epoch": 0.21341158424755743,
"grad_norm": 6.993511462468879,
"learning_rate": 2.6981952221261986e-05,
"loss": 0.6965,
"step": 1600
},
{
"epoch": 0.21474540664910466,
"grad_norm": 1.9963649259483505,
"learning_rate": 2.6943648049083366e-05,
"loss": 0.7052,
"step": 1610
},
{
"epoch": 0.2160792290506519,
"grad_norm": 13.563069274486924,
"learning_rate": 2.6905129886552208e-05,
"loss": 0.6925,
"step": 1620
},
{
"epoch": 0.21741305145219914,
"grad_norm": 1.8811719402961335,
"learning_rate": 2.6866398423785568e-05,
"loss": 0.6739,
"step": 1630
},
{
"epoch": 0.2187468738537464,
"grad_norm": 3.400454288874876,
"learning_rate": 2.682745435472212e-05,
"loss": 0.7115,
"step": 1640
},
{
"epoch": 0.2200806962552936,
"grad_norm": 1.7511437629316988,
"learning_rate": 2.6788298377109748e-05,
"loss": 0.6939,
"step": 1650
},
{
"epoch": 0.22141451865684084,
"grad_norm": 7.03338780434775,
"learning_rate": 2.6748931192493017e-05,
"loss": 0.7081,
"step": 1660
},
{
"epoch": 0.2227483410583881,
"grad_norm": 7.485978307196749,
"learning_rate": 2.670935350620063e-05,
"loss": 0.7483,
"step": 1670
},
{
"epoch": 0.2240821634599353,
"grad_norm": 11.472423495255576,
"learning_rate": 2.6669566027332767e-05,
"loss": 0.6653,
"step": 1680
},
{
"epoch": 0.22541598586148254,
"grad_norm": 2.8967622506628388,
"learning_rate": 2.6629569468748404e-05,
"loss": 0.6994,
"step": 1690
},
{
"epoch": 0.2267498082630298,
"grad_norm": 3.689173173355099,
"learning_rate": 2.658936454705251e-05,
"loss": 0.7121,
"step": 1700
},
{
"epoch": 0.22808363066457701,
"grad_norm": 2.5714663434660414,
"learning_rate": 2.6548951982583246e-05,
"loss": 0.679,
"step": 1710
},
{
"epoch": 0.22941745306612424,
"grad_norm": 2.6005673266384854,
"learning_rate": 2.650833249939903e-05,
"loss": 0.6826,
"step": 1720
},
{
"epoch": 0.2307512754676715,
"grad_norm": 25.941043620884358,
"learning_rate": 2.6467506825265573e-05,
"loss": 0.7202,
"step": 1730
},
{
"epoch": 0.23208509786921871,
"grad_norm": 2.9541333611408396,
"learning_rate": 2.642647569164284e-05,
"loss": 0.6855,
"step": 1740
},
{
"epoch": 0.23341892027076594,
"grad_norm": 1.6808077602808311,
"learning_rate": 2.638523983367194e-05,
"loss": 0.6901,
"step": 1750
},
{
"epoch": 0.2347527426723132,
"grad_norm": 8.145841190370119,
"learning_rate": 2.634379999016198e-05,
"loss": 0.7,
"step": 1760
},
{
"epoch": 0.23608656507386042,
"grad_norm": 17.930965708795497,
"learning_rate": 2.6302156903576784e-05,
"loss": 0.6315,
"step": 1770
},
{
"epoch": 0.23742038747540764,
"grad_norm": 3.8449192904768186,
"learning_rate": 2.6260311320021628e-05,
"loss": 0.6369,
"step": 1780
},
{
"epoch": 0.2387542098769549,
"grad_norm": 1.7681330868700245,
"learning_rate": 2.6218263989229855e-05,
"loss": 0.674,
"step": 1790
},
{
"epoch": 0.24008803227850212,
"grad_norm": 2.1491823327138317,
"learning_rate": 2.617601566454944e-05,
"loss": 0.6979,
"step": 1800
},
{
"epoch": 0.24142185468004934,
"grad_norm": 4.667708666590814,
"learning_rate": 2.613356710292951e-05,
"loss": 0.6932,
"step": 1810
},
{
"epoch": 0.2427556770815966,
"grad_norm": 2.70251113889306,
"learning_rate": 2.6090919064906766e-05,
"loss": 0.6842,
"step": 1820
},
{
"epoch": 0.24408949948314382,
"grad_norm": 4.187423249865094,
"learning_rate": 2.6048072314591854e-05,
"loss": 0.6642,
"step": 1830
},
{
"epoch": 0.24542332188469104,
"grad_norm": 1.6832068899399832,
"learning_rate": 2.600502761965569e-05,
"loss": 0.6984,
"step": 1840
},
{
"epoch": 0.2467571442862383,
"grad_norm": 5.37697139339205,
"learning_rate": 2.59617857513157e-05,
"loss": 0.6489,
"step": 1850
},
{
"epoch": 0.24809096668778552,
"grad_norm": 2.6302546632419186,
"learning_rate": 2.591834748432198e-05,
"loss": 0.6923,
"step": 1860
},
{
"epoch": 0.24942478908933274,
"grad_norm": 3.8259011796471927,
"learning_rate": 2.5874713596943465e-05,
"loss": 0.7163,
"step": 1870
},
{
"epoch": 0.25075861149087997,
"grad_norm": 5.322548910178761,
"learning_rate": 2.5830884870953933e-05,
"loss": 0.7188,
"step": 1880
},
{
"epoch": 0.25209243389242725,
"grad_norm": 1.5202627897221725,
"learning_rate": 2.578686209161803e-05,
"loss": 0.6975,
"step": 1890
},
{
"epoch": 0.2534262562939745,
"grad_norm": 2.7405063442524966,
"learning_rate": 2.5742646047677186e-05,
"loss": 0.7257,
"step": 1900
},
{
"epoch": 0.2547600786955217,
"grad_norm": 3.7267206506339225,
"learning_rate": 2.5698237531335493e-05,
"loss": 0.7078,
"step": 1910
},
{
"epoch": 0.2560939010970689,
"grad_norm": 1.7323421214775496,
"learning_rate": 2.56536373382455e-05,
"loss": 0.6739,
"step": 1920
},
{
"epoch": 0.25742772349861615,
"grad_norm": 5.812235836466133,
"learning_rate": 2.5608846267493974e-05,
"loss": 0.668,
"step": 1930
},
{
"epoch": 0.25876154590016337,
"grad_norm": 5.482474929884452,
"learning_rate": 2.5563865121587563e-05,
"loss": 0.6633,
"step": 1940
},
{
"epoch": 0.26009536830171065,
"grad_norm": 10.990988768370446,
"learning_rate": 2.5518694706438445e-05,
"loss": 0.7351,
"step": 1950
},
{
"epoch": 0.2614291907032579,
"grad_norm": 5.066143279063377,
"learning_rate": 2.5473335831349842e-05,
"loss": 0.6479,
"step": 1960
},
{
"epoch": 0.2627630131048051,
"grad_norm": 1.8538054067880405,
"learning_rate": 2.5427789309001577e-05,
"loss": 0.691,
"step": 1970
},
{
"epoch": 0.2640968355063523,
"grad_norm": 3.642801968239499,
"learning_rate": 2.538205595543548e-05,
"loss": 0.6981,
"step": 1980
},
{
"epoch": 0.26543065790789955,
"grad_norm": 4.027276498286695,
"learning_rate": 2.5336136590040767e-05,
"loss": 0.6471,
"step": 1990
},
{
"epoch": 0.26676448030944677,
"grad_norm": 5.225146913060103,
"learning_rate": 2.529003203553937e-05,
"loss": 0.6768,
"step": 2000
},
{
"epoch": 0.26809830271099405,
"grad_norm": 3.253526388391669,
"learning_rate": 2.5243743117971186e-05,
"loss": 0.7292,
"step": 2010
},
{
"epoch": 0.2694321251125413,
"grad_norm": 3.595165338865758,
"learning_rate": 2.5197270666679295e-05,
"loss": 0.7153,
"step": 2020
},
{
"epoch": 0.2707659475140885,
"grad_norm": 3.041241411415701,
"learning_rate": 2.515061551429509e-05,
"loss": 0.6911,
"step": 2030
},
{
"epoch": 0.2720997699156357,
"grad_norm": 1.8998842731166448,
"learning_rate": 2.5103778496723334e-05,
"loss": 0.7129,
"step": 2040
},
{
"epoch": 0.27343359231718295,
"grad_norm": 2.421232470526233,
"learning_rate": 2.5056760453127242e-05,
"loss": 0.7009,
"step": 2050
},
{
"epoch": 0.2747674147187302,
"grad_norm": 1.6780177844017854,
"learning_rate": 2.5009562225913385e-05,
"loss": 0.6772,
"step": 2060
},
{
"epoch": 0.27610123712027745,
"grad_norm": 4.807962830195143,
"learning_rate": 2.4962184660716645e-05,
"loss": 0.7037,
"step": 2070
},
{
"epoch": 0.2774350595218247,
"grad_norm": 2.9514583345109253,
"learning_rate": 2.4914628606385022e-05,
"loss": 0.7142,
"step": 2080
},
{
"epoch": 0.2787688819233719,
"grad_norm": 7.40404674094443,
"learning_rate": 2.4866894914964462e-05,
"loss": 0.6811,
"step": 2090
},
{
"epoch": 0.2801027043249191,
"grad_norm": 2.925771960255195,
"learning_rate": 2.481898444168357e-05,
"loss": 0.6545,
"step": 2100
},
{
"epoch": 0.28143652672646635,
"grad_norm": 5.444798449384416,
"learning_rate": 2.4770898044938284e-05,
"loss": 0.7043,
"step": 2110
},
{
"epoch": 0.28277034912801363,
"grad_norm": 9.858110881841988,
"learning_rate": 2.4722636586276522e-05,
"loss": 0.665,
"step": 2120
},
{
"epoch": 0.28410417152956086,
"grad_norm": 5.298660202543338,
"learning_rate": 2.4674200930382712e-05,
"loss": 0.6881,
"step": 2130
},
{
"epoch": 0.2854379939311081,
"grad_norm": 3.705317238676521,
"learning_rate": 2.4625591945062326e-05,
"loss": 0.7149,
"step": 2140
},
{
"epoch": 0.2867718163326553,
"grad_norm": 3.4572860020453855,
"learning_rate": 2.4576810501226318e-05,
"loss": 0.6714,
"step": 2150
},
{
"epoch": 0.28810563873420253,
"grad_norm": 2.603696480584208,
"learning_rate": 2.4527857472875515e-05,
"loss": 0.6706,
"step": 2160
},
{
"epoch": 0.28943946113574975,
"grad_norm": 3.6276512973605524,
"learning_rate": 2.447873373708498e-05,
"loss": 0.6915,
"step": 2170
},
{
"epoch": 0.29077328353729703,
"grad_norm": 3.2247665616500414,
"learning_rate": 2.4429440173988275e-05,
"loss": 0.698,
"step": 2180
},
{
"epoch": 0.29210710593884426,
"grad_norm": 2.902123587923174,
"learning_rate": 2.43799776667617e-05,
"loss": 0.6566,
"step": 2190
},
{
"epoch": 0.2934409283403915,
"grad_norm": 2.1614031926364214,
"learning_rate": 2.4330347101608492e-05,
"loss": 0.7077,
"step": 2200
},
{
"epoch": 0.2947747507419387,
"grad_norm": 9.153437367973524,
"learning_rate": 2.428054936774289e-05,
"loss": 0.6163,
"step": 2210
},
{
"epoch": 0.29610857314348593,
"grad_norm": 1.9839793734516538,
"learning_rate": 2.423058535737427e-05,
"loss": 0.6525,
"step": 2220
},
{
"epoch": 0.29744239554503316,
"grad_norm": 7.536682448957617,
"learning_rate": 2.418045596569111e-05,
"loss": 0.6892,
"step": 2230
},
{
"epoch": 0.29877621794658044,
"grad_norm": 7.678647037780194,
"learning_rate": 2.4130162090844976e-05,
"loss": 0.6787,
"step": 2240
},
{
"epoch": 0.30011004034812766,
"grad_norm": 4.791025776780419,
"learning_rate": 2.4079704633934427e-05,
"loss": 0.6458,
"step": 2250
},
{
"epoch": 0.3014438627496749,
"grad_norm": 2.274268244339417,
"learning_rate": 2.4029084498988864e-05,
"loss": 0.6962,
"step": 2260
},
{
"epoch": 0.3027776851512221,
"grad_norm": 2.156854296054033,
"learning_rate": 2.3978302592952332e-05,
"loss": 0.6852,
"step": 2270
},
{
"epoch": 0.30411150755276933,
"grad_norm": 3.294748786762216,
"learning_rate": 2.392735982566728e-05,
"loss": 0.7508,
"step": 2280
},
{
"epoch": 0.30544532995431656,
"grad_norm": 1.6409137290114766,
"learning_rate": 2.387625710985826e-05,
"loss": 0.7167,
"step": 2290
},
{
"epoch": 0.30677915235586384,
"grad_norm": 3.5000955023678713,
"learning_rate": 2.3824995361115552e-05,
"loss": 0.6903,
"step": 2300
},
{
"epoch": 0.30811297475741106,
"grad_norm": 1.085755861309128,
"learning_rate": 2.3773575497878784e-05,
"loss": 0.7182,
"step": 2310
},
{
"epoch": 0.3094467971589583,
"grad_norm": 2.6834548847634445,
"learning_rate": 2.372199844142048e-05,
"loss": 0.6984,
"step": 2320
},
{
"epoch": 0.3107806195605055,
"grad_norm": 2.493536604926798,
"learning_rate": 2.3670265115829523e-05,
"loss": 0.6536,
"step": 2330
},
{
"epoch": 0.31211444196205274,
"grad_norm": 1.7721327632872044,
"learning_rate": 2.3618376447994633e-05,
"loss": 0.6561,
"step": 2340
},
{
"epoch": 0.31344826436359996,
"grad_norm": 2.171596262218261,
"learning_rate": 2.3566333367587737e-05,
"loss": 0.7056,
"step": 2350
},
{
"epoch": 0.31478208676514724,
"grad_norm": 36.36759112960759,
"learning_rate": 2.3514136807047318e-05,
"loss": 0.7015,
"step": 2360
},
{
"epoch": 0.31611590916669446,
"grad_norm": 2.1581747337993993,
"learning_rate": 2.3461787701561724e-05,
"loss": 0.7136,
"step": 2370
},
{
"epoch": 0.3174497315682417,
"grad_norm": 4.627684355473749,
"learning_rate": 2.340928698905239e-05,
"loss": 0.6856,
"step": 2380
},
{
"epoch": 0.3187835539697889,
"grad_norm": 23.008362949697226,
"learning_rate": 2.335663561015704e-05,
"loss": 0.6535,
"step": 2390
},
{
"epoch": 0.32011737637133614,
"grad_norm": 1.4483305067525718,
"learning_rate": 2.3303834508212845e-05,
"loss": 0.6929,
"step": 2400
},
{
"epoch": 0.3214511987728834,
"grad_norm": 2.4145146906270134,
"learning_rate": 2.325088462923951e-05,
"loss": 0.6859,
"step": 2410
},
{
"epoch": 0.32278502117443064,
"grad_norm": 1.6323554754616574,
"learning_rate": 2.319778692192233e-05,
"loss": 0.6885,
"step": 2420
},
{
"epoch": 0.32411884357597787,
"grad_norm": 4.22840962266095,
"learning_rate": 2.3144542337595196e-05,
"loss": 0.6799,
"step": 2430
},
{
"epoch": 0.3254526659775251,
"grad_norm": 9.344155395286913,
"learning_rate": 2.3091151830223537e-05,
"loss": 0.7125,
"step": 2440
},
{
"epoch": 0.3267864883790723,
"grad_norm": 6.426921263330431,
"learning_rate": 2.3037616356387237e-05,
"loss": 0.6844,
"step": 2450
},
{
"epoch": 0.32812031078061954,
"grad_norm": 2.508003013718542,
"learning_rate": 2.2983936875263495e-05,
"loss": 0.6695,
"step": 2460
},
{
"epoch": 0.3294541331821668,
"grad_norm": 5.676502368376463,
"learning_rate": 2.2930114348609655e-05,
"loss": 0.656,
"step": 2470
},
{
"epoch": 0.33078795558371404,
"grad_norm": 2.9206555111493597,
"learning_rate": 2.2876149740745935e-05,
"loss": 0.7357,
"step": 2480
},
{
"epoch": 0.33212177798526127,
"grad_norm": 3.944142868858924,
"learning_rate": 2.28220440185382e-05,
"loss": 0.709,
"step": 2490
},
{
"epoch": 0.3334556003868085,
"grad_norm": 15.707981311418816,
"learning_rate": 2.2767798151380597e-05,
"loss": 0.685,
"step": 2500
},
{
"epoch": 0.3347894227883557,
"grad_norm": 4.850415178599164,
"learning_rate": 2.27134131111782e-05,
"loss": 0.7258,
"step": 2510
},
{
"epoch": 0.33612324518990294,
"grad_norm": 1.5546940942365648,
"learning_rate": 2.2658889872329628e-05,
"loss": 0.6849,
"step": 2520
},
{
"epoch": 0.3374570675914502,
"grad_norm": 7.608111830875221,
"learning_rate": 2.2604229411709518e-05,
"loss": 0.689,
"step": 2530
},
{
"epoch": 0.33879088999299745,
"grad_norm": 3.5234760696769922,
"learning_rate": 2.25494327086511e-05,
"loss": 0.7067,
"step": 2540
},
{
"epoch": 0.34012471239454467,
"grad_norm": 3.050573051274483,
"learning_rate": 2.2494500744928583e-05,
"loss": 0.6535,
"step": 2550
},
{
"epoch": 0.3414585347960919,
"grad_norm": 1.8068660121233675,
"learning_rate": 2.243943450473963e-05,
"loss": 0.7171,
"step": 2560
},
{
"epoch": 0.3427923571976391,
"grad_norm": 5.878470836242925,
"learning_rate": 2.2384234974687658e-05,
"loss": 0.6732,
"step": 2570
},
{
"epoch": 0.34412617959918634,
"grad_norm": 2.419571993629191,
"learning_rate": 2.2328903143764216e-05,
"loss": 0.6553,
"step": 2580
},
{
"epoch": 0.3454600020007336,
"grad_norm": 5.4457590803918166,
"learning_rate": 2.2273440003331237e-05,
"loss": 0.6963,
"step": 2590
},
{
"epoch": 0.34679382440228085,
"grad_norm": 1.611198577297041,
"learning_rate": 2.2217846547103275e-05,
"loss": 0.7075,
"step": 2600
},
{
"epoch": 0.3481276468038281,
"grad_norm": 1.5058788491262667,
"learning_rate": 2.216212377112972e-05,
"loss": 0.67,
"step": 2610
},
{
"epoch": 0.3494614692053753,
"grad_norm": 12.998071380752632,
"learning_rate": 2.2106272673776934e-05,
"loss": 0.6878,
"step": 2620
},
{
"epoch": 0.3507952916069225,
"grad_norm": 15.038084891115052,
"learning_rate": 2.2050294255710375e-05,
"loss": 0.7104,
"step": 2630
},
{
"epoch": 0.35212911400846975,
"grad_norm": 2.6541518957025474,
"learning_rate": 2.1994189519876663e-05,
"loss": 0.6643,
"step": 2640
},
{
"epoch": 0.353462936410017,
"grad_norm": 2.9600345573470044,
"learning_rate": 2.19379594714856e-05,
"loss": 0.7138,
"step": 2650
},
{
"epoch": 0.35479675881156425,
"grad_norm": 3.6541318055900107,
"learning_rate": 2.188160511799219e-05,
"loss": 0.7098,
"step": 2660
},
{
"epoch": 0.3561305812131115,
"grad_norm": 3.5267634965358887,
"learning_rate": 2.1825127469078555e-05,
"loss": 0.69,
"step": 2670
},
{
"epoch": 0.3574644036146587,
"grad_norm": 4.281448471035455,
"learning_rate": 2.1768527536635868e-05,
"loss": 0.66,
"step": 2680
},
{
"epoch": 0.3587982260162059,
"grad_norm": 1.8979727093671077,
"learning_rate": 2.171180633474621e-05,
"loss": 0.6655,
"step": 2690
},
{
"epoch": 0.3601320484177532,
"grad_norm": 38.260538083174715,
"learning_rate": 2.1654964879664407e-05,
"loss": 0.6907,
"step": 2700
},
{
"epoch": 0.3614658708193004,
"grad_norm": 4.096124324506218,
"learning_rate": 2.1598004189799826e-05,
"loss": 0.674,
"step": 2710
},
{
"epoch": 0.36279969322084765,
"grad_norm": 3.481931505321646,
"learning_rate": 2.1540925285698122e-05,
"loss": 0.7113,
"step": 2720
},
{
"epoch": 0.3641335156223949,
"grad_norm": 2.0982067444268684,
"learning_rate": 2.148372919002295e-05,
"loss": 0.696,
"step": 2730
},
{
"epoch": 0.3654673380239421,
"grad_norm": 3.3092373308718934,
"learning_rate": 2.142641692753765e-05,
"loss": 0.6315,
"step": 2740
},
{
"epoch": 0.3668011604254893,
"grad_norm": 3.186705396754706,
"learning_rate": 2.1368989525086893e-05,
"loss": 0.6677,
"step": 2750
},
{
"epoch": 0.3681349828270366,
"grad_norm": 6.164987008853785,
"learning_rate": 2.1311448011578255e-05,
"loss": 0.6913,
"step": 2760
},
{
"epoch": 0.36946880522858383,
"grad_norm": 1.3538856165789028,
"learning_rate": 2.125379341796382e-05,
"loss": 0.7127,
"step": 2770
},
{
"epoch": 0.37080262763013105,
"grad_norm": 5.87554442135488,
"learning_rate": 2.1196026777221684e-05,
"loss": 0.7016,
"step": 2780
},
{
"epoch": 0.3721364500316783,
"grad_norm": 1.7777083138299317,
"learning_rate": 2.1138149124337448e-05,
"loss": 0.7295,
"step": 2790
},
{
"epoch": 0.3734702724332255,
"grad_norm": 2.385434968259032,
"learning_rate": 2.108016149628569e-05,
"loss": 0.6589,
"step": 2800
},
{
"epoch": 0.3748040948347727,
"grad_norm": 6.6511179163621446,
"learning_rate": 2.102206493201137e-05,
"loss": 0.659,
"step": 2810
},
{
"epoch": 0.37613791723632,
"grad_norm": 2.7897652560781876,
"learning_rate": 2.096386047241123e-05,
"loss": 0.6482,
"step": 2820
},
{
"epoch": 0.37747173963786723,
"grad_norm": 2.953800669076903,
"learning_rate": 2.0905549160315116e-05,
"loss": 0.675,
"step": 2830
},
{
"epoch": 0.37880556203941446,
"grad_norm": 13.84190642263868,
"learning_rate": 2.084713204046734e-05,
"loss": 0.6558,
"step": 2840
},
{
"epoch": 0.3801393844409617,
"grad_norm": 4.521658802993485,
"learning_rate": 2.078861015950793e-05,
"loss": 0.6861,
"step": 2850
},
{
"epoch": 0.3814732068425089,
"grad_norm": 2.5357270384377646,
"learning_rate": 2.072998456595387e-05,
"loss": 0.6781,
"step": 2860
},
{
"epoch": 0.38280702924405613,
"grad_norm": 1.694744102718741,
"learning_rate": 2.0671256310180334e-05,
"loss": 0.6923,
"step": 2870
},
{
"epoch": 0.3841408516456034,
"grad_norm": 2.335196387059918,
"learning_rate": 2.0612426444401874e-05,
"loss": 0.6473,
"step": 2880
},
{
"epoch": 0.38547467404715063,
"grad_norm": 4.2888708297621605,
"learning_rate": 2.0553496022653535e-05,
"loss": 0.7116,
"step": 2890
},
{
"epoch": 0.38680849644869786,
"grad_norm": 2.586725028631528,
"learning_rate": 2.0494466100772006e-05,
"loss": 0.6388,
"step": 2900
},
{
"epoch": 0.3881423188502451,
"grad_norm": 6.7632601439437625,
"learning_rate": 2.0435337736376677e-05,
"loss": 0.6663,
"step": 2910
},
{
"epoch": 0.3894761412517923,
"grad_norm": 5.0897618694848115,
"learning_rate": 2.03761119888507e-05,
"loss": 0.686,
"step": 2920
},
{
"epoch": 0.39080996365333953,
"grad_norm": 3.84563964047397,
"learning_rate": 2.031678991932201e-05,
"loss": 0.6763,
"step": 2930
},
{
"epoch": 0.3921437860548868,
"grad_norm": 1.3602515715842554,
"learning_rate": 2.0257372590644314e-05,
"loss": 0.7278,
"step": 2940
},
{
"epoch": 0.39347760845643404,
"grad_norm": 3.0517076558378293,
"learning_rate": 2.0197861067378044e-05,
"loss": 0.6734,
"step": 2950
},
{
"epoch": 0.39481143085798126,
"grad_norm": 2.0564006437177826,
"learning_rate": 2.0138256415771275e-05,
"loss": 0.6861,
"step": 2960
},
{
"epoch": 0.3961452532595285,
"grad_norm": 3.0150939790113522,
"learning_rate": 2.0078559703740654e-05,
"loss": 0.6745,
"step": 2970
},
{
"epoch": 0.3974790756610757,
"grad_norm": 2.3696016899226495,
"learning_rate": 2.0018772000852216e-05,
"loss": 0.6812,
"step": 2980
},
{
"epoch": 0.398812898062623,
"grad_norm": 1.8364192249492444,
"learning_rate": 1.9958894378302265e-05,
"loss": 0.6631,
"step": 2990
},
{
"epoch": 0.4001467204641702,
"grad_norm": 2.3337386891806595,
"learning_rate": 1.989892790889817e-05,
"loss": 0.7276,
"step": 3000
},
{
"epoch": 0.40148054286571744,
"grad_norm": 2.225372210455397,
"learning_rate": 1.9838873667039134e-05,
"loss": 0.6782,
"step": 3010
},
{
"epoch": 0.40281436526726466,
"grad_norm": 4.9169439894919265,
"learning_rate": 1.9778732728696937e-05,
"loss": 0.6787,
"step": 3020
},
{
"epoch": 0.4041481876688119,
"grad_norm": 2.335178009370853,
"learning_rate": 1.9718506171396694e-05,
"loss": 0.7036,
"step": 3030
},
{
"epoch": 0.4054820100703591,
"grad_norm": 2.2036269677969327,
"learning_rate": 1.965819507419751e-05,
"loss": 0.6649,
"step": 3040
},
{
"epoch": 0.4068158324719064,
"grad_norm": 4.7578038809060885,
"learning_rate": 1.9597800517673165e-05,
"loss": 0.6682,
"step": 3050
},
{
"epoch": 0.4081496548734536,
"grad_norm": 16.429386587829253,
"learning_rate": 1.9537323583892753e-05,
"loss": 0.6875,
"step": 3060
},
{
"epoch": 0.40948347727500084,
"grad_norm": 2.9803066784550953,
"learning_rate": 1.9476765356401304e-05,
"loss": 0.7022,
"step": 3070
},
{
"epoch": 0.41081729967654806,
"grad_norm": 1.7088579752363298,
"learning_rate": 1.9416126920200344e-05,
"loss": 0.7335,
"step": 3080
},
{
"epoch": 0.4121511220780953,
"grad_norm": 1.8641120769753927,
"learning_rate": 1.9355409361728482e-05,
"loss": 0.7195,
"step": 3090
},
{
"epoch": 0.4134849444796425,
"grad_norm": 2.887584179728711,
"learning_rate": 1.9294613768841932e-05,
"loss": 0.6694,
"step": 3100
},
{
"epoch": 0.4148187668811898,
"grad_norm": 3.3517946816430486,
"learning_rate": 1.9233741230795022e-05,
"loss": 0.6763,
"step": 3110
},
{
"epoch": 0.416152589282737,
"grad_norm": 15.627345038869116,
"learning_rate": 1.9172792838220686e-05,
"loss": 0.6508,
"step": 3120
},
{
"epoch": 0.41748641168428424,
"grad_norm": 8.325170511784908,
"learning_rate": 1.9111769683110914e-05,
"loss": 0.6638,
"step": 3130
},
{
"epoch": 0.41882023408583147,
"grad_norm": 2.2227223257423576,
"learning_rate": 1.905067285879719e-05,
"loss": 0.6702,
"step": 3140
},
{
"epoch": 0.4201540564873787,
"grad_norm": 2.151930199856048,
"learning_rate": 1.8989503459930908e-05,
"loss": 0.6702,
"step": 3150
},
{
"epoch": 0.4214878788889259,
"grad_norm": 2.0853572424146445,
"learning_rate": 1.892826258246376e-05,
"loss": 0.6687,
"step": 3160
},
{
"epoch": 0.4228217012904732,
"grad_norm": 12.723619283478206,
"learning_rate": 1.886695132362808e-05,
"loss": 0.6736,
"step": 3170
},
{
"epoch": 0.4241555236920204,
"grad_norm": 2.2341551772603223,
"learning_rate": 1.8805570781917228e-05,
"loss": 0.6979,
"step": 3180
},
{
"epoch": 0.42548934609356764,
"grad_norm": 2.7010833438865482,
"learning_rate": 1.8744122057065856e-05,
"loss": 0.6589,
"step": 3190
},
{
"epoch": 0.42682316849511487,
"grad_norm": 3.764700495141046,
"learning_rate": 1.868260625003024e-05,
"loss": 0.6951,
"step": 3200
},
{
"epoch": 0.4281569908966621,
"grad_norm": 1.5720755325934035,
"learning_rate": 1.8621024462968553e-05,
"loss": 0.6837,
"step": 3210
},
{
"epoch": 0.4294908132982093,
"grad_norm": 3.9200536138410382,
"learning_rate": 1.85593777992211e-05,
"loss": 0.7135,
"step": 3220
},
{
"epoch": 0.4308246356997566,
"grad_norm": 2.1410220258311203,
"learning_rate": 1.849766736329056e-05,
"loss": 0.6935,
"step": 3230
},
{
"epoch": 0.4321584581013038,
"grad_norm": 3.306117548750314,
"learning_rate": 1.8435894260822208e-05,
"loss": 0.6947,
"step": 3240
},
{
"epoch": 0.43349228050285105,
"grad_norm": 3.906418209268947,
"learning_rate": 1.8374059598584084e-05,
"loss": 0.657,
"step": 3250
},
{
"epoch": 0.43482610290439827,
"grad_norm": 1.3332307398585346,
"learning_rate": 1.831216448444717e-05,
"loss": 0.6456,
"step": 3260
},
{
"epoch": 0.4361599253059455,
"grad_norm": 20.93039175046958,
"learning_rate": 1.8250210027365562e-05,
"loss": 0.7233,
"step": 3270
},
{
"epoch": 0.4374937477074928,
"grad_norm": 4.495787238072399,
"learning_rate": 1.818819733735657e-05,
"loss": 0.7438,
"step": 3280
},
{
"epoch": 0.43882757010904,
"grad_norm": 2.8059359776214166,
"learning_rate": 1.812612752548084e-05,
"loss": 0.654,
"step": 3290
},
{
"epoch": 0.4401613925105872,
"grad_norm": 2.226805342431407,
"learning_rate": 1.806400170382246e-05,
"loss": 0.6823,
"step": 3300
},
{
"epoch": 0.44149521491213445,
"grad_norm": 1.5989661035338567,
"learning_rate": 1.8001820985469026e-05,
"loss": 0.6665,
"step": 3310
},
{
"epoch": 0.4428290373136817,
"grad_norm": 44.45501423929167,
"learning_rate": 1.7939586484491704e-05,
"loss": 0.6901,
"step": 3320
},
{
"epoch": 0.4441628597152289,
"grad_norm": 3.526154343998946,
"learning_rate": 1.787729931592525e-05,
"loss": 0.734,
"step": 3330
},
{
"epoch": 0.4454966821167762,
"grad_norm": 1.4841336505836662,
"learning_rate": 1.781496059574807e-05,
"loss": 0.7174,
"step": 3340
},
{
"epoch": 0.4468305045183234,
"grad_norm": 1.7136084206360864,
"learning_rate": 1.7752571440862178e-05,
"loss": 0.6697,
"step": 3350
},
{
"epoch": 0.4481643269198706,
"grad_norm": 1.1822744135781105,
"learning_rate": 1.7690132969073223e-05,
"loss": 0.6754,
"step": 3360
},
{
"epoch": 0.44949814932141785,
"grad_norm": 3.1896570424325943,
"learning_rate": 1.7627646299070457e-05,
"loss": 0.7047,
"step": 3370
},
{
"epoch": 0.4508319717229651,
"grad_norm": 3.246445692977348,
"learning_rate": 1.7565112550406663e-05,
"loss": 0.6758,
"step": 3380
},
{
"epoch": 0.4521657941245123,
"grad_norm": 1.8894085083670678,
"learning_rate": 1.7502532843478134e-05,
"loss": 0.626,
"step": 3390
},
{
"epoch": 0.4534996165260596,
"grad_norm": 1.9836472505954585,
"learning_rate": 1.743990829950458e-05,
"loss": 0.7355,
"step": 3400
},
{
"epoch": 0.4548334389276068,
"grad_norm": 2.0265270785939817,
"learning_rate": 1.737724004050903e-05,
"loss": 0.6602,
"step": 3410
},
{
"epoch": 0.45616726132915403,
"grad_norm": 37.752331390135694,
"learning_rate": 1.731452918929774e-05,
"loss": 0.7096,
"step": 3420
},
{
"epoch": 0.45750108373070125,
"grad_norm": 3.069288243097065,
"learning_rate": 1.7251776869440097e-05,
"loss": 0.6894,
"step": 3430
},
{
"epoch": 0.4588349061322485,
"grad_norm": 3.806122244959967,
"learning_rate": 1.718898420524845e-05,
"loss": 0.6717,
"step": 3440
},
{
"epoch": 0.4601687285337957,
"grad_norm": 5.422119486900359,
"learning_rate": 1.7126152321757985e-05,
"loss": 0.7123,
"step": 3450
},
{
"epoch": 0.461502550935343,
"grad_norm": 8.913213423628779,
"learning_rate": 1.7063282344706577e-05,
"loss": 0.6943,
"step": 3460
},
{
"epoch": 0.4628363733368902,
"grad_norm": 2.36239995996906,
"learning_rate": 1.7000375400514602e-05,
"loss": 0.6472,
"step": 3470
},
{
"epoch": 0.46417019573843743,
"grad_norm": 2.256697435474481,
"learning_rate": 1.693743261626476e-05,
"loss": 0.6981,
"step": 3480
},
{
"epoch": 0.46550401813998465,
"grad_norm": 4.819999301324691,
"learning_rate": 1.68744551196819e-05,
"loss": 0.6436,
"step": 3490
},
{
"epoch": 0.4668378405415319,
"grad_norm": 1.6840417535820236,
"learning_rate": 1.6811444039112787e-05,
"loss": 0.6993,
"step": 3500
},
{
"epoch": 0.4681716629430791,
"grad_norm": 2.544379480255388,
"learning_rate": 1.6748400503505905e-05,
"loss": 0.6722,
"step": 3510
},
{
"epoch": 0.4695054853446264,
"grad_norm": 3.7390416566783493,
"learning_rate": 1.6685325642391223e-05,
"loss": 0.6785,
"step": 3520
},
{
"epoch": 0.4708393077461736,
"grad_norm": 3.855083968142357,
"learning_rate": 1.662222058585996e-05,
"loss": 0.6952,
"step": 3530
},
{
"epoch": 0.47217313014772083,
"grad_norm": 9.342710099035981,
"learning_rate": 1.6559086464544334e-05,
"loss": 0.6772,
"step": 3540
},
{
"epoch": 0.47350695254926806,
"grad_norm": 1.7318075611168393,
"learning_rate": 1.6495924409597305e-05,
"loss": 0.6757,
"step": 3550
},
{
"epoch": 0.4748407749508153,
"grad_norm": 5.54593274347221,
"learning_rate": 1.6432735552672317e-05,
"loss": 0.704,
"step": 3560
},
{
"epoch": 0.4761745973523625,
"grad_norm": 9.782086516371113,
"learning_rate": 1.636952102590301e-05,
"loss": 0.6993,
"step": 3570
},
{
"epoch": 0.4775084197539098,
"grad_norm": 3.0266652780549257,
"learning_rate": 1.630628196188295e-05,
"loss": 0.7198,
"step": 3580
},
{
"epoch": 0.478842242155457,
"grad_norm": 5.519301272688082,
"learning_rate": 1.6243019493645315e-05,
"loss": 0.7222,
"step": 3590
},
{
"epoch": 0.48017606455700423,
"grad_norm": 2.2909343474167008,
"learning_rate": 1.617973475464262e-05,
"loss": 0.6569,
"step": 3600
},
{
"epoch": 0.48150988695855146,
"grad_norm": 7.436045528492052,
"learning_rate": 1.6116428878726396e-05,
"loss": 0.6938,
"step": 3610
},
{
"epoch": 0.4828437093600987,
"grad_norm": 2.658185186612875,
"learning_rate": 1.6053103000126874e-05,
"loss": 0.667,
"step": 3620
},
{
"epoch": 0.48417753176164596,
"grad_norm": 3.1099903513767915,
"learning_rate": 1.598975825343267e-05,
"loss": 0.6666,
"step": 3630
},
{
"epoch": 0.4855113541631932,
"grad_norm": 2.844884960349376,
"learning_rate": 1.5926395773570447e-05,
"loss": 0.6715,
"step": 3640
},
{
"epoch": 0.4868451765647404,
"grad_norm": 3.5867549321919827,
"learning_rate": 1.5863016695784604e-05,
"loss": 0.6938,
"step": 3650
},
{
"epoch": 0.48817899896628764,
"grad_norm": 6.440074516955184,
"learning_rate": 1.5799622155616887e-05,
"loss": 0.6575,
"step": 3660
},
{
"epoch": 0.48951282136783486,
"grad_norm": 5.454278599829281,
"learning_rate": 1.5736213288886112e-05,
"loss": 0.6925,
"step": 3670
},
{
"epoch": 0.4908466437693821,
"grad_norm": 1.3038190132520797,
"learning_rate": 1.567279123166776e-05,
"loss": 0.6827,
"step": 3680
},
{
"epoch": 0.49218046617092936,
"grad_norm": 2.2843915654324127,
"learning_rate": 1.560935712027364e-05,
"loss": 0.7272,
"step": 3690
},
{
"epoch": 0.4935142885724766,
"grad_norm": 5.235529803525943,
"learning_rate": 1.5545912091231543e-05,
"loss": 0.6859,
"step": 3700
},
{
"epoch": 0.4948481109740238,
"grad_norm": 67.2928715868646,
"learning_rate": 1.548245728126486e-05,
"loss": 0.6755,
"step": 3710
},
{
"epoch": 0.49618193337557104,
"grad_norm": 1.8808319311625479,
"learning_rate": 1.5418993827272224e-05,
"loss": 0.6827,
"step": 3720
},
{
"epoch": 0.49751575577711826,
"grad_norm": 3.665691607326232,
"learning_rate": 1.5355522866307144e-05,
"loss": 0.729,
"step": 3730
},
{
"epoch": 0.4988495781786655,
"grad_norm": 3.6256043818215864,
"learning_rate": 1.529204553555762e-05,
"loss": 0.674,
"step": 3740
},
{
"epoch": 0.5001834005802127,
"grad_norm": 5.335543393148527,
"learning_rate": 1.522856297232579e-05,
"loss": 0.683,
"step": 3750
},
{
"epoch": 0.5015172229817599,
"grad_norm": 1.768660779664017,
"learning_rate": 1.5165076314007529e-05,
"loss": 0.6607,
"step": 3760
},
{
"epoch": 0.5028510453833072,
"grad_norm": 3.6829482612300515,
"learning_rate": 1.5101586698072095e-05,
"loss": 0.6778,
"step": 3770
},
{
"epoch": 0.5041848677848545,
"grad_norm": 2.016271324919982,
"learning_rate": 1.5038095262041725e-05,
"loss": 0.6758,
"step": 3780
},
{
"epoch": 0.5055186901864017,
"grad_norm": 4.828462163022634,
"learning_rate": 1.4974603143471268e-05,
"loss": 0.7128,
"step": 3790
},
{
"epoch": 0.506852512587949,
"grad_norm": 2.1222493938328792,
"learning_rate": 1.4911111479927804e-05,
"loss": 0.6626,
"step": 3800
},
{
"epoch": 0.5081863349894962,
"grad_norm": 7.554329738410342,
"learning_rate": 1.4847621408970266e-05,
"loss": 0.691,
"step": 3810
},
{
"epoch": 0.5095201573910434,
"grad_norm": 1.1718744405109343,
"learning_rate": 1.4784134068129043e-05,
"loss": 0.7085,
"step": 3820
},
{
"epoch": 0.5108539797925906,
"grad_norm": 4.3974979135617875,
"learning_rate": 1.4720650594885614e-05,
"loss": 0.6888,
"step": 3830
},
{
"epoch": 0.5121878021941378,
"grad_norm": 3.056822672044886,
"learning_rate": 1.4657172126652167e-05,
"loss": 0.7112,
"step": 3840
},
{
"epoch": 0.5135216245956851,
"grad_norm": 2.9519833573179786,
"learning_rate": 1.459369980075121e-05,
"loss": 0.7143,
"step": 3850
},
{
"epoch": 0.5148554469972323,
"grad_norm": 2.7879172145478277,
"learning_rate": 1.4530234754395207e-05,
"loss": 0.73,
"step": 3860
},
{
"epoch": 0.5161892693987795,
"grad_norm": 2.689401796470298,
"learning_rate": 1.4466778124666192e-05,
"loss": 0.6802,
"step": 3870
},
{
"epoch": 0.5175230918003267,
"grad_norm": 2.1741383786138297,
"learning_rate": 1.4403331048495404e-05,
"loss": 0.6404,
"step": 3880
},
{
"epoch": 0.5188569142018741,
"grad_norm": 6.3241834633627985,
"learning_rate": 1.4339894662642914e-05,
"loss": 0.678,
"step": 3890
},
{
"epoch": 0.5201907366034213,
"grad_norm": 6.491040731017525,
"learning_rate": 1.4276470103677257e-05,
"loss": 0.6339,
"step": 3900
},
{
"epoch": 0.5215245590049685,
"grad_norm": 2.043861558768385,
"learning_rate": 1.4213058507955072e-05,
"loss": 0.6655,
"step": 3910
},
{
"epoch": 0.5228583814065157,
"grad_norm": 1.6513869669590875,
"learning_rate": 1.4149661011600734e-05,
"loss": 0.688,
"step": 3920
},
{
"epoch": 0.524192203808063,
"grad_norm": 3.541890141037579,
"learning_rate": 1.4086278750486017e-05,
"loss": 0.6899,
"step": 3930
},
{
"epoch": 0.5255260262096102,
"grad_norm": 6.194603743545731,
"learning_rate": 1.4022912860209709e-05,
"loss": 0.669,
"step": 3940
},
{
"epoch": 0.5268598486111574,
"grad_norm": 2.0486629917737873,
"learning_rate": 1.3959564476077308e-05,
"loss": 0.7003,
"step": 3950
},
{
"epoch": 0.5281936710127046,
"grad_norm": 2.7696033032598693,
"learning_rate": 1.389623473308065e-05,
"loss": 0.7083,
"step": 3960
},
{
"epoch": 0.5295274934142519,
"grad_norm": 2.4390436810800566,
"learning_rate": 1.3832924765877587e-05,
"loss": 0.6827,
"step": 3970
},
{
"epoch": 0.5308613158157991,
"grad_norm": 2.152867968184263,
"learning_rate": 1.3769635708771654e-05,
"loss": 0.6739,
"step": 3980
},
{
"epoch": 0.5321951382173463,
"grad_norm": 7.177870428132333,
"learning_rate": 1.3706368695691745e-05,
"loss": 0.6652,
"step": 3990
},
{
"epoch": 0.5335289606188935,
"grad_norm": 2.2804181398144987,
"learning_rate": 1.3643124860171801e-05,
"loss": 0.6592,
"step": 4000
},
{
"epoch": 0.5348627830204409,
"grad_norm": 2.0999660171302312,
"learning_rate": 1.35799053353305e-05,
"loss": 0.6905,
"step": 4010
},
{
"epoch": 0.5361966054219881,
"grad_norm": 1.8299441945056365,
"learning_rate": 1.3516711253850949e-05,
"loss": 0.6714,
"step": 4020
},
{
"epoch": 0.5375304278235353,
"grad_norm": 5.370538678704669,
"learning_rate": 1.3453543747960393e-05,
"loss": 0.7027,
"step": 4030
},
{
"epoch": 0.5388642502250826,
"grad_norm": 5.933915833300909,
"learning_rate": 1.3390403949409943e-05,
"loss": 0.7126,
"step": 4040
},
{
"epoch": 0.5401980726266298,
"grad_norm": 2.927835677762918,
"learning_rate": 1.3327292989454273e-05,
"loss": 0.652,
"step": 4050
},
{
"epoch": 0.541531895028177,
"grad_norm": 2.3060298824971297,
"learning_rate": 1.3264211998831374e-05,
"loss": 0.6363,
"step": 4060
},
{
"epoch": 0.5428657174297242,
"grad_norm": 8.939144383392009,
"learning_rate": 1.3201162107742285e-05,
"loss": 0.6518,
"step": 4070
},
{
"epoch": 0.5441995398312715,
"grad_norm": 2.1217426691379253,
"learning_rate": 1.3138144445830841e-05,
"loss": 0.6764,
"step": 4080
},
{
"epoch": 0.5455333622328187,
"grad_norm": 3.455459703040737,
"learning_rate": 1.3075160142163442e-05,
"loss": 0.669,
"step": 4090
},
{
"epoch": 0.5468671846343659,
"grad_norm": 1.0556174805250833,
"learning_rate": 1.3012210325208818e-05,
"loss": 0.6653,
"step": 4100
},
{
"epoch": 0.5482010070359131,
"grad_norm": 2.5622275603077846,
"learning_rate": 1.2949296122817813e-05,
"loss": 0.6999,
"step": 4110
},
{
"epoch": 0.5495348294374603,
"grad_norm": 1.311339273822971,
"learning_rate": 1.2886418662203174e-05,
"loss": 0.6542,
"step": 4120
},
{
"epoch": 0.5508686518390077,
"grad_norm": 2.0314733529170548,
"learning_rate": 1.282357906991936e-05,
"loss": 0.7086,
"step": 4130
},
{
"epoch": 0.5522024742405549,
"grad_norm": 1.3992167597958347,
"learning_rate": 1.276077847184236e-05,
"loss": 0.6647,
"step": 4140
},
{
"epoch": 0.5535362966421021,
"grad_norm": 2.8142228677605243,
"learning_rate": 1.2698017993149504e-05,
"loss": 0.6679,
"step": 4150
},
{
"epoch": 0.5548701190436494,
"grad_norm": 53.450181243846124,
"learning_rate": 1.2635298758299336e-05,
"loss": 0.6677,
"step": 4160
},
{
"epoch": 0.5562039414451966,
"grad_norm": 34.73442771588442,
"learning_rate": 1.2572621891011426e-05,
"loss": 0.6358,
"step": 4170
},
{
"epoch": 0.5575377638467438,
"grad_norm": 2.25939959543013,
"learning_rate": 1.2509988514246272e-05,
"loss": 0.7153,
"step": 4180
},
{
"epoch": 0.558871586248291,
"grad_norm": 28.104396750316823,
"learning_rate": 1.2447399750185166e-05,
"loss": 0.7164,
"step": 4190
},
{
"epoch": 0.5602054086498383,
"grad_norm": 2.2840282297009913,
"learning_rate": 1.2384856720210086e-05,
"loss": 0.6526,
"step": 4200
},
{
"epoch": 0.5615392310513855,
"grad_norm": 1.6880048773636342,
"learning_rate": 1.2322360544883608e-05,
"loss": 0.6928,
"step": 4210
},
{
"epoch": 0.5628730534529327,
"grad_norm": 2.507578235748276,
"learning_rate": 1.2259912343928831e-05,
"loss": 0.6805,
"step": 4220
},
{
"epoch": 0.5642068758544799,
"grad_norm": 2.2070032708749183,
"learning_rate": 1.2197513236209312e-05,
"loss": 0.6833,
"step": 4230
},
{
"epoch": 0.5655406982560273,
"grad_norm": 3.2628445264349515,
"learning_rate": 1.213516433970902e-05,
"loss": 0.6706,
"step": 4240
},
{
"epoch": 0.5668745206575745,
"grad_norm": 5.645591683747128,
"learning_rate": 1.2072866771512306e-05,
"loss": 0.6812,
"step": 4250
},
{
"epoch": 0.5682083430591217,
"grad_norm": 2.7073660859499684,
"learning_rate": 1.201062164778389e-05,
"loss": 0.6623,
"step": 4260
},
{
"epoch": 0.5695421654606689,
"grad_norm": 5.201872987152117,
"learning_rate": 1.1948430083748864e-05,
"loss": 0.6562,
"step": 4270
},
{
"epoch": 0.5708759878622162,
"grad_norm": 1.3559544817056064,
"learning_rate": 1.1886293193672707e-05,
"loss": 0.6906,
"step": 4280
},
{
"epoch": 0.5722098102637634,
"grad_norm": 24.227173434331696,
"learning_rate": 1.1824212090841321e-05,
"loss": 0.642,
"step": 4290
},
{
"epoch": 0.5735436326653106,
"grad_norm": 5.123165453557631,
"learning_rate": 1.1762187887541088e-05,
"loss": 0.7297,
"step": 4300
},
{
"epoch": 0.5748774550668578,
"grad_norm": 5.724684674103886,
"learning_rate": 1.1700221695038944e-05,
"loss": 0.6524,
"step": 4310
},
{
"epoch": 0.5762112774684051,
"grad_norm": 9.965228089412879,
"learning_rate": 1.1638314623562459e-05,
"loss": 0.704,
"step": 4320
},
{
"epoch": 0.5775450998699523,
"grad_norm": 3.232350328466124,
"learning_rate": 1.1576467782279953e-05,
"loss": 0.6391,
"step": 4330
},
{
"epoch": 0.5788789222714995,
"grad_norm": 3.1098394765854707,
"learning_rate": 1.1514682279280621e-05,
"loss": 0.7083,
"step": 4340
},
{
"epoch": 0.5802127446730467,
"grad_norm": 4.287688599823072,
"learning_rate": 1.1452959221554684e-05,
"loss": 0.7003,
"step": 4350
},
{
"epoch": 0.5815465670745941,
"grad_norm": 3.889095266103746,
"learning_rate": 1.1391299714973553e-05,
"loss": 0.6915,
"step": 4360
},
{
"epoch": 0.5828803894761413,
"grad_norm": 28.63862172481202,
"learning_rate": 1.1329704864270005e-05,
"loss": 0.639,
"step": 4370
},
{
"epoch": 0.5842142118776885,
"grad_norm": 2.8068799133869353,
"learning_rate": 1.1268175773018409e-05,
"loss": 0.675,
"step": 4380
},
{
"epoch": 0.5855480342792357,
"grad_norm": 10.272361368305473,
"learning_rate": 1.1206713543614942e-05,
"loss": 0.707,
"step": 4390
},
{
"epoch": 0.586881856680783,
"grad_norm": 1.9200541236321964,
"learning_rate": 1.1145319277257834e-05,
"loss": 0.7177,
"step": 4400
},
{
"epoch": 0.5882156790823302,
"grad_norm": 2.933892165040151,
"learning_rate": 1.108399407392765e-05,
"loss": 0.6991,
"step": 4410
},
{
"epoch": 0.5895495014838774,
"grad_norm": 2.898606287727756,
"learning_rate": 1.1022739032367572e-05,
"loss": 0.6697,
"step": 4420
},
{
"epoch": 0.5908833238854246,
"grad_norm": 1.8428061704250374,
"learning_rate": 1.0961555250063718e-05,
"loss": 0.6939,
"step": 4430
},
{
"epoch": 0.5922171462869719,
"grad_norm": 2.93674459281119,
"learning_rate": 1.090044382322548e-05,
"loss": 0.6926,
"step": 4440
},
{
"epoch": 0.5935509686885191,
"grad_norm": 1.50823929038625,
"learning_rate": 1.083940584676588e-05,
"loss": 0.6757,
"step": 4450
},
{
"epoch": 0.5948847910900663,
"grad_norm": 2.0214873235561224,
"learning_rate": 1.077844241428195e-05,
"loss": 0.6794,
"step": 4460
},
{
"epoch": 0.5962186134916136,
"grad_norm": 1.87436887927708,
"learning_rate": 1.071755461803515e-05,
"loss": 0.6882,
"step": 4470
},
{
"epoch": 0.5975524358931609,
"grad_norm": 1.4982260705038484,
"learning_rate": 1.0656743548931784e-05,
"loss": 0.6843,
"step": 4480
},
{
"epoch": 0.5988862582947081,
"grad_norm": 1.8602173123516976,
"learning_rate": 1.0596010296503469e-05,
"loss": 0.6553,
"step": 4490
},
{
"epoch": 0.6002200806962553,
"grad_norm": 13.381873530435502,
"learning_rate": 1.0535355948887598e-05,
"loss": 0.6708,
"step": 4500
},
{
"epoch": 0.6015539030978025,
"grad_norm": 1.9185796879534898,
"learning_rate": 1.0474781592807854e-05,
"loss": 0.6806,
"step": 4510
},
{
"epoch": 0.6028877254993498,
"grad_norm": 3.364824709584871,
"learning_rate": 1.0414288313554746e-05,
"loss": 0.6847,
"step": 4520
},
{
"epoch": 0.604221547900897,
"grad_norm": 3.497292970388317,
"learning_rate": 1.0353877194966152e-05,
"loss": 0.6635,
"step": 4530
},
{
"epoch": 0.6055553703024442,
"grad_norm": 4.099439787389805,
"learning_rate": 1.0293549319407901e-05,
"loss": 0.7056,
"step": 4540
},
{
"epoch": 0.6068891927039914,
"grad_norm": 4.726371219011721,
"learning_rate": 1.0233305767754391e-05,
"loss": 0.6746,
"step": 4550
},
{
"epoch": 0.6082230151055387,
"grad_norm": 1.9002791525912257,
"learning_rate": 1.0173147619369212e-05,
"loss": 0.6843,
"step": 4560
},
{
"epoch": 0.6095568375070859,
"grad_norm": 2.1361565277382026,
"learning_rate": 1.0113075952085815e-05,
"loss": 0.716,
"step": 4570
},
{
"epoch": 0.6108906599086331,
"grad_norm": 2.7505558492527284,
"learning_rate": 1.0053091842188196e-05,
"loss": 0.6676,
"step": 4580
},
{
"epoch": 0.6122244823101805,
"grad_norm": 7.936649787603975,
"learning_rate": 9.993196364391614e-06,
"loss": 0.7174,
"step": 4590
},
{
"epoch": 0.6135583047117277,
"grad_norm": 1.9962614403326984,
"learning_rate": 9.93339059182334e-06,
"loss": 0.7116,
"step": 4600
},
{
"epoch": 0.6148921271132749,
"grad_norm": 1.6832414510723148,
"learning_rate": 9.873675596003424e-06,
"loss": 0.631,
"step": 4610
},
{
"epoch": 0.6162259495148221,
"grad_norm": 1.943682374421793,
"learning_rate": 9.8140524468255e-06,
"loss": 0.7083,
"step": 4620
},
{
"epoch": 0.6175597719163693,
"grad_norm": 3.6516090347661323,
"learning_rate": 9.754522212537614e-06,
"loss": 0.6983,
"step": 4630
},
{
"epoch": 0.6188935943179166,
"grad_norm": 2.245519627711479,
"learning_rate": 9.695085959723088e-06,
"loss": 0.664,
"step": 4640
},
{
"epoch": 0.6202274167194638,
"grad_norm": 1.9531032713635088,
"learning_rate": 9.63574475328141e-06,
"loss": 0.6853,
"step": 4650
},
{
"epoch": 0.621561239121011,
"grad_norm": 5.427236407190242,
"learning_rate": 9.576499656409158e-06,
"loss": 0.694,
"step": 4660
},
{
"epoch": 0.6228950615225582,
"grad_norm": 2.000287154564092,
"learning_rate": 9.517351730580939e-06,
"loss": 0.7371,
"step": 4670
},
{
"epoch": 0.6242288839241055,
"grad_norm": 1.8847269162961595,
"learning_rate": 9.458302035530384e-06,
"loss": 0.6643,
"step": 4680
},
{
"epoch": 0.6255627063256527,
"grad_norm": 2.7122993676306564,
"learning_rate": 9.399351629231154e-06,
"loss": 0.6912,
"step": 4690
},
{
"epoch": 0.6268965287271999,
"grad_norm": 3.1005319569612078,
"learning_rate": 9.340501567877989e-06,
"loss": 0.6798,
"step": 4700
},
{
"epoch": 0.6282303511287473,
"grad_norm": 3.9244802871969133,
"learning_rate": 9.281752905867778e-06,
"loss": 0.6383,
"step": 4710
},
{
"epoch": 0.6295641735302945,
"grad_norm": 2.435298414489433,
"learning_rate": 9.223106695780677e-06,
"loss": 0.6892,
"step": 4720
},
{
"epoch": 0.6308979959318417,
"grad_norm": 8.27385430371254,
"learning_rate": 9.164563988361242e-06,
"loss": 0.6359,
"step": 4730
},
{
"epoch": 0.6322318183333889,
"grad_norm": 1.53362121712898,
"learning_rate": 9.106125832499604e-06,
"loss": 0.6611,
"step": 4740
},
{
"epoch": 0.6335656407349362,
"grad_norm": 3.683488506541024,
"learning_rate": 9.047793275212686e-06,
"loss": 0.6755,
"step": 4750
},
{
"epoch": 0.6348994631364834,
"grad_norm": 5.130363634989403,
"learning_rate": 8.989567361625427e-06,
"loss": 0.6753,
"step": 4760
},
{
"epoch": 0.6362332855380306,
"grad_norm": 5.687746482790019,
"learning_rate": 8.931449134952075e-06,
"loss": 0.6841,
"step": 4770
},
{
"epoch": 0.6375671079395778,
"grad_norm": 6.9830588651752,
"learning_rate": 8.873439636477484e-06,
"loss": 0.7064,
"step": 4780
},
{
"epoch": 0.638900930341125,
"grad_norm": 6.829790484006074,
"learning_rate": 8.815539905538459e-06,
"loss": 0.6757,
"step": 4790
},
{
"epoch": 0.6402347527426723,
"grad_norm": 3.4577918643740753,
"learning_rate": 8.757750979505137e-06,
"loss": 0.6936,
"step": 4800
},
{
"epoch": 0.6415685751442195,
"grad_norm": 2.1055808524046715,
"learning_rate": 8.700073893762408e-06,
"loss": 0.6566,
"step": 4810
},
{
"epoch": 0.6429023975457668,
"grad_norm": 3.9440274100212,
"learning_rate": 8.642509681691347e-06,
"loss": 0.6858,
"step": 4820
},
{
"epoch": 0.6442362199473141,
"grad_norm": 7.842807181182078,
"learning_rate": 8.585059374650717e-06,
"loss": 0.6715,
"step": 4830
},
{
"epoch": 0.6455700423488613,
"grad_norm": 11.85414363990697,
"learning_rate": 8.527724001958476e-06,
"loss": 0.6761,
"step": 4840
},
{
"epoch": 0.6469038647504085,
"grad_norm": 4.506809169239302,
"learning_rate": 8.470504590873346e-06,
"loss": 0.6786,
"step": 4850
},
{
"epoch": 0.6482376871519557,
"grad_norm": 2.6051042521990246,
"learning_rate": 8.413402166576397e-06,
"loss": 0.687,
"step": 4860
},
{
"epoch": 0.649571509553503,
"grad_norm": 4.897894091481774,
"learning_rate": 8.3564177521527e-06,
"loss": 0.7139,
"step": 4870
},
{
"epoch": 0.6509053319550502,
"grad_norm": 3.187446743389348,
"learning_rate": 8.29955236857297e-06,
"loss": 0.7089,
"step": 4880
},
{
"epoch": 0.6522391543565974,
"grad_norm": 2.7935446405566817,
"learning_rate": 8.242807034675289e-06,
"loss": 0.6961,
"step": 4890
},
{
"epoch": 0.6535729767581446,
"grad_norm": 1.9852638957064885,
"learning_rate": 8.186182767146848e-06,
"loss": 0.6691,
"step": 4900
},
{
"epoch": 0.6549067991596919,
"grad_norm": 1.2955893152882823,
"learning_rate": 8.12968058050574e-06,
"loss": 0.6845,
"step": 4910
},
{
"epoch": 0.6562406215612391,
"grad_norm": 4.458278524947254,
"learning_rate": 8.073301487082768e-06,
"loss": 0.7084,
"step": 4920
},
{
"epoch": 0.6575744439627863,
"grad_norm": 1.6668716860875192,
"learning_rate": 8.017046497003308e-06,
"loss": 0.6545,
"step": 4930
},
{
"epoch": 0.6589082663643336,
"grad_norm": 7.614957105349221,
"learning_rate": 7.960916618169233e-06,
"loss": 0.6591,
"step": 4940
},
{
"epoch": 0.6602420887658809,
"grad_norm": 4.813371059308626,
"learning_rate": 7.904912856240833e-06,
"loss": 0.6835,
"step": 4950
},
{
"epoch": 0.6615759111674281,
"grad_norm": 3.6061538921487895,
"learning_rate": 7.849036214618802e-06,
"loss": 0.7259,
"step": 4960
},
{
"epoch": 0.6629097335689753,
"grad_norm": 1.8087311068420067,
"learning_rate": 7.793287694426263e-06,
"loss": 0.674,
"step": 4970
},
{
"epoch": 0.6642435559705225,
"grad_norm": 3.984673821706498,
"learning_rate": 7.737668294490834e-06,
"loss": 0.6533,
"step": 4980
},
{
"epoch": 0.6655773783720698,
"grad_norm": 5.258791329357682,
"learning_rate": 7.68217901132672e-06,
"loss": 0.6853,
"step": 4990
},
{
"epoch": 0.666911200773617,
"grad_norm": 2.741503247045013,
"learning_rate": 7.626820839116876e-06,
"loss": 0.6791,
"step": 5000
},
{
"epoch": 0.6682450231751642,
"grad_norm": 2.591787492035107,
"learning_rate": 7.571594769695181e-06,
"loss": 0.6794,
"step": 5010
},
{
"epoch": 0.6695788455767114,
"grad_norm": 30.913611357903495,
"learning_rate": 7.51650179252867e-06,
"loss": 0.6269,
"step": 5020
},
{
"epoch": 0.6709126679782587,
"grad_norm": 4.323386783391997,
"learning_rate": 7.461542894699818e-06,
"loss": 0.6601,
"step": 5030
},
{
"epoch": 0.6722464903798059,
"grad_norm": 6.048173474220757,
"learning_rate": 7.406719060888837e-06,
"loss": 0.6785,
"step": 5040
},
{
"epoch": 0.6735803127813531,
"grad_norm": 5.910287274448164,
"learning_rate": 7.352031273356045e-06,
"loss": 0.6739,
"step": 5050
},
{
"epoch": 0.6749141351829004,
"grad_norm": 3.8784452319972935,
"learning_rate": 7.297480511924263e-06,
"loss": 0.659,
"step": 5060
},
{
"epoch": 0.6762479575844477,
"grad_norm": 2.692653682505857,
"learning_rate": 7.243067753961267e-06,
"loss": 0.6619,
"step": 5070
},
{
"epoch": 0.6775817799859949,
"grad_norm": 3.3585820171455625,
"learning_rate": 7.188793974362254e-06,
"loss": 0.6491,
"step": 5080
},
{
"epoch": 0.6789156023875421,
"grad_norm": 5.633792532689823,
"learning_rate": 7.13466014553241e-06,
"loss": 0.6707,
"step": 5090
},
{
"epoch": 0.6802494247890893,
"grad_norm": 1.984366709774452,
"learning_rate": 7.080667237369468e-06,
"loss": 0.709,
"step": 5100
},
{
"epoch": 0.6815832471906366,
"grad_norm": 9.288816097446837,
"learning_rate": 7.0268162172463215e-06,
"loss": 0.7035,
"step": 5110
},
{
"epoch": 0.6829170695921838,
"grad_norm": 1.8165281193365694,
"learning_rate": 6.973108049993714e-06,
"loss": 0.6962,
"step": 5120
},
{
"epoch": 0.684250891993731,
"grad_norm": 1.9216061535869347,
"learning_rate": 6.919543697882938e-06,
"loss": 0.6288,
"step": 5130
},
{
"epoch": 0.6855847143952782,
"grad_norm": 4.700774051964428,
"learning_rate": 6.866124120608596e-06,
"loss": 0.69,
"step": 5140
},
{
"epoch": 0.6869185367968255,
"grad_norm": 2.6258085004850376,
"learning_rate": 6.812850275271412e-06,
"loss": 0.6972,
"step": 5150
},
{
"epoch": 0.6882523591983727,
"grad_norm": 1.8420624202751164,
"learning_rate": 6.759723116361077e-06,
"loss": 0.7003,
"step": 5160
},
{
"epoch": 0.68958618159992,
"grad_norm": 2.303984350495702,
"learning_rate": 6.706743595739151e-06,
"loss": 0.7121,
"step": 5170
},
{
"epoch": 0.6909200040014672,
"grad_norm": 8.917268916911917,
"learning_rate": 6.653912662622009e-06,
"loss": 0.6741,
"step": 5180
},
{
"epoch": 0.6922538264030145,
"grad_norm": 2.173292507049502,
"learning_rate": 6.601231263563832e-06,
"loss": 0.6714,
"step": 5190
},
{
"epoch": 0.6935876488045617,
"grad_norm": 2.081130872644949,
"learning_rate": 6.548700342439648e-06,
"loss": 0.7144,
"step": 5200
},
{
"epoch": 0.6949214712061089,
"grad_norm": 6.567757090287482,
"learning_rate": 6.496320840428426e-06,
"loss": 0.6768,
"step": 5210
},
{
"epoch": 0.6962552936076561,
"grad_norm": 3.696494173635367,
"learning_rate": 6.444093695996205e-06,
"loss": 0.658,
"step": 5220
},
{
"epoch": 0.6975891160092034,
"grad_norm": 7.188706802559419,
"learning_rate": 6.392019844879289e-06,
"loss": 0.6535,
"step": 5230
},
{
"epoch": 0.6989229384107506,
"grad_norm": 2.8103582916720034,
"learning_rate": 6.340100220067473e-06,
"loss": 0.7043,
"step": 5240
},
{
"epoch": 0.7002567608122978,
"grad_norm": 66.97949633796524,
"learning_rate": 6.28833575178733e-06,
"loss": 0.6148,
"step": 5250
},
{
"epoch": 0.701590583213845,
"grad_norm": 3.4174751317668894,
"learning_rate": 6.23672736748555e-06,
"loss": 0.7087,
"step": 5260
},
{
"epoch": 0.7029244056153923,
"grad_norm": 7.312380797889946,
"learning_rate": 6.1852759918123145e-06,
"loss": 0.7131,
"step": 5270
},
{
"epoch": 0.7042582280169395,
"grad_norm": 12.071522871150554,
"learning_rate": 6.133982546604735e-06,
"loss": 0.6769,
"step": 5280
},
{
"epoch": 0.7055920504184868,
"grad_norm": 11.379413796396078,
"learning_rate": 6.082847950870334e-06,
"loss": 0.6565,
"step": 5290
},
{
"epoch": 0.706925872820034,
"grad_norm": 1.9502749591546167,
"learning_rate": 6.031873120770585e-06,
"loss": 0.6806,
"step": 5300
},
{
"epoch": 0.7082596952215813,
"grad_norm": 5.770629034546035,
"learning_rate": 5.9810589696044935e-06,
"loss": 0.663,
"step": 5310
},
{
"epoch": 0.7095935176231285,
"grad_norm": 5.5025717184756235,
"learning_rate": 5.9304064077922274e-06,
"loss": 0.6485,
"step": 5320
},
{
"epoch": 0.7109273400246757,
"grad_norm": 2.5021143555339243,
"learning_rate": 5.879916342858821e-06,
"loss": 0.679,
"step": 5330
},
{
"epoch": 0.712261162426223,
"grad_norm": 4.647251317602692,
"learning_rate": 5.829589679417901e-06,
"loss": 0.6494,
"step": 5340
},
{
"epoch": 0.7135949848277702,
"grad_norm": 4.030828882740322,
"learning_rate": 5.779427319155485e-06,
"loss": 0.6978,
"step": 5350
},
{
"epoch": 0.7149288072293174,
"grad_norm": 5.554960820895111,
"learning_rate": 5.7294301608138274e-06,
"loss": 0.675,
"step": 5360
},
{
"epoch": 0.7162626296308646,
"grad_norm": 4.62525753644512,
"learning_rate": 5.679599100175312e-06,
"loss": 0.6484,
"step": 5370
},
{
"epoch": 0.7175964520324118,
"grad_norm": 3.971433798403304,
"learning_rate": 5.629935030046409e-06,
"loss": 0.6854,
"step": 5380
},
{
"epoch": 0.7189302744339591,
"grad_norm": 11.019036899581376,
"learning_rate": 5.580438840241671e-06,
"loss": 0.6781,
"step": 5390
},
{
"epoch": 0.7202640968355064,
"grad_norm": 2.7838894610164475,
"learning_rate": 5.531111417567799e-06,
"loss": 0.6475,
"step": 5400
},
{
"epoch": 0.7215979192370536,
"grad_norm": 4.108426873081711,
"learning_rate": 5.48195364580775e-06,
"loss": 0.6322,
"step": 5410
},
{
"epoch": 0.7229317416386009,
"grad_norm": 2.9697048860089366,
"learning_rate": 5.432966405704895e-06,
"loss": 0.6209,
"step": 5420
},
{
"epoch": 0.7242655640401481,
"grad_norm": 4.366189641352392,
"learning_rate": 5.384150574947258e-06,
"loss": 0.6849,
"step": 5430
},
{
"epoch": 0.7255993864416953,
"grad_norm": 2.399864840675308,
"learning_rate": 5.335507028151768e-06,
"loss": 0.6951,
"step": 5440
},
{
"epoch": 0.7269332088432425,
"grad_norm": 3.6271676784278446,
"learning_rate": 5.2870366368486074e-06,
"loss": 0.6634,
"step": 5450
},
{
"epoch": 0.7282670312447898,
"grad_norm": 3.012305295358332,
"learning_rate": 5.238740269465584e-06,
"loss": 0.6854,
"step": 5460
},
{
"epoch": 0.729600853646337,
"grad_norm": 2.9775903083692743,
"learning_rate": 5.190618791312581e-06,
"loss": 0.6886,
"step": 5470
},
{
"epoch": 0.7309346760478842,
"grad_norm": 2.031808277584667,
"learning_rate": 5.142673064566048e-06,
"loss": 0.7037,
"step": 5480
},
{
"epoch": 0.7322684984494314,
"grad_norm": 2.385560199356159,
"learning_rate": 5.094903948253557e-06,
"loss": 0.6927,
"step": 5490
},
{
"epoch": 0.7336023208509787,
"grad_norm": 26.12658744990137,
"learning_rate": 5.047312298238407e-06,
"loss": 0.6794,
"step": 5500
},
{
"epoch": 0.7349361432525259,
"grad_norm": 2.8502664207277917,
"learning_rate": 4.999898967204293e-06,
"loss": 0.673,
"step": 5510
},
{
"epoch": 0.7362699656540732,
"grad_norm": 16.84484248254399,
"learning_rate": 4.952664804640032e-06,
"loss": 0.6128,
"step": 5520
},
{
"epoch": 0.7376037880556204,
"grad_norm": 2.0315711134402465,
"learning_rate": 4.905610656824338e-06,
"loss": 0.6681,
"step": 5530
},
{
"epoch": 0.7389376104571677,
"grad_norm": 1.317875706995677,
"learning_rate": 4.858737366810661e-06,
"loss": 0.7112,
"step": 5540
},
{
"epoch": 0.7402714328587149,
"grad_norm": 2.4041957260658706,
"learning_rate": 4.812045774412074e-06,
"loss": 0.6377,
"step": 5550
},
{
"epoch": 0.7416052552602621,
"grad_norm": 5.928591821737227,
"learning_rate": 4.765536716186247e-06,
"loss": 0.7185,
"step": 5560
},
{
"epoch": 0.7429390776618093,
"grad_norm": 1.9918891832979637,
"learning_rate": 4.719211025420436e-06,
"loss": 0.6964,
"step": 5570
},
{
"epoch": 0.7442729000633566,
"grad_norm": 2.4824798841324114,
"learning_rate": 4.673069532116575e-06,
"loss": 0.6997,
"step": 5580
},
{
"epoch": 0.7456067224649038,
"grad_norm": 7.278229049915051,
"learning_rate": 4.627113062976379e-06,
"loss": 0.6518,
"step": 5590
},
{
"epoch": 0.746940544866451,
"grad_norm": 3.4509079664411044,
"learning_rate": 4.581342441386563e-06,
"loss": 0.7024,
"step": 5600
},
{
"epoch": 0.7482743672679982,
"grad_norm": 7.5846488007303225,
"learning_rate": 4.53575848740406e-06,
"loss": 0.6844,
"step": 5610
},
{
"epoch": 0.7496081896695455,
"grad_norm": 2.3829685847956634,
"learning_rate": 4.490362017741346e-06,
"loss": 0.6766,
"step": 5620
},
{
"epoch": 0.7509420120710927,
"grad_norm": 4.369992251181989,
"learning_rate": 4.445153845751808e-06,
"loss": 0.684,
"step": 5630
},
{
"epoch": 0.75227583447264,
"grad_norm": 9.195608284988928,
"learning_rate": 4.4001347814151625e-06,
"loss": 0.6831,
"step": 5640
},
{
"epoch": 0.7536096568741872,
"grad_norm": 2.441318748431944,
"learning_rate": 4.355305631322943e-06,
"loss": 0.6806,
"step": 5650
},
{
"epoch": 0.7549434792757345,
"grad_norm": 4.4040645292788065,
"learning_rate": 4.31066719866406e-06,
"loss": 0.6593,
"step": 5660
},
{
"epoch": 0.7562773016772817,
"grad_norm": 1.9804236051391217,
"learning_rate": 4.266220283210403e-06,
"loss": 0.7339,
"step": 5670
},
{
"epoch": 0.7576111240788289,
"grad_norm": 2.312746498785646,
"learning_rate": 4.221965681302506e-06,
"loss": 0.7081,
"step": 5680
},
{
"epoch": 0.7589449464803761,
"grad_norm": 7.515558254397616,
"learning_rate": 4.177904185835289e-06,
"loss": 0.6329,
"step": 5690
},
{
"epoch": 0.7602787688819234,
"grad_norm": 21.62874151948737,
"learning_rate": 4.134036586243852e-06,
"loss": 0.6996,
"step": 5700
},
{
"epoch": 0.7616125912834706,
"grad_norm": 3.1838537566371268,
"learning_rate": 4.0903636684893205e-06,
"loss": 0.6415,
"step": 5710
},
{
"epoch": 0.7629464136850178,
"grad_norm": 2.3997890379693403,
"learning_rate": 4.046886215044773e-06,
"loss": 0.6584,
"step": 5720
},
{
"epoch": 0.764280236086565,
"grad_norm": 1.452184967833434,
"learning_rate": 4.003605004881224e-06,
"loss": 0.6836,
"step": 5730
},
{
"epoch": 0.7656140584881123,
"grad_norm": 1.7135912376893925,
"learning_rate": 3.960520813453654e-06,
"loss": 0.6837,
"step": 5740
},
{
"epoch": 0.7669478808896596,
"grad_norm": 1.7888495305998886,
"learning_rate": 3.917634412687132e-06,
"loss": 0.7189,
"step": 5750
},
{
"epoch": 0.7682817032912068,
"grad_norm": 2.709363408927367,
"learning_rate": 3.874946570962977e-06,
"loss": 0.6595,
"step": 5760
},
{
"epoch": 0.769615525692754,
"grad_norm": 4.945992035905262,
"learning_rate": 3.832458053104985e-06,
"loss": 0.6135,
"step": 5770
},
{
"epoch": 0.7709493480943013,
"grad_norm": 4.370604350465238,
"learning_rate": 3.790169620365742e-06,
"loss": 0.6837,
"step": 5780
},
{
"epoch": 0.7722831704958485,
"grad_norm": 3.4065245071530708,
"learning_rate": 3.748082030412971e-06,
"loss": 0.66,
"step": 5790
},
{
"epoch": 0.7736169928973957,
"grad_norm": 1.4628266548461222,
"learning_rate": 3.7061960373159603e-06,
"loss": 0.7313,
"step": 5800
},
{
"epoch": 0.7749508152989429,
"grad_norm": 6.268190995276985,
"learning_rate": 3.66451239153206e-06,
"loss": 0.6803,
"step": 5810
},
{
"epoch": 0.7762846377004902,
"grad_norm": 3.813334139265851,
"learning_rate": 3.623031839893226e-06,
"loss": 0.6324,
"step": 5820
},
{
"epoch": 0.7776184601020374,
"grad_norm": 3.4484673067091554,
"learning_rate": 3.5817551255926473e-06,
"loss": 0.7098,
"step": 5830
},
{
"epoch": 0.7789522825035846,
"grad_norm": 4.283588522546624,
"learning_rate": 3.5406829881714254e-06,
"loss": 0.6927,
"step": 5840
},
{
"epoch": 0.7802861049051318,
"grad_norm": 257.50596170283126,
"learning_rate": 3.4998161635053274e-06,
"loss": 0.6828,
"step": 5850
},
{
"epoch": 0.7816199273066791,
"grad_norm": 2.8137402660823447,
"learning_rate": 3.459155383791601e-06,
"loss": 0.693,
"step": 5860
},
{
"epoch": 0.7829537497082264,
"grad_norm": 52.240446102463316,
"learning_rate": 3.4187013775358515e-06,
"loss": 0.6784,
"step": 5870
},
{
"epoch": 0.7842875721097736,
"grad_norm": 3.491897058890089,
"learning_rate": 3.3784548695389993e-06,
"loss": 0.6649,
"step": 5880
},
{
"epoch": 0.7856213945113208,
"grad_norm": 3.470103506693708,
"learning_rate": 3.338416580884284e-06,
"loss": 0.7164,
"step": 5890
},
{
"epoch": 0.7869552169128681,
"grad_norm": 7.94683049197377,
"learning_rate": 3.2985872289243466e-06,
"loss": 0.6886,
"step": 5900
},
{
"epoch": 0.7882890393144153,
"grad_norm": 3.545168697287073,
"learning_rate": 3.2589675272683855e-06,
"loss": 0.6441,
"step": 5910
},
{
"epoch": 0.7896228617159625,
"grad_norm": 4.898495906499881,
"learning_rate": 3.2195581857693595e-06,
"loss": 0.6705,
"step": 5920
},
{
"epoch": 0.7909566841175097,
"grad_norm": 1.8938102057930186,
"learning_rate": 3.180359910511275e-06,
"loss": 0.6599,
"step": 5930
},
{
"epoch": 0.792290506519057,
"grad_norm": 2.4611885063893735,
"learning_rate": 3.1413734037965386e-06,
"loss": 0.7021,
"step": 5940
},
{
"epoch": 0.7936243289206042,
"grad_norm": 2.1566765404798995,
"learning_rate": 3.102599364133366e-06,
"loss": 0.6646,
"step": 5950
},
{
"epoch": 0.7949581513221514,
"grad_norm": 2.799686404105686,
"learning_rate": 3.0640384862232756e-06,
"loss": 0.7032,
"step": 5960
},
{
"epoch": 0.7962919737236986,
"grad_norm": 2.436938000960657,
"learning_rate": 3.0256914609486367e-06,
"loss": 0.6924,
"step": 5970
},
{
"epoch": 0.797625796125246,
"grad_norm": 2.518426508043468,
"learning_rate": 2.9875589753602926e-06,
"loss": 0.6996,
"step": 5980
},
{
"epoch": 0.7989596185267932,
"grad_norm": 1.7862960548950564,
"learning_rate": 2.9496417126652476e-06,
"loss": 0.6573,
"step": 5990
},
{
"epoch": 0.8002934409283404,
"grad_norm": 3.6683752385169055,
"learning_rate": 2.911940352214437e-06,
"loss": 0.6958,
"step": 6000
},
{
"epoch": 0.8016272633298877,
"grad_norm": 2.093197194469284,
"learning_rate": 2.874455569490535e-06,
"loss": 0.64,
"step": 6010
},
{
"epoch": 0.8029610857314349,
"grad_norm": 1.8824775366183797,
"learning_rate": 2.8371880360958764e-06,
"loss": 0.6754,
"step": 6020
},
{
"epoch": 0.8042949081329821,
"grad_norm": 4.264170639774772,
"learning_rate": 2.800138419740408e-06,
"loss": 0.6511,
"step": 6030
},
{
"epoch": 0.8056287305345293,
"grad_norm": 1.402757631394318,
"learning_rate": 2.76330738422973e-06,
"loss": 0.6277,
"step": 6040
},
{
"epoch": 0.8069625529360765,
"grad_norm": 4.868808041815458,
"learning_rate": 2.7266955894532046e-06,
"loss": 0.6707,
"step": 6050
},
{
"epoch": 0.8082963753376238,
"grad_norm": 2.4343019682637714,
"learning_rate": 2.6903036913721285e-06,
"loss": 0.6736,
"step": 6060
},
{
"epoch": 0.809630197739171,
"grad_norm": 4.633267822534332,
"learning_rate": 2.6541323420079832e-06,
"loss": 0.7228,
"step": 6070
},
{
"epoch": 0.8109640201407182,
"grad_norm": 3.1749658417553843,
"learning_rate": 2.6181821894307534e-06,
"loss": 0.6677,
"step": 6080
},
{
"epoch": 0.8122978425422654,
"grad_norm": 4.472522983481309,
"learning_rate": 2.582453877747313e-06,
"loss": 0.6913,
"step": 6090
},
{
"epoch": 0.8136316649438128,
"grad_norm": 2.1838265228402074,
"learning_rate": 2.546948047089889e-06,
"loss": 0.6761,
"step": 6100
},
{
"epoch": 0.81496548734536,
"grad_norm": 2.8085691320961255,
"learning_rate": 2.5116653336045905e-06,
"loss": 0.6753,
"step": 6110
},
{
"epoch": 0.8162993097469072,
"grad_norm": 3.8295604272286785,
"learning_rate": 2.4766063694400064e-06,
"loss": 0.6811,
"step": 6120
},
{
"epoch": 0.8176331321484545,
"grad_norm": 7.960471486835727,
"learning_rate": 2.4417717827358895e-06,
"loss": 0.7005,
"step": 6130
},
{
"epoch": 0.8189669545500017,
"grad_norm": 3.169705405280151,
"learning_rate": 2.4071621976118928e-06,
"loss": 0.6861,
"step": 6140
},
{
"epoch": 0.8203007769515489,
"grad_norm": 4.24135068504656,
"learning_rate": 2.3727782341563915e-06,
"loss": 0.6636,
"step": 6150
},
{
"epoch": 0.8216345993530961,
"grad_norm": 1.9783804754132144,
"learning_rate": 2.3386205084153754e-06,
"loss": 0.6685,
"step": 6160
},
{
"epoch": 0.8229684217546434,
"grad_norm": 3.639094477144888,
"learning_rate": 2.304689632381407e-06,
"loss": 0.6876,
"step": 6170
},
{
"epoch": 0.8243022441561906,
"grad_norm": 2.8628953619038278,
"learning_rate": 2.2709862139826554e-06,
"loss": 0.6883,
"step": 6180
},
{
"epoch": 0.8256360665577378,
"grad_norm": 3.608349410822219,
"learning_rate": 2.237510857072013e-06,
"loss": 0.6932,
"step": 6190
},
{
"epoch": 0.826969888959285,
"grad_norm": 3.7496715401806493,
"learning_rate": 2.204264161416265e-06,
"loss": 0.6597,
"step": 6200
},
{
"epoch": 0.8283037113608323,
"grad_norm": 2.624308267913715,
"learning_rate": 2.171246722685354e-06,
"loss": 0.6713,
"step": 6210
},
{
"epoch": 0.8296375337623796,
"grad_norm": 2.0830735108163716,
"learning_rate": 2.1384591324417e-06,
"loss": 0.7392,
"step": 6220
},
{
"epoch": 0.8309713561639268,
"grad_norm": 1.5631711662762702,
"learning_rate": 2.1059019781296073e-06,
"loss": 0.7079,
"step": 6230
},
{
"epoch": 0.832305178565474,
"grad_norm": 6.203593584710221,
"learning_rate": 2.0735758430647316e-06,
"loss": 0.6517,
"step": 6240
},
{
"epoch": 0.8336390009670213,
"grad_norm": 3.1074307751483166,
"learning_rate": 2.041481306423638e-06,
"loss": 0.6712,
"step": 6250
},
{
"epoch": 0.8349728233685685,
"grad_norm": 2.3524003168080894,
"learning_rate": 2.0096189432334194e-06,
"loss": 0.6836,
"step": 6260
},
{
"epoch": 0.8363066457701157,
"grad_norm": 2.1471924659517234,
"learning_rate": 1.977989324361394e-06,
"loss": 0.6734,
"step": 6270
},
{
"epoch": 0.8376404681716629,
"grad_norm": 3.046975158610175,
"learning_rate": 1.946593016504877e-06,
"loss": 0.6724,
"step": 6280
},
{
"epoch": 0.8389742905732102,
"grad_norm": 4.930848095353691,
"learning_rate": 1.915430582181031e-06,
"loss": 0.6612,
"step": 6290
},
{
"epoch": 0.8403081129747574,
"grad_norm": 2.44008313756544,
"learning_rate": 1.8845025797167792e-06,
"loss": 0.6969,
"step": 6300
},
{
"epoch": 0.8416419353763046,
"grad_norm": 1.185846331764837,
"learning_rate": 1.8538095632388135e-06,
"loss": 0.7329,
"step": 6310
},
{
"epoch": 0.8429757577778518,
"grad_norm": 2.9014925831725087,
"learning_rate": 1.8233520826636563e-06,
"loss": 0.6814,
"step": 6320
},
{
"epoch": 0.8443095801793992,
"grad_norm": 3.389270001922989,
"learning_rate": 1.7931306836878154e-06,
"loss": 0.6671,
"step": 6330
},
{
"epoch": 0.8456434025809464,
"grad_norm": 1.82574503597048,
"learning_rate": 1.763145907777997e-06,
"loss": 0.6834,
"step": 6340
},
{
"epoch": 0.8469772249824936,
"grad_norm": 1.7341163373637425,
"learning_rate": 1.7333982921614194e-06,
"loss": 0.7042,
"step": 6350
},
{
"epoch": 0.8483110473840408,
"grad_norm": 2.0845613547866892,
"learning_rate": 1.703888369816174e-06,
"loss": 0.7236,
"step": 6360
},
{
"epoch": 0.8496448697855881,
"grad_norm": 2.696705599135624,
"learning_rate": 1.6746166694616821e-06,
"loss": 0.6741,
"step": 6370
},
{
"epoch": 0.8509786921871353,
"grad_norm": 3.2442263156108355,
"learning_rate": 1.6455837155492198e-06,
"loss": 0.6609,
"step": 6380
},
{
"epoch": 0.8523125145886825,
"grad_norm": 15.25195750196025,
"learning_rate": 1.616790028252526e-06,
"loss": 0.6984,
"step": 6390
},
{
"epoch": 0.8536463369902297,
"grad_norm": 2.0032662899755107,
"learning_rate": 1.5882361234584758e-06,
"loss": 0.6474,
"step": 6400
},
{
"epoch": 0.854980159391777,
"grad_norm": 2.1080148338215783,
"learning_rate": 1.559922512757847e-06,
"loss": 0.7035,
"step": 6410
},
{
"epoch": 0.8563139817933242,
"grad_norm": 6.9703452161898936,
"learning_rate": 1.5318497034361435e-06,
"loss": 0.6846,
"step": 6420
},
{
"epoch": 0.8576478041948714,
"grad_norm": 1.8937799281408725,
"learning_rate": 1.5040181984645112e-06,
"loss": 0.6981,
"step": 6430
},
{
"epoch": 0.8589816265964186,
"grad_norm": 3.1181183383646456,
"learning_rate": 1.4764284964907287e-06,
"loss": 0.7374,
"step": 6440
},
{
"epoch": 0.860315448997966,
"grad_norm": 2.387192199998032,
"learning_rate": 1.449081091830271e-06,
"loss": 0.6532,
"step": 6450
},
{
"epoch": 0.8616492713995132,
"grad_norm": 1.2875389574693819,
"learning_rate": 1.421976474457456e-06,
"loss": 0.654,
"step": 6460
},
{
"epoch": 0.8629830938010604,
"grad_norm": 2.987345071390569,
"learning_rate": 1.3951151299966526e-06,
"loss": 0.657,
"step": 6470
},
{
"epoch": 0.8643169162026076,
"grad_norm": 7.924875387180782,
"learning_rate": 1.3684975397135996e-06,
"loss": 0.7055,
"step": 6480
},
{
"epoch": 0.8656507386041549,
"grad_norm": 3.595776724489491,
"learning_rate": 1.3421241805067714e-06,
"loss": 0.6506,
"step": 6490
},
{
"epoch": 0.8669845610057021,
"grad_norm": 4.151439205775275,
"learning_rate": 1.3159955248988354e-06,
"loss": 0.6824,
"step": 6500
},
{
"epoch": 0.8683183834072493,
"grad_norm": 4.720298664536162,
"learning_rate": 1.2901120410281864e-06,
"loss": 0.6537,
"step": 6510
},
{
"epoch": 0.8696522058087965,
"grad_norm": 2.027933553114995,
"learning_rate": 1.2644741926405595e-06,
"loss": 0.6639,
"step": 6520
},
{
"epoch": 0.8709860282103438,
"grad_norm": 1.5286151248202084,
"learning_rate": 1.2390824390807204e-06,
"loss": 0.6841,
"step": 6530
},
{
"epoch": 0.872319850611891,
"grad_norm": 7.125525471902575,
"learning_rate": 1.2139372352842331e-06,
"loss": 0.6997,
"step": 6540
},
{
"epoch": 0.8736536730134382,
"grad_norm": 18.577442461442402,
"learning_rate": 1.189039031769319e-06,
"loss": 0.6865,
"step": 6550
},
{
"epoch": 0.8749874954149855,
"grad_norm": 1.4395699731820535,
"learning_rate": 1.1643882746287738e-06,
"loss": 0.6516,
"step": 6560
},
{
"epoch": 0.8763213178165328,
"grad_norm": 1.9996046629229984,
"learning_rate": 1.139985405521975e-06,
"loss": 0.6732,
"step": 6570
},
{
"epoch": 0.87765514021808,
"grad_norm": 2.569598252329582,
"learning_rate": 1.1158308616669777e-06,
"loss": 0.6844,
"step": 6580
},
{
"epoch": 0.8789889626196272,
"grad_norm": 2.631790460819988,
"learning_rate": 1.0919250758326777e-06,
"loss": 0.7076,
"step": 6590
},
{
"epoch": 0.8803227850211744,
"grad_norm": 3.1429104057356527,
"learning_rate": 1.0682684763310541e-06,
"loss": 0.7268,
"step": 6600
},
{
"epoch": 0.8816566074227217,
"grad_norm": 1.5952005703897327,
"learning_rate": 1.0448614870094975e-06,
"loss": 0.6776,
"step": 6610
},
{
"epoch": 0.8829904298242689,
"grad_norm": 2.510103599264486,
"learning_rate": 1.0217045272432174e-06,
"loss": 0.6659,
"step": 6620
},
{
"epoch": 0.8843242522258161,
"grad_norm": 3.0709555537325004,
"learning_rate": 9.98798011927725e-07,
"loss": 0.6826,
"step": 6630
},
{
"epoch": 0.8856580746273633,
"grad_norm": 2.9098067310557187,
"learning_rate": 9.76142351471408e-07,
"loss": 0.6804,
"step": 6640
},
{
"epoch": 0.8869918970289106,
"grad_norm": 2.0448969244687274,
"learning_rate": 9.537379517881633e-07,
"loss": 0.6805,
"step": 6650
},
{
"epoch": 0.8883257194304578,
"grad_norm": 3.7398026737645247,
"learning_rate": 9.315852142901366e-07,
"loss": 0.6524,
"step": 6660
},
{
"epoch": 0.889659541832005,
"grad_norm": 5.463023648161437,
"learning_rate": 9.096845358805278e-07,
"loss": 0.6834,
"step": 6670
},
{
"epoch": 0.8909933642335524,
"grad_norm": 3.384032982956332,
"learning_rate": 8.880363089464749e-07,
"loss": 0.6494,
"step": 6680
},
{
"epoch": 0.8923271866350996,
"grad_norm": 2.184269727099347,
"learning_rate": 8.666409213520305e-07,
"loss": 0.6632,
"step": 6690
},
{
"epoch": 0.8936610090366468,
"grad_norm": 2.0226144184429615,
"learning_rate": 8.454987564312039e-07,
"loss": 0.6678,
"step": 6700
},
{
"epoch": 0.894994831438194,
"grad_norm": 1.8831171446192312,
"learning_rate": 8.246101929811056e-07,
"loss": 0.7292,
"step": 6710
},
{
"epoch": 0.8963286538397413,
"grad_norm": 3.3161277339523854,
"learning_rate": 8.039756052551473e-07,
"loss": 0.6667,
"step": 6720
},
{
"epoch": 0.8976624762412885,
"grad_norm": 2.6296424685071758,
"learning_rate": 7.835953629563453e-07,
"loss": 0.6405,
"step": 6730
},
{
"epoch": 0.8989962986428357,
"grad_norm": 1.379168736705014,
"learning_rate": 7.634698312306915e-07,
"loss": 0.6955,
"step": 6740
},
{
"epoch": 0.9003301210443829,
"grad_norm": 3.1324270872259916,
"learning_rate": 7.435993706606197e-07,
"loss": 0.6788,
"step": 6750
},
{
"epoch": 0.9016639434459301,
"grad_norm": 2.296406704659335,
"learning_rate": 7.239843372585297e-07,
"loss": 0.7026,
"step": 6760
},
{
"epoch": 0.9029977658474774,
"grad_norm": 3.6436143643766012,
"learning_rate": 7.04625082460425e-07,
"loss": 0.6894,
"step": 6770
},
{
"epoch": 0.9043315882490246,
"grad_norm": 2.8228170417200955,
"learning_rate": 6.855219531196083e-07,
"loss": 0.6583,
"step": 6780
},
{
"epoch": 0.9056654106505718,
"grad_norm": 2.4528312750233985,
"learning_rate": 6.666752915004648e-07,
"loss": 0.637,
"step": 6790
},
{
"epoch": 0.9069992330521192,
"grad_norm": 2.553024077084152,
"learning_rate": 6.48085435272337e-07,
"loss": 0.6861,
"step": 6800
},
{
"epoch": 0.9083330554536664,
"grad_norm": 2.3466324438323385,
"learning_rate": 6.297527175034695e-07,
"loss": 0.6537,
"step": 6810
},
{
"epoch": 0.9096668778552136,
"grad_norm": 1.8344465329521413,
"learning_rate": 6.116774666550406e-07,
"loss": 0.6988,
"step": 6820
},
{
"epoch": 0.9110007002567608,
"grad_norm": 6.962306854472866,
"learning_rate": 5.938600065752836e-07,
"loss": 0.675,
"step": 6830
},
{
"epoch": 0.9123345226583081,
"grad_norm": 3.3732856352327585,
"learning_rate": 5.763006564936795e-07,
"loss": 0.6883,
"step": 6840
},
{
"epoch": 0.9136683450598553,
"grad_norm": 5.277637758684244,
"learning_rate": 5.589997310152389e-07,
"loss": 0.6773,
"step": 6850
},
{
"epoch": 0.9150021674614025,
"grad_norm": 2.85920456341858,
"learning_rate": 5.419575401148624e-07,
"loss": 0.6533,
"step": 6860
},
{
"epoch": 0.9163359898629497,
"grad_norm": 2.684183158725153,
"learning_rate": 5.251743891317923e-07,
"loss": 0.6474,
"step": 6870
},
{
"epoch": 0.917669812264497,
"grad_norm": 10.258889785991137,
"learning_rate": 5.086505787641399e-07,
"loss": 0.6894,
"step": 6880
},
{
"epoch": 0.9190036346660442,
"grad_norm": 2.0379895423718186,
"learning_rate": 4.923864050634952e-07,
"loss": 0.6868,
"step": 6890
},
{
"epoch": 0.9203374570675914,
"grad_norm": 6.293981602033667,
"learning_rate": 4.7638215942962584e-07,
"loss": 0.6876,
"step": 6900
},
{
"epoch": 0.9216712794691387,
"grad_norm": 9.224850669417195,
"learning_rate": 4.6063812860525635e-07,
"loss": 0.6366,
"step": 6910
},
{
"epoch": 0.923005101870686,
"grad_norm": 14.198601903726571,
"learning_rate": 4.45154594670924e-07,
"loss": 0.6831,
"step": 6920
},
{
"epoch": 0.9243389242722332,
"grad_norm": 3.5371506265712958,
"learning_rate": 4.299318350399395e-07,
"loss": 0.7072,
"step": 6930
},
{
"epoch": 0.9256727466737804,
"grad_norm": 2.7780830268564087,
"learning_rate": 4.149701224533975e-07,
"loss": 0.6843,
"step": 6940
},
{
"epoch": 0.9270065690753276,
"grad_norm": 1.4798946827712864,
"learning_rate": 4.0026972497530733e-07,
"loss": 0.7019,
"step": 6950
},
{
"epoch": 0.9283403914768749,
"grad_norm": 2.1229061581916513,
"learning_rate": 3.8583090598778013e-07,
"loss": 0.7288,
"step": 6960
},
{
"epoch": 0.9296742138784221,
"grad_norm": 12.695036054682456,
"learning_rate": 3.716539241863126e-07,
"loss": 0.6986,
"step": 6970
},
{
"epoch": 0.9310080362799693,
"grad_norm": 3.641243681504398,
"learning_rate": 3.577390335751507e-07,
"loss": 0.6737,
"step": 6980
},
{
"epoch": 0.9323418586815165,
"grad_norm": 16.49320427792961,
"learning_rate": 3.440864834627433e-07,
"loss": 0.642,
"step": 6990
},
{
"epoch": 0.9336756810830638,
"grad_norm": 2.552135441047015,
"learning_rate": 3.3069651845726913e-07,
"loss": 0.6881,
"step": 7000
},
{
"epoch": 0.935009503484611,
"grad_norm": 1.6630467999927316,
"learning_rate": 3.175693784622602e-07,
"loss": 0.6744,
"step": 7010
},
{
"epoch": 0.9363433258861582,
"grad_norm": 2.2762042117763723,
"learning_rate": 3.04705298672297e-07,
"loss": 0.6765,
"step": 7020
},
{
"epoch": 0.9376771482877055,
"grad_norm": 11.182715389122675,
"learning_rate": 2.9210450956880187e-07,
"loss": 0.6638,
"step": 7030
},
{
"epoch": 0.9390109706892528,
"grad_norm": 4.966179908778229,
"learning_rate": 2.7976723691590213e-07,
"loss": 0.6656,
"step": 7040
},
{
"epoch": 0.9403447930908,
"grad_norm": 4.753799725657891,
"learning_rate": 2.6769370175639186e-07,
"loss": 0.6486,
"step": 7050
},
{
"epoch": 0.9416786154923472,
"grad_norm": 2.3988972077316983,
"learning_rate": 2.5588412040776664e-07,
"loss": 0.7121,
"step": 7060
},
{
"epoch": 0.9430124378938944,
"grad_norm": 5.006017497136884,
"learning_rate": 2.443387044583534e-07,
"loss": 0.6992,
"step": 7070
},
{
"epoch": 0.9443462602954417,
"grad_norm": 16.572438608591884,
"learning_rate": 2.3305766076350987e-07,
"loss": 0.677,
"step": 7080
},
{
"epoch": 0.9456800826969889,
"grad_norm": 2.4983550381963537,
"learning_rate": 2.2204119144192957e-07,
"loss": 0.6566,
"step": 7090
},
{
"epoch": 0.9470139050985361,
"grad_norm": 2.7718761078532346,
"learning_rate": 2.1128949387201446e-07,
"loss": 0.6809,
"step": 7100
},
{
"epoch": 0.9483477275000833,
"grad_norm": 3.9021272188737504,
"learning_rate": 2.0080276068833447e-07,
"loss": 0.6956,
"step": 7110
},
{
"epoch": 0.9496815499016306,
"grad_norm": 3.56617105968243,
"learning_rate": 1.9058117977819034e-07,
"loss": 0.6761,
"step": 7120
},
{
"epoch": 0.9510153723031778,
"grad_norm": 2.3429918207277356,
"learning_rate": 1.8062493427822956e-07,
"loss": 0.7087,
"step": 7130
},
{
"epoch": 0.952349194704725,
"grad_norm": 2.087359454511869,
"learning_rate": 1.7093420257117743e-07,
"loss": 0.6937,
"step": 7140
},
{
"epoch": 0.9536830171062723,
"grad_norm": 3.3973528654843532,
"learning_rate": 1.6150915828263778e-07,
"loss": 0.7396,
"step": 7150
},
{
"epoch": 0.9550168395078196,
"grad_norm": 2.222236902135469,
"learning_rate": 1.523499702779807e-07,
"loss": 0.7013,
"step": 7160
},
{
"epoch": 0.9563506619093668,
"grad_norm": 3.3597499904262143,
"learning_rate": 1.4345680265931804e-07,
"loss": 0.6707,
"step": 7170
},
{
"epoch": 0.957684484310914,
"grad_norm": 1.2880682145085935,
"learning_rate": 1.3482981476256262e-07,
"loss": 0.6382,
"step": 7180
},
{
"epoch": 0.9590183067124612,
"grad_norm": 8.043345801476454,
"learning_rate": 1.2646916115457707e-07,
"loss": 0.6297,
"step": 7190
},
{
"epoch": 0.9603521291140085,
"grad_norm": 14.064430206072494,
"learning_rate": 1.1837499163039611e-07,
"loss": 0.6803,
"step": 7200
},
{
"epoch": 0.9616859515155557,
"grad_norm": 11.524691404236846,
"learning_rate": 1.1054745121055532e-07,
"loss": 0.7249,
"step": 7210
},
{
"epoch": 0.9630197739171029,
"grad_norm": 3.1625459026880365,
"learning_rate": 1.0298668013847823e-07,
"loss": 0.6648,
"step": 7220
},
{
"epoch": 0.9643535963186501,
"grad_norm": 2.1862826638592514,
"learning_rate": 9.569281387797668e-08,
"loss": 0.6901,
"step": 7230
},
{
"epoch": 0.9656874187201974,
"grad_norm": 3.236055831182351,
"learning_rate": 8.866598311081442e-08,
"loss": 0.7004,
"step": 7240
},
{
"epoch": 0.9670212411217446,
"grad_norm": 2.5502255171741273,
"learning_rate": 8.19063137343723e-08,
"loss": 0.6908,
"step": 7250
},
{
"epoch": 0.9683550635232919,
"grad_norm": 8.986162780686543,
"learning_rate": 7.541392685939009e-08,
"loss": 0.6706,
"step": 7260
},
{
"epoch": 0.9696888859248392,
"grad_norm": 1.3186397452163288,
"learning_rate": 6.918893880779154e-08,
"loss": 0.683,
"step": 7270
},
{
"epoch": 0.9710227083263864,
"grad_norm": 1.1408343188381653,
"learning_rate": 6.32314611106094e-08,
"loss": 0.6922,
"step": 7280
},
{
"epoch": 0.9723565307279336,
"grad_norm": 13.94913834949526,
"learning_rate": 5.754160050598367e-08,
"loss": 0.6884,
"step": 7290
},
{
"epoch": 0.9736903531294808,
"grad_norm": 5.205006119049316,
"learning_rate": 5.2119458937243126e-08,
"loss": 0.6682,
"step": 7300
},
{
"epoch": 0.975024175531028,
"grad_norm": 2.4232810584214635,
"learning_rate": 4.6965133551088447e-08,
"loss": 0.6621,
"step": 7310
},
{
"epoch": 0.9763579979325753,
"grad_norm": 2.245863821451424,
"learning_rate": 4.2078716695846954e-08,
"loss": 0.6355,
"step": 7320
},
{
"epoch": 0.9776918203341225,
"grad_norm": 1.6353933571614216,
"learning_rate": 3.746029591981559e-08,
"loss": 0.6886,
"step": 7330
},
{
"epoch": 0.9790256427356697,
"grad_norm": 7.457755835425968,
"learning_rate": 3.310995396969718e-08,
"loss": 0.64,
"step": 7340
},
{
"epoch": 0.980359465137217,
"grad_norm": 2.468156960638572,
"learning_rate": 2.902776878911495e-08,
"loss": 0.6677,
"step": 7350
},
{
"epoch": 0.9816932875387642,
"grad_norm": 8.250956624373028,
"learning_rate": 2.521381351721863e-08,
"loss": 0.7072,
"step": 7360
},
{
"epoch": 0.9830271099403114,
"grad_norm": 31.61426660587501,
"learning_rate": 2.166815648736886e-08,
"loss": 0.6619,
"step": 7370
},
{
"epoch": 0.9843609323418587,
"grad_norm": 4.256901242795981,
"learning_rate": 1.8390861225919818e-08,
"loss": 0.699,
"step": 7380
},
{
"epoch": 0.985694754743406,
"grad_norm": 3.308350753418023,
"learning_rate": 1.5381986451075138e-08,
"loss": 0.6577,
"step": 7390
},
{
"epoch": 0.9870285771449532,
"grad_norm": 2.2240538267647487,
"learning_rate": 1.2641586071840405e-08,
"loss": 0.6574,
"step": 7400
},
{
"epoch": 0.9883623995465004,
"grad_norm": 2.2372630201611554,
"learning_rate": 1.016970918705229e-08,
"loss": 0.7071,
"step": 7410
},
{
"epoch": 0.9896962219480476,
"grad_norm": 5.201937886584086,
"learning_rate": 7.966400084502556e-09,
"loss": 0.6908,
"step": 7420
},
{
"epoch": 0.9910300443495949,
"grad_norm": 3.9234547871794083,
"learning_rate": 6.0316982401470435e-09,
"loss": 0.66,
"step": 7430
},
{
"epoch": 0.9923638667511421,
"grad_norm": 3.2854459546383668,
"learning_rate": 4.36563831739123e-09,
"loss": 0.6491,
"step": 7440
},
{
"epoch": 0.9936976891526893,
"grad_norm": 3.2663326683050244,
"learning_rate": 2.968250166472397e-09,
"loss": 0.6942,
"step": 7450
},
{
"epoch": 0.9950315115542365,
"grad_norm": 2.575991123154551,
"learning_rate": 1.8395588239300543e-09,
"loss": 0.7148,
"step": 7460
},
{
"epoch": 0.9963653339557837,
"grad_norm": 4.941844911668257,
"learning_rate": 9.795845121496338e-10,
"loss": 0.6914,
"step": 7470
},
{
"epoch": 0.997699156357331,
"grad_norm": 4.809970537757657,
"learning_rate": 3.8834263900111577e-10,
"loss": 0.6506,
"step": 7480
},
{
"epoch": 0.9990329787588783,
"grad_norm": 4.013342776577845,
"learning_rate": 6.584379757090808e-11,
"loss": 0.6896,
"step": 7490
},
{
"epoch": 0.9999666544399614,
"step": 7497,
"total_flos": 3.4155964525109576e+19,
"train_loss": 0.6886205464597541,
"train_runtime": 97305.2479,
"train_samples_per_second": 4.931,
"train_steps_per_second": 0.077
}
],
"logging_steps": 10,
"max_steps": 7497,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 400,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 3.4155964525109576e+19,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}