125k_refsom / checkpoint-1600 / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.21341158424755743,
"eval_steps": 500,
"global_step": 1600,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.001333822401547234,
"grad_norm": 72.53171450734499,
"learning_rate": 4e-06,
"loss": 1.0439,
"step": 10
},
{
"epoch": 0.002667644803094468,
"grad_norm": 7.72170877312484,
"learning_rate": 8e-06,
"loss": 1.0385,
"step": 20
},
{
"epoch": 0.004001467204641702,
"grad_norm": 28.037294890647996,
"learning_rate": 1.2e-05,
"loss": 1.0328,
"step": 30
},
{
"epoch": 0.005335289606188936,
"grad_norm": 10.980963099753343,
"learning_rate": 1.6e-05,
"loss": 0.9608,
"step": 40
},
{
"epoch": 0.00666911200773617,
"grad_norm": 9.853684997615185,
"learning_rate": 1.9999999999999998e-05,
"loss": 0.8941,
"step": 50
},
{
"epoch": 0.008002934409283404,
"grad_norm": 33.396994669126855,
"learning_rate": 2.4e-05,
"loss": 0.9174,
"step": 60
},
{
"epoch": 0.009336756810830639,
"grad_norm": 5.621190624979587,
"learning_rate": 2.8e-05,
"loss": 0.8146,
"step": 70
},
{
"epoch": 0.010670579212377872,
"grad_norm": 2.4646688601781843,
"learning_rate": 2.9999966406213696e-05,
"loss": 0.7774,
"step": 80
},
{
"epoch": 0.012004401613925107,
"grad_norm": 2.1272107013027215,
"learning_rate": 2.9999697656826056e-05,
"loss": 0.7426,
"step": 90
},
{
"epoch": 0.01333822401547234,
"grad_norm": 2.0001927521328575,
"learning_rate": 2.9999160162865885e-05,
"loss": 0.7251,
"step": 100
},
{
"epoch": 0.014672046417019574,
"grad_norm": 4.278494734910313,
"learning_rate": 2.9998353933963273e-05,
"loss": 0.7283,
"step": 110
},
{
"epoch": 0.016005868818566808,
"grad_norm": 84.74138622339441,
"learning_rate": 2.999727898456315e-05,
"loss": 0.721,
"step": 120
},
{
"epoch": 0.017339691220114042,
"grad_norm": 2.5596285292704533,
"learning_rate": 2.999593533392503e-05,
"loss": 0.7476,
"step": 130
},
{
"epoch": 0.018673513621661277,
"grad_norm": 3.167163073798878,
"learning_rate": 2.9994323006122654e-05,
"loss": 0.7701,
"step": 140
},
{
"epoch": 0.02000733602320851,
"grad_norm": 12.240132620099228,
"learning_rate": 2.9992442030043557e-05,
"loss": 0.7042,
"step": 150
},
{
"epoch": 0.021341158424755743,
"grad_norm": 2.266079140233148,
"learning_rate": 2.9990292439388565e-05,
"loss": 0.6945,
"step": 160
},
{
"epoch": 0.022674980826302978,
"grad_norm": 4.493189214061043,
"learning_rate": 2.9987874272671168e-05,
"loss": 0.7811,
"step": 170
},
{
"epoch": 0.024008803227850213,
"grad_norm": 3.8513915234854132,
"learning_rate": 2.9985187573216855e-05,
"loss": 0.7229,
"step": 180
},
{
"epoch": 0.025342625629397444,
"grad_norm": 2.1061250870336896,
"learning_rate": 2.998223238916232e-05,
"loss": 0.7253,
"step": 190
},
{
"epoch": 0.02667644803094468,
"grad_norm": 2.8326879963181373,
"learning_rate": 2.9979008773454618e-05,
"loss": 0.7576,
"step": 200
},
{
"epoch": 0.028010270432491914,
"grad_norm": 3.0294324007099402,
"learning_rate": 2.997551678385019e-05,
"loss": 0.7362,
"step": 210
},
{
"epoch": 0.02934409283403915,
"grad_norm": 1.5691302199679913,
"learning_rate": 2.997175648291384e-05,
"loss": 0.7212,
"step": 220
},
{
"epoch": 0.03067791523558638,
"grad_norm": 4.379709379404857,
"learning_rate": 2.996772793801763e-05,
"loss": 0.7382,
"step": 230
},
{
"epoch": 0.032011737637133615,
"grad_norm": 1.899348727085658,
"learning_rate": 2.996343122133965e-05,
"loss": 0.7056,
"step": 240
},
{
"epoch": 0.033345560038680847,
"grad_norm": 21.481584335801955,
"learning_rate": 2.9958866409862745e-05,
"loss": 0.697,
"step": 250
},
{
"epoch": 0.034679382440228085,
"grad_norm": 3.022034785435926,
"learning_rate": 2.9954033585373108e-05,
"loss": 0.69,
"step": 260
},
{
"epoch": 0.036013204841775316,
"grad_norm": 2.869274575136443,
"learning_rate": 2.994893283445885e-05,
"loss": 0.7026,
"step": 270
},
{
"epoch": 0.037347027243322554,
"grad_norm": 7.8785627268474,
"learning_rate": 2.9943564248508415e-05,
"loss": 0.7021,
"step": 280
},
{
"epoch": 0.038680849644869786,
"grad_norm": 4.450751706186503,
"learning_rate": 2.9937927923708966e-05,
"loss": 0.6814,
"step": 290
},
{
"epoch": 0.04001467204641702,
"grad_norm": 11.613541400971123,
"learning_rate": 2.993202396104465e-05,
"loss": 0.7043,
"step": 300
},
{
"epoch": 0.041348494447964255,
"grad_norm": 2.0602161594051536,
"learning_rate": 2.9925852466294795e-05,
"loss": 0.7073,
"step": 310
},
{
"epoch": 0.04268231684951149,
"grad_norm": 19.383133688390824,
"learning_rate": 2.9919413550032014e-05,
"loss": 0.6965,
"step": 320
},
{
"epoch": 0.04401613925105872,
"grad_norm": 7.9603948136759906,
"learning_rate": 2.991270732762022e-05,
"loss": 0.7029,
"step": 330
},
{
"epoch": 0.045349961652605957,
"grad_norm": 2.810319267000959,
"learning_rate": 2.990573391921255e-05,
"loss": 0.7412,
"step": 340
},
{
"epoch": 0.04668378405415319,
"grad_norm": 6.082487526538632,
"learning_rate": 2.989849344974924e-05,
"loss": 0.6913,
"step": 350
},
{
"epoch": 0.048017606455700426,
"grad_norm": 1.3651265332328906,
"learning_rate": 2.9890986048955368e-05,
"loss": 0.6802,
"step": 360
},
{
"epoch": 0.04935142885724766,
"grad_norm": 2.7766095141894023,
"learning_rate": 2.9883211851338516e-05,
"loss": 0.7254,
"step": 370
},
{
"epoch": 0.05068525125879489,
"grad_norm": 2.2284551774435033,
"learning_rate": 2.9875170996186392e-05,
"loss": 0.7026,
"step": 380
},
{
"epoch": 0.05201907366034213,
"grad_norm": 2.1609680967998957,
"learning_rate": 2.986686362756431e-05,
"loss": 0.7415,
"step": 390
},
{
"epoch": 0.05335289606188936,
"grad_norm": 2.071743222471935,
"learning_rate": 2.9858289894312617e-05,
"loss": 0.7081,
"step": 400
},
{
"epoch": 0.0546867184634366,
"grad_norm": 7.349693198515479,
"learning_rate": 2.9849449950044036e-05,
"loss": 0.7212,
"step": 410
},
{
"epoch": 0.05602054086498383,
"grad_norm": 12.990346405367537,
"learning_rate": 2.984034395314088e-05,
"loss": 0.7187,
"step": 420
},
{
"epoch": 0.05735436326653106,
"grad_norm": 2.4106321189642723,
"learning_rate": 2.983097206675227e-05,
"loss": 0.6881,
"step": 430
},
{
"epoch": 0.0586881856680783,
"grad_norm": 3.1647778114563163,
"learning_rate": 2.9821334458791156e-05,
"loss": 0.7529,
"step": 440
},
{
"epoch": 0.06002200806962553,
"grad_norm": 2.7769936556882713,
"learning_rate": 2.9811431301931344e-05,
"loss": 0.7073,
"step": 450
},
{
"epoch": 0.06135583047117276,
"grad_norm": 2.834916196253906,
"learning_rate": 2.9801262773604377e-05,
"loss": 0.6649,
"step": 460
},
{
"epoch": 0.06268965287271999,
"grad_norm": 2.431783869557187,
"learning_rate": 2.9790829055996398e-05,
"loss": 0.6889,
"step": 470
},
{
"epoch": 0.06402347527426723,
"grad_norm": 3.7325728788625825,
"learning_rate": 2.978013033604483e-05,
"loss": 0.6771,
"step": 480
},
{
"epoch": 0.06535729767581447,
"grad_norm": 10.194734403509758,
"learning_rate": 2.976916680543506e-05,
"loss": 0.6917,
"step": 490
},
{
"epoch": 0.06669112007736169,
"grad_norm": 6.4699962355353655,
"learning_rate": 2.975793866059701e-05,
"loss": 0.6656,
"step": 500
},
{
"epoch": 0.06802494247890893,
"grad_norm": 7.664735661456941,
"learning_rate": 2.9746446102701606e-05,
"loss": 0.7268,
"step": 510
},
{
"epoch": 0.06935876488045617,
"grad_norm": 2.926294421836929,
"learning_rate": 2.9734689337657157e-05,
"loss": 0.7498,
"step": 520
},
{
"epoch": 0.07069258728200341,
"grad_norm": 8.95556138792813,
"learning_rate": 2.9722668576105703e-05,
"loss": 0.7237,
"step": 530
},
{
"epoch": 0.07202640968355063,
"grad_norm": 2.725769364825004,
"learning_rate": 2.971038403341921e-05,
"loss": 0.701,
"step": 540
},
{
"epoch": 0.07336023208509787,
"grad_norm": 2.458130413236654,
"learning_rate": 2.9697835929695727e-05,
"loss": 0.6775,
"step": 550
},
{
"epoch": 0.07469405448664511,
"grad_norm": 1.908510150241819,
"learning_rate": 2.968502448975544e-05,
"loss": 0.7042,
"step": 560
},
{
"epoch": 0.07602787688819233,
"grad_norm": 26.340984665432142,
"learning_rate": 2.967194994313663e-05,
"loss": 0.7234,
"step": 570
},
{
"epoch": 0.07736169928973957,
"grad_norm": 5.677162249188698,
"learning_rate": 2.9658612524091594e-05,
"loss": 0.7159,
"step": 580
},
{
"epoch": 0.07869552169128681,
"grad_norm": 1.7975075976512287,
"learning_rate": 2.9645012471582406e-05,
"loss": 0.7268,
"step": 590
},
{
"epoch": 0.08002934409283403,
"grad_norm": 3.031655038731278,
"learning_rate": 2.9631150029276662e-05,
"loss": 0.6904,
"step": 600
},
{
"epoch": 0.08136316649438127,
"grad_norm": 13.019769162242698,
"learning_rate": 2.9617025445543114e-05,
"loss": 0.683,
"step": 610
},
{
"epoch": 0.08269698889592851,
"grad_norm": 5.982097939523946,
"learning_rate": 2.9602638973447218e-05,
"loss": 0.7511,
"step": 620
},
{
"epoch": 0.08403081129747574,
"grad_norm": 2.7343884277014054,
"learning_rate": 2.9587990870746574e-05,
"loss": 0.7114,
"step": 630
},
{
"epoch": 0.08536463369902297,
"grad_norm": 5.586409371459215,
"learning_rate": 2.9573081399886356e-05,
"loss": 0.6907,
"step": 640
},
{
"epoch": 0.08669845610057021,
"grad_norm": 9.87255558712134,
"learning_rate": 2.9557910827994568e-05,
"loss": 0.7319,
"step": 650
},
{
"epoch": 0.08803227850211744,
"grad_norm": 3.9867937712251664,
"learning_rate": 2.9542479426877283e-05,
"loss": 0.6759,
"step": 660
},
{
"epoch": 0.08936610090366467,
"grad_norm": 3.111751395898768,
"learning_rate": 2.9526787473013753e-05,
"loss": 0.7086,
"step": 670
},
{
"epoch": 0.09069992330521191,
"grad_norm": 3.0578210081080535,
"learning_rate": 2.9510835247551485e-05,
"loss": 0.7209,
"step": 680
},
{
"epoch": 0.09203374570675915,
"grad_norm": 16.575220829800145,
"learning_rate": 2.949462303630116e-05,
"loss": 0.7125,
"step": 690
},
{
"epoch": 0.09336756810830638,
"grad_norm": 1.8236956939037314,
"learning_rate": 2.9478151129731567e-05,
"loss": 0.6591,
"step": 700
},
{
"epoch": 0.09470139050985361,
"grad_norm": 1.9746052093914703,
"learning_rate": 2.9461419822964348e-05,
"loss": 0.7048,
"step": 710
},
{
"epoch": 0.09603521291140085,
"grad_norm": 5.9710918845264,
"learning_rate": 2.9444429415768726e-05,
"loss": 0.725,
"step": 720
},
{
"epoch": 0.09736903531294808,
"grad_norm": 9.961068856364284,
"learning_rate": 2.942718021255617e-05,
"loss": 0.7155,
"step": 730
},
{
"epoch": 0.09870285771449532,
"grad_norm": 9.195914431100427,
"learning_rate": 2.940967252237488e-05,
"loss": 0.7171,
"step": 740
},
{
"epoch": 0.10003668011604255,
"grad_norm": 5.972259927477824,
"learning_rate": 2.9391906658904296e-05,
"loss": 0.7088,
"step": 750
},
{
"epoch": 0.10137050251758978,
"grad_norm": 3.0581670197906226,
"learning_rate": 2.937388294044946e-05,
"loss": 0.6645,
"step": 760
},
{
"epoch": 0.10270432491913702,
"grad_norm": 3.2614276687575945,
"learning_rate": 2.9355601689935315e-05,
"loss": 0.6997,
"step": 770
},
{
"epoch": 0.10403814732068425,
"grad_norm": 3.212661762840432,
"learning_rate": 2.933706323490092e-05,
"loss": 0.7005,
"step": 780
},
{
"epoch": 0.10537196972223148,
"grad_norm": 10.732038885056795,
"learning_rate": 2.9318267907493583e-05,
"loss": 0.6588,
"step": 790
},
{
"epoch": 0.10670579212377872,
"grad_norm": 2.495380329507228,
"learning_rate": 2.9299216044462903e-05,
"loss": 0.6965,
"step": 800
},
{
"epoch": 0.10803961452532596,
"grad_norm": 4.60295956813534,
"learning_rate": 2.927990798715475e-05,
"loss": 0.6959,
"step": 810
},
{
"epoch": 0.1093734369268732,
"grad_norm": 5.95010483593304,
"learning_rate": 2.926034408150513e-05,
"loss": 0.7083,
"step": 820
},
{
"epoch": 0.11070725932842042,
"grad_norm": 10.48663715000287,
"learning_rate": 2.9240524678034016e-05,
"loss": 0.6977,
"step": 830
},
{
"epoch": 0.11204108172996766,
"grad_norm": 3.159424497856185,
"learning_rate": 2.9220450131839037e-05,
"loss": 0.7129,
"step": 840
},
{
"epoch": 0.1133749041315149,
"grad_norm": 3.984530217756339,
"learning_rate": 2.920012080258912e-05,
"loss": 0.6618,
"step": 850
},
{
"epoch": 0.11470872653306212,
"grad_norm": 4.212436954015185,
"learning_rate": 2.9179537054518085e-05,
"loss": 0.6958,
"step": 860
},
{
"epoch": 0.11604254893460936,
"grad_norm": 2.5397356754848657,
"learning_rate": 2.9158699256418056e-05,
"loss": 0.6593,
"step": 870
},
{
"epoch": 0.1173763713361566,
"grad_norm": 7.8192219616904115,
"learning_rate": 2.9137607781632913e-05,
"loss": 0.6932,
"step": 880
},
{
"epoch": 0.11871019373770382,
"grad_norm": 3.890974772100691,
"learning_rate": 2.911626300805155e-05,
"loss": 0.674,
"step": 890
},
{
"epoch": 0.12004401613925106,
"grad_norm": 2.241784461612369,
"learning_rate": 2.9094665318101155e-05,
"loss": 0.6808,
"step": 900
},
{
"epoch": 0.1213778385407983,
"grad_norm": 2.8295407624221456,
"learning_rate": 2.9072815098740326e-05,
"loss": 0.7074,
"step": 910
},
{
"epoch": 0.12271166094234552,
"grad_norm": 2.8191065019379455,
"learning_rate": 2.9050712741452136e-05,
"loss": 0.7218,
"step": 920
},
{
"epoch": 0.12404548334389276,
"grad_norm": 2.1348105213857362,
"learning_rate": 2.902835864223715e-05,
"loss": 0.6901,
"step": 930
},
{
"epoch": 0.12537930574543998,
"grad_norm": 2.547712785034466,
"learning_rate": 2.9005753201606287e-05,
"loss": 0.7024,
"step": 940
},
{
"epoch": 0.12671312814698724,
"grad_norm": 6.0714243848770195,
"learning_rate": 2.8982896824573678e-05,
"loss": 0.665,
"step": 950
},
{
"epoch": 0.12804695054853446,
"grad_norm": 2.928297684665459,
"learning_rate": 2.8959789920649394e-05,
"loss": 0.7178,
"step": 960
},
{
"epoch": 0.12938077295008168,
"grad_norm": 9.456759373673547,
"learning_rate": 2.893643290383212e-05,
"loss": 0.6759,
"step": 970
},
{
"epoch": 0.13071459535162894,
"grad_norm": 3.9700721280634648,
"learning_rate": 2.891282619260172e-05,
"loss": 0.6701,
"step": 980
},
{
"epoch": 0.13204841775317616,
"grad_norm": 1.9216628539066036,
"learning_rate": 2.8888970209911754e-05,
"loss": 0.6725,
"step": 990
},
{
"epoch": 0.13338224015472339,
"grad_norm": 6.51104150647371,
"learning_rate": 2.8864865383181893e-05,
"loss": 0.6811,
"step": 1000
},
{
"epoch": 0.13471606255627064,
"grad_norm": 7.1765693144596705,
"learning_rate": 2.8840512144290273e-05,
"loss": 0.7085,
"step": 1010
},
{
"epoch": 0.13604988495781786,
"grad_norm": 4.136786758396937,
"learning_rate": 2.8815910929565734e-05,
"loss": 0.7128,
"step": 1020
},
{
"epoch": 0.1373837073593651,
"grad_norm": 3.407893493053672,
"learning_rate": 2.879106217978002e-05,
"loss": 0.6782,
"step": 1030
},
{
"epoch": 0.13871752976091234,
"grad_norm": 2.5759617833329544,
"learning_rate": 2.8765966340139892e-05,
"loss": 0.7387,
"step": 1040
},
{
"epoch": 0.14005135216245956,
"grad_norm": 8.496713788438253,
"learning_rate": 2.8740623860279116e-05,
"loss": 0.6988,
"step": 1050
},
{
"epoch": 0.14138517456400682,
"grad_norm": 1.438197837288145,
"learning_rate": 2.871503519425044e-05,
"loss": 0.6797,
"step": 1060
},
{
"epoch": 0.14271899696555404,
"grad_norm": 3.305947931404651,
"learning_rate": 2.8689200800517448e-05,
"loss": 0.727,
"step": 1070
},
{
"epoch": 0.14405281936710126,
"grad_norm": 4.384984204254061,
"learning_rate": 2.866312114194634e-05,
"loss": 0.7073,
"step": 1080
},
{
"epoch": 0.14538664176864852,
"grad_norm": 3.0424755710445455,
"learning_rate": 2.8636796685797657e-05,
"loss": 0.6712,
"step": 1090
},
{
"epoch": 0.14672046417019574,
"grad_norm": 3.7562374375017415,
"learning_rate": 2.8610227903717876e-05,
"loss": 0.6811,
"step": 1100
},
{
"epoch": 0.14805428657174297,
"grad_norm": 18.148320631618972,
"learning_rate": 2.8583415271730994e-05,
"loss": 0.669,
"step": 1110
},
{
"epoch": 0.14938810897329022,
"grad_norm": 10.095266883651389,
"learning_rate": 2.855635927022998e-05,
"loss": 0.7034,
"step": 1120
},
{
"epoch": 0.15072193137483744,
"grad_norm": 6.298167661594497,
"learning_rate": 2.8529060383968175e-05,
"loss": 0.6736,
"step": 1130
},
{
"epoch": 0.15205575377638467,
"grad_norm": 1.7112633381805116,
"learning_rate": 2.850151910205061e-05,
"loss": 0.7072,
"step": 1140
},
{
"epoch": 0.15338957617793192,
"grad_norm": 4.478017946548926,
"learning_rate": 2.847373591792523e-05,
"loss": 0.679,
"step": 1150
},
{
"epoch": 0.15472339857947914,
"grad_norm": 6.208223721690807,
"learning_rate": 2.844571132937407e-05,
"loss": 0.688,
"step": 1160
},
{
"epoch": 0.15605722098102637,
"grad_norm": 3.5734211349612974,
"learning_rate": 2.841744583850431e-05,
"loss": 0.7054,
"step": 1170
},
{
"epoch": 0.15739104338257362,
"grad_norm": 2.9418941665585874,
"learning_rate": 2.838893995173932e-05,
"loss": 0.7051,
"step": 1180
},
{
"epoch": 0.15872486578412084,
"grad_norm": 2.8796543096657055,
"learning_rate": 2.836019417980955e-05,
"loss": 0.6856,
"step": 1190
},
{
"epoch": 0.16005868818566807,
"grad_norm": 1.3218998593276705,
"learning_rate": 2.8331209037743387e-05,
"loss": 0.69,
"step": 1200
},
{
"epoch": 0.16139251058721532,
"grad_norm": 1.566163155963456,
"learning_rate": 2.8301985044857947e-05,
"loss": 0.6384,
"step": 1210
},
{
"epoch": 0.16272633298876255,
"grad_norm": 6.731577281026319,
"learning_rate": 2.8272522724749743e-05,
"loss": 0.7241,
"step": 1220
},
{
"epoch": 0.16406015539030977,
"grad_norm": 3.3521245738531813,
"learning_rate": 2.8242822605285323e-05,
"loss": 0.6931,
"step": 1230
},
{
"epoch": 0.16539397779185702,
"grad_norm": 11.826633616347152,
"learning_rate": 2.8212885218591812e-05,
"loss": 0.702,
"step": 1240
},
{
"epoch": 0.16672780019340425,
"grad_norm": 1.826627133990473,
"learning_rate": 2.8182711101047362e-05,
"loss": 0.7061,
"step": 1250
},
{
"epoch": 0.16806162259495147,
"grad_norm": 2.573810458184169,
"learning_rate": 2.815230079327156e-05,
"loss": 0.7369,
"step": 1260
},
{
"epoch": 0.16939544499649872,
"grad_norm": 3.2839708945300665,
"learning_rate": 2.8121654840115734e-05,
"loss": 0.6966,
"step": 1270
},
{
"epoch": 0.17072926739804595,
"grad_norm": 2.3799212816187136,
"learning_rate": 2.809077379065319e-05,
"loss": 0.6586,
"step": 1280
},
{
"epoch": 0.17206308979959317,
"grad_norm": 8.449506341181594,
"learning_rate": 2.805965819816937e-05,
"loss": 0.6759,
"step": 1290
},
{
"epoch": 0.17339691220114042,
"grad_norm": 19.59186920922371,
"learning_rate": 2.802830862015196e-05,
"loss": 0.6537,
"step": 1300
},
{
"epoch": 0.17473073460268765,
"grad_norm": 1.9822479021376653,
"learning_rate": 2.799672561828087e-05,
"loss": 0.6854,
"step": 1310
},
{
"epoch": 0.17606455700423487,
"grad_norm": 34.5695029919346,
"learning_rate": 2.79649097584182e-05,
"loss": 0.696,
"step": 1320
},
{
"epoch": 0.17739837940578213,
"grad_norm": 1.886275024045834,
"learning_rate": 2.7932861610598077e-05,
"loss": 0.7308,
"step": 1330
},
{
"epoch": 0.17873220180732935,
"grad_norm": 12.128310547971918,
"learning_rate": 2.7900581749016466e-05,
"loss": 0.6775,
"step": 1340
},
{
"epoch": 0.1800660242088766,
"grad_norm": 5.164562576250181,
"learning_rate": 2.7868070752020865e-05,
"loss": 0.7096,
"step": 1350
},
{
"epoch": 0.18139984661042383,
"grad_norm": 10.603161437547913,
"learning_rate": 2.7835329202099944e-05,
"loss": 0.7128,
"step": 1360
},
{
"epoch": 0.18273366901197105,
"grad_norm": 4.184026615605208,
"learning_rate": 2.7802357685873117e-05,
"loss": 0.6958,
"step": 1370
},
{
"epoch": 0.1840674914135183,
"grad_norm": 4.135408851494439,
"learning_rate": 2.7769156794080033e-05,
"loss": 0.6933,
"step": 1380
},
{
"epoch": 0.18540131381506553,
"grad_norm": 1.7535535971850658,
"learning_rate": 2.7735727121569967e-05,
"loss": 0.7088,
"step": 1390
},
{
"epoch": 0.18673513621661275,
"grad_norm": 5.354882317397714,
"learning_rate": 2.770206926729121e-05,
"loss": 0.6777,
"step": 1400
},
{
"epoch": 0.18806895861816,
"grad_norm": 3.2593978573810523,
"learning_rate": 2.7668183834280284e-05,
"loss": 0.6535,
"step": 1410
},
{
"epoch": 0.18940278101970723,
"grad_norm": 2.48929554913285,
"learning_rate": 2.763407142965117e-05,
"loss": 0.6853,
"step": 1420
},
{
"epoch": 0.19073660342125445,
"grad_norm": 7.550246976134938,
"learning_rate": 2.759973266458444e-05,
"loss": 0.679,
"step": 1430
},
{
"epoch": 0.1920704258228017,
"grad_norm": 5.033109901639584,
"learning_rate": 2.756516815431627e-05,
"loss": 0.6875,
"step": 1440
},
{
"epoch": 0.19340424822434893,
"grad_norm": 6.1146492204468545,
"learning_rate": 2.7530378518127445e-05,
"loss": 0.6762,
"step": 1450
},
{
"epoch": 0.19473807062589615,
"grad_norm": 72.48946246157955,
"learning_rate": 2.7495364379332256e-05,
"loss": 0.6766,
"step": 1460
},
{
"epoch": 0.1960718930274434,
"grad_norm": 2.9676182245937426,
"learning_rate": 2.7460126365267335e-05,
"loss": 0.6731,
"step": 1470
},
{
"epoch": 0.19740571542899063,
"grad_norm": 2.31300235137622,
"learning_rate": 2.7424665107280402e-05,
"loss": 0.6936,
"step": 1480
},
{
"epoch": 0.19873953783053785,
"grad_norm": 2.594823775876976,
"learning_rate": 2.738898124071898e-05,
"loss": 0.6703,
"step": 1490
},
{
"epoch": 0.2000733602320851,
"grad_norm": 2.8354629666044575,
"learning_rate": 2.735307540491898e-05,
"loss": 0.7022,
"step": 1500
},
{
"epoch": 0.20140718263363233,
"grad_norm": 4.553033073507926,
"learning_rate": 2.7316948243193273e-05,
"loss": 0.6948,
"step": 1510
},
{
"epoch": 0.20274100503517956,
"grad_norm": 10.052746707722667,
"learning_rate": 2.7280600402820146e-05,
"loss": 0.7216,
"step": 1520
},
{
"epoch": 0.2040748274367268,
"grad_norm": 2.3783106567256977,
"learning_rate": 2.724403253503171e-05,
"loss": 0.6955,
"step": 1530
},
{
"epoch": 0.20540864983827403,
"grad_norm": 2.26171864458522,
"learning_rate": 2.7207245295002242e-05,
"loss": 0.7062,
"step": 1540
},
{
"epoch": 0.20674247223982126,
"grad_norm": 3.223919319241339,
"learning_rate": 2.7170239341836436e-05,
"loss": 0.6659,
"step": 1550
},
{
"epoch": 0.2080762946413685,
"grad_norm": 1.6810934171118763,
"learning_rate": 2.7133015338557585e-05,
"loss": 0.7069,
"step": 1560
},
{
"epoch": 0.20941011704291573,
"grad_norm": 2.1432477935562515,
"learning_rate": 2.7095573952095727e-05,
"loss": 0.703,
"step": 1570
},
{
"epoch": 0.21074393944446296,
"grad_norm": 1.5654642597457342,
"learning_rate": 2.705791585327568e-05,
"loss": 0.7037,
"step": 1580
},
{
"epoch": 0.2120777618460102,
"grad_norm": 2.5797486712147992,
"learning_rate": 2.7020041716805014e-05,
"loss": 0.6945,
"step": 1590
},
{
"epoch": 0.21341158424755743,
"grad_norm": 6.993511462468879,
"learning_rate": 2.6981952221261986e-05,
"loss": 0.6965,
"step": 1600
}
],
"logging_steps": 10,
"max_steps": 7497,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 400,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 7.289521573986304e+18,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}
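
A minimal sketch of how one might inspect this trainer_state.json locally with only the Python standard library; the file path below is an assumption, and only keys visible in the dump above (global_step, epoch, log_history, step, loss, learning_rate, grad_norm) are read.

    import json

    # Assumed local path to the checkpoint file shown above.
    with open("checkpoint-1600/trainer_state.json") as f:
        state = json.load(f)

    print(f"global_step: {state['global_step']}  epoch: {state['epoch']:.4f}")

    # log_history holds one record per logging step (logging_steps = 10 here).
    for rec in state["log_history"]:
        if "loss" in rec:  # training-loss entries only
            print(f"step {rec['step']:>5}  loss {rec['loss']:.4f}  "
                  f"lr {rec['learning_rate']:.3e}  grad_norm {rec['grad_norm']:.2f}")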