{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.19745502413339183,
  "eval_steps": 50,
  "global_step": 150,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0013163668275559457,
      "grad_norm": 1.1889708603566411,
      "learning_rate": 2e-05,
      "loss": 1.819,
      "step": 1
    },
    {
      "epoch": 0.0026327336551118913,
      "grad_norm": 1.2487402928615228,
      "learning_rate": 1.999777729859618e-05,
      "loss": 1.786,
      "step": 2
    },
    {
      "epoch": 0.003949100482667837,
      "grad_norm": 11.818568585279303,
      "learning_rate": 1.9991110182465032e-05,
      "loss": 2.1123,
      "step": 3
    },
    {
      "epoch": 0.005265467310223783,
      "grad_norm": 9.670771499171282,
      "learning_rate": 1.9980001615408228e-05,
      "loss": 2.1052,
      "step": 4
    },
    {
      "epoch": 0.006581834137779728,
      "grad_norm": 6.7296227669578945,
      "learning_rate": 1.9964456535631287e-05,
      "loss": 2.0417,
      "step": 5
    },
    {
      "epoch": 0.007898200965335674,
      "grad_norm": 2.9490730461911254,
      "learning_rate": 1.9944481853548335e-05,
      "loss": 1.9756,
      "step": 6
    },
    {
      "epoch": 0.009214567792891619,
      "grad_norm": 2.8814574350373383,
      "learning_rate": 1.9920086448710162e-05,
      "loss": 1.9305,
      "step": 7
    },
    {
      "epoch": 0.010530934620447565,
      "grad_norm": 2.360259343454192,
      "learning_rate": 1.9891281165856876e-05,
      "loss": 1.9001,
      "step": 8
    },
    {
      "epoch": 0.01184730144800351,
      "grad_norm": 1.6478415946047946,
      "learning_rate": 1.9858078810097004e-05,
      "loss": 1.9285,
      "step": 9
    },
    {
      "epoch": 0.013163668275559455,
      "grad_norm": 1.865513115308653,
      "learning_rate": 1.98204941412151e-05,
      "loss": 1.9158,
      "step": 10
    },
    {
      "epoch": 0.014480035103115402,
      "grad_norm": 1.2039798356530171,
      "learning_rate": 1.9778543867110428e-05,
      "loss": 1.9177,
      "step": 11
    },
    {
      "epoch": 0.015796401930671347,
      "grad_norm": 1.1202568182839863,
      "learning_rate": 1.9732246636369605e-05,
      "loss": 1.9124,
      "step": 12
    },
    {
      "epoch": 0.017112768758227294,
      "grad_norm": 0.9613691948096944,
      "learning_rate": 1.968162302997659e-05,
      "loss": 1.9048,
      "step": 13
    },
    {
      "epoch": 0.018429135585783237,
      "grad_norm": 0.893717907926126,
      "learning_rate": 1.962669555216358e-05,
      "loss": 1.8905,
      "step": 14
    },
    {
      "epoch": 0.019745502413339184,
      "grad_norm": 0.9042103319398382,
      "learning_rate": 1.9567488620406984e-05,
      "loss": 1.9179,
      "step": 15
    },
    {
      "epoch": 0.02106186924089513,
      "grad_norm": 0.8537257879688175,
      "learning_rate": 1.9504028554572865e-05,
      "loss": 1.8956,
      "step": 16
    },
    {
      "epoch": 0.022378236068451074,
      "grad_norm": 0.7723152246718353,
      "learning_rate": 1.943634356521671e-05,
      "loss": 1.9106,
      "step": 17
    },
    {
      "epoch": 0.02369460289600702,
      "grad_norm": 0.788599031919039,
      "learning_rate": 1.9364463741042694e-05,
      "loss": 1.8714,
      "step": 18
    },
    {
      "epoch": 0.025010969723562967,
      "grad_norm": 0.7138782811411121,
      "learning_rate": 1.928842103552803e-05,
      "loss": 1.8805,
      "step": 19
    },
    {
      "epoch": 0.02632733655111891,
      "grad_norm": 0.6526275706400243,
      "learning_rate": 1.920824925271838e-05,
      "loss": 1.8992,
      "step": 20
    },
    {
      "epoch": 0.027643703378674857,
      "grad_norm": 0.7018291273940191,
      "learning_rate": 1.9123984032200586e-05,
      "loss": 1.8774,
      "step": 21
    },
    {
      "epoch": 0.028960070206230804,
      "grad_norm": 0.7136424568096796,
      "learning_rate": 1.9035662833259433e-05,
      "loss": 1.8978,
      "step": 22
    },
    {
      "epoch": 0.030276437033786747,
      "grad_norm": 0.7147862026041938,
      "learning_rate": 1.8943324918225495e-05,
      "loss": 1.8965,
      "step": 23
    },
    {
      "epoch": 0.031592803861342694,
      "grad_norm": 0.7049064442746601,
      "learning_rate": 1.8847011335021447e-05,
      "loss": 1.8831,
      "step": 24
    },
    {
      "epoch": 0.03290917068889864,
      "grad_norm": 0.6689369768605253,
      "learning_rate": 1.874676489891461e-05,
      "loss": 1.8881,
      "step": 25
    },
    {
      "epoch": 0.03422553751645459,
      "grad_norm": 0.6425593648258924,
      "learning_rate": 1.8642630173483832e-05,
      "loss": 1.889,
      "step": 26
    },
    {
      "epoch": 0.03554190434401053,
      "grad_norm": 0.6931260970545167,
      "learning_rate": 1.85346534508092e-05,
      "loss": 1.8936,
      "step": 27
    },
    {
      "epoch": 0.036858271171566474,
      "grad_norm": 0.5855894301604793,
      "learning_rate": 1.8422882730893323e-05,
      "loss": 1.9131,
      "step": 28
    },
    {
      "epoch": 0.03817463799912242,
      "grad_norm": 0.6403065763687231,
      "learning_rate": 1.8307367700323412e-05,
      "loss": 1.9104,
      "step": 29
    },
    {
      "epoch": 0.03949100482667837,
      "grad_norm": 0.667165710149791,
      "learning_rate": 1.8188159710183595e-05,
      "loss": 1.8807,
      "step": 30
    },
    {
      "epoch": 0.040807371654234315,
      "grad_norm": 0.601710689560651,
      "learning_rate": 1.8065311753227272e-05,
      "loss": 1.9261,
      "step": 31
    },
    {
      "epoch": 0.04212373848179026,
      "grad_norm": 0.6543848114887616,
      "learning_rate": 1.7938878440319722e-05,
      "loss": 1.9178,
      "step": 32
    },
    {
      "epoch": 0.0434401053093462,
      "grad_norm": 0.7216887863002542,
      "learning_rate": 1.7808915976161364e-05,
      "loss": 1.9212,
      "step": 33
    },
    {
      "epoch": 0.04475647213690215,
      "grad_norm": 0.6605220221485377,
      "learning_rate": 1.7675482134302503e-05,
      "loss": 1.9019,
      "step": 34
    },
    {
      "epoch": 0.046072838964458095,
      "grad_norm": 0.5651946270062499,
      "learning_rate": 1.753863623146066e-05,
      "loss": 1.9065,
      "step": 35
    },
    {
      "epoch": 0.04738920579201404,
      "grad_norm": 0.6283354866608959,
      "learning_rate": 1.7398439101151908e-05,
      "loss": 1.8926,
      "step": 36
    },
    {
      "epoch": 0.04870557261956999,
      "grad_norm": 0.591339280937863,
      "learning_rate": 1.7254953066647915e-05,
      "loss": 1.898,
      "step": 37
    },
    {
      "epoch": 0.050021939447125935,
      "grad_norm": 0.566891433067587,
      "learning_rate": 1.710824191327075e-05,
      "loss": 1.9314,
      "step": 38
    },
    {
      "epoch": 0.051338306274681875,
      "grad_norm": 0.5036303737690743,
      "learning_rate": 1.695837086003772e-05,
      "loss": 1.908,
      "step": 39
    },
    {
      "epoch": 0.05265467310223782,
      "grad_norm": 0.5732998210738149,
      "learning_rate": 1.680540653066891e-05,
      "loss": 1.907,
      "step": 40
    },
    {
      "epoch": 0.05397103992979377,
      "grad_norm": 0.600054956243109,
      "learning_rate": 1.6649416923970248e-05,
      "loss": 1.908,
      "step": 41
    },
    {
      "epoch": 0.055287406757349715,
      "grad_norm": 0.5780718337975314,
      "learning_rate": 1.649047138360529e-05,
      "loss": 1.9006,
      "step": 42
    },
    {
      "epoch": 0.05660377358490566,
      "grad_norm": 0.5998379136280807,
      "learning_rate": 1.632864056726917e-05,
      "loss": 1.9023,
      "step": 43
    },
    {
      "epoch": 0.05792014041246161,
      "grad_norm": 0.630710242272472,
      "learning_rate": 1.6163996415278423e-05,
      "loss": 1.9284,
      "step": 44
    },
    {
      "epoch": 0.05923650724001755,
      "grad_norm": 0.5104566997273228,
      "learning_rate": 1.5996612118590604e-05,
      "loss": 1.9089,
      "step": 45
    },
    {
      "epoch": 0.060552874067573495,
      "grad_norm": 0.6521533677979531,
      "learning_rate": 1.5826562086267956e-05,
      "loss": 1.9285,
      "step": 46
    },
    {
      "epoch": 0.06186924089512944,
      "grad_norm": 0.5098826190359927,
      "learning_rate": 1.565392191239959e-05,
      "loss": 1.916,
      "step": 47
    },
    {
      "epoch": 0.06318560772268539,
      "grad_norm": 0.62515291794207,
      "learning_rate": 1.5478768342496872e-05,
      "loss": 1.9069,
      "step": 48
    },
    {
      "epoch": 0.06450197455024133,
      "grad_norm": 0.5918382830263713,
      "learning_rate": 1.5301179239376936e-05,
      "loss": 1.9224,
      "step": 49
    },
    {
      "epoch": 0.06581834137779728,
      "grad_norm": 0.6387295926323716,
      "learning_rate": 1.512123354854955e-05,
      "loss": 1.9022,
      "step": 50
    },
    {
      "epoch": 0.06713470820535322,
      "grad_norm": 0.5484211324952757,
      "learning_rate": 1.4939011263122635e-05,
      "loss": 1.9024,
      "step": 51
    },
    {
      "epoch": 0.06845107503290918,
      "grad_norm": 0.6838268591901047,
      "learning_rate": 1.4754593388242117e-05,
      "loss": 1.9133,
      "step": 52
    },
    {
      "epoch": 0.06976744186046512,
      "grad_norm": 0.49026244529365287,
      "learning_rate": 1.4568061905081874e-05,
      "loss": 1.9044,
      "step": 53
    },
    {
      "epoch": 0.07108380868802106,
      "grad_norm": 0.617101630668953,
      "learning_rate": 1.4379499734399797e-05,
      "loss": 1.9176,
      "step": 54
    },
    {
      "epoch": 0.07240017551557701,
      "grad_norm": 0.5185658207760714,
      "learning_rate": 1.4188990699676186e-05,
      "loss": 1.915,
      "step": 55
    },
    {
      "epoch": 0.07371654234313295,
      "grad_norm": 0.5509987613832577,
      "learning_rate": 1.3996619489850822e-05,
      "loss": 1.9282,
      "step": 56
    },
    {
      "epoch": 0.0750329091706889,
      "grad_norm": 0.5404326350759733,
      "learning_rate": 1.3802471621675337e-05,
      "loss": 1.9121,
      "step": 57
    },
    {
      "epoch": 0.07634927599824484,
      "grad_norm": 0.45194626257894893,
      "learning_rate": 1.3606633401697557e-05,
      "loss": 1.9348,
      "step": 58
    },
    {
      "epoch": 0.0776656428258008,
      "grad_norm": 0.44319095497602445,
      "learning_rate": 1.340919188789477e-05,
      "loss": 1.9162,
      "step": 59
    },
    {
      "epoch": 0.07898200965335674,
      "grad_norm": 0.45917008340300286,
      "learning_rate": 1.3210234850972966e-05,
      "loss": 1.9349,
      "step": 60
    },
    {
      "epoch": 0.08029837648091268,
      "grad_norm": 0.4397437065658123,
      "learning_rate": 1.300985073534919e-05,
      "loss": 1.9344,
      "step": 61
    },
    {
      "epoch": 0.08161474330846863,
      "grad_norm": 0.43586654360169025,
      "learning_rate": 1.280812861983446e-05,
      "loss": 1.9144,
      "step": 62
    },
    {
      "epoch": 0.08293111013602457,
      "grad_norm": 0.4334759261856198,
      "learning_rate": 1.2605158178034656e-05,
      "loss": 1.9202,
      "step": 63
    },
    {
      "epoch": 0.08424747696358052,
      "grad_norm": 0.41957147456515964,
      "learning_rate": 1.2401029638486952e-05,
      "loss": 1.8986,
      "step": 64
    },
    {
      "epoch": 0.08556384379113646,
      "grad_norm": 0.41703972877231255,
      "learning_rate": 1.219583374454963e-05,
      "loss": 1.9367,
      "step": 65
    },
    {
      "epoch": 0.0868802106186924,
      "grad_norm": 0.3885734477047593,
      "learning_rate": 1.1989661714063e-05,
      "loss": 1.9536,
      "step": 66
    },
    {
      "epoch": 0.08819657744624836,
      "grad_norm": 0.3629396674080558,
      "learning_rate": 1.1782605198799371e-05,
      "loss": 1.942,
      "step": 67
    },
    {
      "epoch": 0.0895129442738043,
      "grad_norm": 0.41851931328687847,
      "learning_rate": 1.157475624372018e-05,
      "loss": 1.9149,
      "step": 68
    },
    {
      "epoch": 0.09082931110136025,
      "grad_norm": 0.38254555542206087,
      "learning_rate": 1.1366207246058269e-05,
      "loss": 1.9258,
      "step": 69
    },
    {
      "epoch": 0.09214567792891619,
      "grad_norm": 0.3760396216126684,
      "learning_rate": 1.1157050914243614e-05,
      "loss": 1.9395,
      "step": 70
    },
    {
      "epoch": 0.09346204475647214,
      "grad_norm": 0.3802671210811049,
      "learning_rate": 1.0947380226690686e-05,
      "loss": 1.9354,
      "step": 71
    },
    {
      "epoch": 0.09477841158402808,
      "grad_norm": 0.346627059727759,
      "learning_rate": 1.0737288390465792e-05,
      "loss": 1.9259,
      "step": 72
    },
    {
      "epoch": 0.09609477841158402,
      "grad_norm": 0.3873365911679245,
      "learning_rate": 1.0526868799852797e-05,
      "loss": 1.9493,
      "step": 73
    },
    {
      "epoch": 0.09741114523913998,
      "grad_norm": 0.3805418724228647,
      "learning_rate": 1.031621499483559e-05,
      "loss": 1.9429,
      "step": 74
    },
    {
      "epoch": 0.09872751206669592,
      "grad_norm": 0.3477296159832507,
      "learning_rate": 1.0105420619515798e-05,
      "loss": 1.9348,
      "step": 75
    },
    {
      "epoch": 0.10004387889425187,
      "grad_norm": 0.36081350820036023,
      "learning_rate": 9.894579380484206e-06,
      "loss": 1.951,
      "step": 76
    },
    {
      "epoch": 0.10136024572180781,
      "grad_norm": 0.3603157658181124,
      "learning_rate": 9.683785005164412e-06,
      "loss": 1.9568,
      "step": 77
    },
    {
      "epoch": 0.10267661254936375,
      "grad_norm": 0.3344964866298326,
      "learning_rate": 9.473131200147205e-06,
      "loss": 1.9635,
      "step": 78
    },
    {
      "epoch": 0.1039929793769197,
      "grad_norm": 0.3577372410086672,
      "learning_rate": 9.262711609534211e-06,
      "loss": 1.9493,
      "step": 79
    },
    {
      "epoch": 0.10530934620447564,
      "grad_norm": 0.32166763062706677,
      "learning_rate": 9.052619773309318e-06,
      "loss": 1.9416,
      "step": 80
    },
    {
      "epoch": 0.1066257130320316,
      "grad_norm": 0.3642098555682263,
      "learning_rate": 8.842949085756389e-06,
      "loss": 1.9375,
      "step": 81
    },
    {
      "epoch": 0.10794207985958754,
      "grad_norm": 0.31313176156525635,
      "learning_rate": 8.633792753941733e-06,
      "loss": 1.9482,
      "step": 82
    },
    {
      "epoch": 0.10925844668714349,
      "grad_norm": 0.32357294734122866,
      "learning_rate": 8.425243756279824e-06,
      "loss": 1.9274,
      "step": 83
    },
    {
      "epoch": 0.11057481351469943,
      "grad_norm": 0.3149933190799516,
      "learning_rate": 8.217394801200632e-06,
      "loss": 1.9494,
      "step": 84
    },
    {
      "epoch": 0.11189118034225537,
      "grad_norm": 0.3145273258487357,
      "learning_rate": 8.010338285937006e-06,
      "loss": 1.9383,
      "step": 85
    },
    {
      "epoch": 0.11320754716981132,
      "grad_norm": 0.3293525045662984,
      "learning_rate": 7.804166255450372e-06,
      "loss": 1.9438,
      "step": 86
    },
    {
      "epoch": 0.11452391399736726,
      "grad_norm": 0.29338737195487413,
      "learning_rate": 7.598970361513052e-06,
      "loss": 1.9486,
      "step": 87
    },
    {
      "epoch": 0.11584028082492322,
      "grad_norm": 0.3103457426467627,
      "learning_rate": 7.394841821965345e-06,
      "loss": 1.9274,
      "step": 88
    },
    {
      "epoch": 0.11715664765247916,
      "grad_norm": 0.3143049571743679,
      "learning_rate": 7.191871380165538e-06,
      "loss": 1.9524,
      "step": 89
    },
    {
      "epoch": 0.1184730144800351,
      "grad_norm": 0.29504032042698036,
      "learning_rate": 6.990149264650814e-06,
      "loss": 1.9445,
      "step": 90
    },
    {
      "epoch": 0.11978938130759105,
      "grad_norm": 0.3210704673929567,
      "learning_rate": 6.789765149027039e-06,
      "loss": 1.9515,
      "step": 91
    },
    {
      "epoch": 0.12110574813514699,
      "grad_norm": 0.28621542114599363,
      "learning_rate": 6.590808112105232e-06,
      "loss": 1.969,
      "step": 92
    },
    {
      "epoch": 0.12242211496270294,
      "grad_norm": 0.2882677076447023,
      "learning_rate": 6.3933665983024465e-06,
      "loss": 1.954,
      "step": 93
    },
    {
      "epoch": 0.12373848179025888,
      "grad_norm": 0.287930909134049,
      "learning_rate": 6.197528378324664e-06,
      "loss": 1.9313,
      "step": 94
    },
    {
      "epoch": 0.12505484861781482,
      "grad_norm": 0.31023728939120476,
      "learning_rate": 6.003380510149179e-06,
      "loss": 1.9602,
      "step": 95
    },
    {
      "epoch": 0.12637121544537078,
      "grad_norm": 0.2752773114898817,
      "learning_rate": 5.8110093003238175e-06,
      "loss": 1.9831,
      "step": 96
    },
    {
      "epoch": 0.12768758227292673,
      "grad_norm": 0.2972076509810327,
      "learning_rate": 5.620500265600206e-06,
      "loss": 1.9562,
      "step": 97
    },
    {
      "epoch": 0.12900394910048266,
      "grad_norm": 0.28344306622056237,
      "learning_rate": 5.431938094918132e-06,
      "loss": 1.9679,
      "step": 98
    },
    {
      "epoch": 0.1303203159280386,
      "grad_norm": 0.27862896188615965,
      "learning_rate": 5.245406611757882e-06,
      "loss": 1.9667,
      "step": 99
    },
    {
      "epoch": 0.13163668275559456,
      "grad_norm": 0.2775069903986726,
      "learning_rate": 5.060988736877366e-06,
      "loss": 1.9486,
      "step": 100
    },
    {
      "epoch": 0.13295304958315052,
      "grad_norm": 0.2750571862770337,
      "learning_rate": 4.878766451450451e-06,
      "loss": 1.9557,
      "step": 101
    },
    {
      "epoch": 0.13426941641070644,
      "grad_norm": 0.2680704001161532,
      "learning_rate": 4.698820760623064e-06,
      "loss": 1.9506,
      "step": 102
    },
    {
      "epoch": 0.1355857832382624,
      "grad_norm": 0.2782630531024094,
      "learning_rate": 4.5212316575031325e-06,
      "loss": 1.9639,
      "step": 103
    },
    {
      "epoch": 0.13690215006581835,
      "grad_norm": 0.25617683357697074,
      "learning_rate": 4.346078087600411e-06,
      "loss": 1.9683,
      "step": 104
    },
    {
      "epoch": 0.13821851689337428,
      "grad_norm": 0.2444943371569369,
      "learning_rate": 4.173437913732048e-06,
      "loss": 1.9659,
      "step": 105
    },
    {
      "epoch": 0.13953488372093023,
      "grad_norm": 0.2597397835336073,
      "learning_rate": 4.003387881409397e-06,
      "loss": 1.9704,
      "step": 106
    },
    {
      "epoch": 0.14085125054848618,
      "grad_norm": 0.26063366143876027,
      "learning_rate": 3.836003584721577e-06,
      "loss": 1.97,
      "step": 107
    },
    {
      "epoch": 0.1421676173760421,
      "grad_norm": 0.23235593032133328,
      "learning_rate": 3.6713594327308343e-06,
      "loss": 1.9554,
      "step": 108
    },
    {
      "epoch": 0.14348398420359806,
      "grad_norm": 0.22701215505987368,
      "learning_rate": 3.509528616394716e-06,
      "loss": 1.9737,
      "step": 109
    },
    {
      "epoch": 0.14480035103115402,
      "grad_norm": 0.24108913765275466,
      "learning_rate": 3.3505830760297543e-06,
      "loss": 1.9699,
      "step": 110
    },
    {
      "epoch": 0.14611671785870997,
      "grad_norm": 0.24405476026520742,
      "learning_rate": 3.1945934693310897e-06,
      "loss": 1.9767,
      "step": 111
    },
    {
      "epoch": 0.1474330846862659,
      "grad_norm": 0.23175867510438058,
      "learning_rate": 3.0416291399622834e-06,
      "loss": 2.0023,
      "step": 112
    },
    {
      "epoch": 0.14874945151382185,
      "grad_norm": 0.22596129228759587,
      "learning_rate": 2.891758086729253e-06,
      "loss": 1.955,
      "step": 113
    },
    {
      "epoch": 0.1500658183413778,
      "grad_norm": 0.24851949999567013,
      "learning_rate": 2.7450469333520856e-06,
      "loss": 1.9611,
      "step": 114
    },
    {
      "epoch": 0.15138218516893373,
      "grad_norm": 0.22293156665441605,
      "learning_rate": 2.6015608988480956e-06,
      "loss": 1.9658,
      "step": 115
    },
    {
      "epoch": 0.15269855199648968,
      "grad_norm": 0.21616202749444716,
      "learning_rate": 2.4613637685393433e-06,
      "loss": 1.9753,
      "step": 116
    },
    {
      "epoch": 0.15401491882404564,
      "grad_norm": 0.22185470586350384,
      "learning_rate": 2.324517865697501e-06,
      "loss": 1.9495,
      "step": 117
    },
    {
      "epoch": 0.1553312856516016,
      "grad_norm": 0.21722679538918865,
      "learning_rate": 2.19108402383864e-06,
      "loss": 1.9628,
      "step": 118
    },
    {
      "epoch": 0.15664765247915752,
      "grad_norm": 0.21205900012695214,
      "learning_rate": 2.06112155968028e-06,
      "loss": 1.9823,
      "step": 119
    },
    {
      "epoch": 0.15796401930671347,
      "grad_norm": 0.2182253662134732,
      "learning_rate": 1.9346882467727323e-06,
      "loss": 1.9875,
      "step": 120
    },
    {
      "epoch": 0.15928038613426942,
      "grad_norm": 0.22007111443224736,
      "learning_rate": 1.811840289816409e-06,
      "loss": 1.9805,
      "step": 121
    },
    {
      "epoch": 0.16059675296182535,
      "grad_norm": 0.20406970469647664,
      "learning_rate": 1.6926322996765899e-06,
      "loss": 1.9818,
      "step": 122
    },
    {
      "epoch": 0.1619131197893813,
      "grad_norm": 0.2024904864715853,
      "learning_rate": 1.5771172691066793e-06,
      "loss": 1.9859,
      "step": 123
    },
    {
      "epoch": 0.16322948661693726,
      "grad_norm": 0.20299971001174535,
      "learning_rate": 1.4653465491908003e-06,
      "loss": 2.0096,
      "step": 124
    },
    {
      "epoch": 0.1645458534444932,
      "grad_norm": 0.21516178859981677,
      "learning_rate": 1.3573698265161683e-06,
      "loss": 1.9654,
      "step": 125
    },
    {
      "epoch": 0.16586222027204914,
      "grad_norm": 0.20722629324747893,
      "learning_rate": 1.2532351010853916e-06,
      "loss": 1.9708,
      "step": 126
    },
    {
      "epoch": 0.1671785870996051,
      "grad_norm": 0.21213871178644442,
      "learning_rate": 1.152988664978556e-06,
      "loss": 1.9787,
      "step": 127
    },
    {
      "epoch": 0.16849495392716105,
      "grad_norm": 0.20023503307538396,
      "learning_rate": 1.0566750817745076e-06,
      "loss": 1.9858,
      "step": 128
    },
    {
      "epoch": 0.16981132075471697,
      "grad_norm": 0.1924700263570635,
      "learning_rate": 9.6433716674057e-07,
      "loss": 1.9781,
      "step": 129
    },
    {
      "epoch": 0.17112768758227292,
      "grad_norm": 0.20779051083187167,
      "learning_rate": 8.760159677994174e-07,
      "loss": 1.9827,
      "step": 130
    },
    {
      "epoch": 0.17244405440982888,
      "grad_norm": 0.20230044677950978,
      "learning_rate": 7.91750747281621e-07,
      "loss": 1.9741,
      "step": 131
    },
    {
      "epoch": 0.1737604212373848,
      "grad_norm": 0.19683020520485797,
      "learning_rate": 7.115789644719728e-07,
      "loss": 1.9949,
      "step": 132
    },
    {
      "epoch": 0.17507678806494076,
      "grad_norm": 0.19524340774257554,
      "learning_rate": 6.355362589573078e-07,
      "loss": 1.9762,
      "step": 133
    },
    {
      "epoch": 0.1763931548924967,
      "grad_norm": 0.20098123761556075,
      "learning_rate": 5.636564347832907e-07,
      "loss": 1.9818,
      "step": 134
    },
    {
      "epoch": 0.17770952172005267,
      "grad_norm": 0.18935262395702177,
      "learning_rate": 4.95971445427137e-07,
      "loss": 1.983,
      "step": 135
    },
    {
      "epoch": 0.1790258885476086,
      "grad_norm": 0.1906817912560046,
      "learning_rate": 4.3251137959302023e-07,
      "loss": 1.9708,
      "step": 136
    },
    {
      "epoch": 0.18034225537516455,
      "grad_norm": 0.19470612058115894,
      "learning_rate": 3.733044478364234e-07,
      "loss": 1.967,
      "step": 137
    },
    {
      "epoch": 0.1816586222027205,
      "grad_norm": 0.18636115957247928,
      "learning_rate": 3.1837697002341293e-07,
      "loss": 1.9791,
      "step": 138
    },
    {
      "epoch": 0.18297498903027642,
      "grad_norm": 0.18877431556550928,
      "learning_rate": 2.677533636303964e-07,
      "loss": 1.9694,
      "step": 139
    },
    {
      "epoch": 0.18429135585783238,
      "grad_norm": 0.1955087203028538,
      "learning_rate": 2.214561328895748e-07,
      "loss": 1.9741,
      "step": 140
    },
    {
      "epoch": 0.18560772268538833,
      "grad_norm": 0.196549286904382,
      "learning_rate": 1.7950585878489856e-07,
      "loss": 1.979,
      "step": 141
    },
    {
      "epoch": 0.18692408951294429,
      "grad_norm": 0.19224460294869852,
      "learning_rate": 1.419211899029971e-07,
      "loss": 1.9707,
      "step": 142
    },
    {
      "epoch": 0.1882404563405002,
      "grad_norm": 0.18853581462437616,
      "learning_rate": 1.0871883414312778e-07,
      "loss": 1.981,
      "step": 143
    },
    {
      "epoch": 0.18955682316805617,
      "grad_norm": 0.1838928559361403,
      "learning_rate": 7.99135512898408e-08,
      "loss": 1.9731,
      "step": 144
    },
    {
      "epoch": 0.19087318999561212,
      "grad_norm": 0.190162401461088,
      "learning_rate": 5.55181464516652e-08,
      "loss": 1.975,
      "step": 145
    },
    {
      "epoch": 0.19218955682316805,
      "grad_norm": 0.19003523312950812,
      "learning_rate": 3.554346436871581e-08,
      "loss": 1.9704,
      "step": 146
    },
    {
      "epoch": 0.193505923650724,
      "grad_norm": 0.19021539316010605,
      "learning_rate": 1.9998384591773945e-08,
      "loss": 1.979,
      "step": 147
    },
    {
      "epoch": 0.19482229047827995,
      "grad_norm": 0.18754432655782285,
      "learning_rate": 8.889817534969425e-09,
      "loss": 1.9867,
      "step": 148
    },
    {
      "epoch": 0.1961386573058359,
      "grad_norm": 0.1897467195961825,
      "learning_rate": 2.222701403818972e-09,
      "loss": 1.9756,
      "step": 149
    },
    {
      "epoch": 0.19745502413339183,
      "grad_norm": 0.20302937860642753,
      "learning_rate": 0.0,
      "loss": 1.9877,
      "step": 150
    },
    {
      "epoch": 0.19745502413339183,
      "step": 150,
      "total_flos": 349677715193856.0,
      "train_loss": 1.9421729667981467,
      "train_runtime": 5209.8016,
      "train_samples_per_second": 58.044,
      "train_steps_per_second": 0.029
    }
  ],
  "logging_steps": 1,
  "max_steps": 150,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 349677715193856.0,
  "train_batch_size": 42,
  "trial_name": null,
  "trial_params": null
}