|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.4038364462392731,
  "eval_steps": 500,
  "global_step": 2400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0016826518593303045,
      "grad_norm": 5.367858933563703,
      "learning_rate": 4.9999999999999996e-06,
      "loss": 0.9537,
      "step": 10
    },
    {
      "epoch": 0.003365303718660609,
      "grad_norm": 9.386746384686745,
      "learning_rate": 9.999999999999999e-06,
      "loss": 0.943,
      "step": 20
    },
    {
      "epoch": 0.005047955577990914,
      "grad_norm": 7.387362447577942,
      "learning_rate": 1.5e-05,
      "loss": 0.934,
      "step": 30
    },
    {
      "epoch": 0.006730607437321218,
      "grad_norm": 6.9256319824932655,
      "learning_rate": 1.9999999999999998e-05,
      "loss": 0.8376,
      "step": 40
    },
    {
      "epoch": 0.008413259296651522,
      "grad_norm": 9.1148382590838,
      "learning_rate": 2.5e-05,
      "loss": 0.8484,
      "step": 50
    },
    {
      "epoch": 0.010095911155981827,
      "grad_norm": 3.9989232759892426,
      "learning_rate": 3e-05,
      "loss": 0.8097,
      "step": 60
    },
    {
      "epoch": 0.011778563015312132,
      "grad_norm": 3.892371218590039,
      "learning_rate": 2.9999786123888308e-05,
      "loss": 0.7811,
      "step": 70
    },
    {
      "epoch": 0.013461214874642436,
      "grad_norm": 8.096662196282066,
      "learning_rate": 2.9999144501652298e-05,
      "loss": 0.7446,
      "step": 80
    },
    {
      "epoch": 0.01514386673397274,
      "grad_norm": 1.5769306611206149,
      "learning_rate": 2.9998075151588992e-05,
      "loss": 0.7258,
      "step": 90
    },
    {
      "epoch": 0.016826518593303044,
      "grad_norm": 8.47430485487969,
      "learning_rate": 2.999657810419285e-05,
      "loss": 0.7052,
      "step": 100
    },
    {
      "epoch": 0.01850917045263335,
      "grad_norm": 2.363071299913598,
      "learning_rate": 2.999465340215489e-05,
      "loss": 0.7657,
      "step": 110
    },
    {
      "epoch": 0.020191822311963654,
      "grad_norm": 1.9252385425154874,
      "learning_rate": 2.999230110036149e-05,
      "loss": 0.7329,
      "step": 120
    },
    {
      "epoch": 0.02187447417129396,
      "grad_norm": 8.946028475031488,
      "learning_rate": 2.99895212658928e-05,
      "loss": 0.7304,
      "step": 130
    },
    {
      "epoch": 0.023557126030624265,
      "grad_norm": 6.877609312630206,
      "learning_rate": 2.9986313978020846e-05,
      "loss": 0.7453,
      "step": 140
    },
    {
      "epoch": 0.02523977788995457,
      "grad_norm": 2.5256324882367993,
      "learning_rate": 2.9982679328207262e-05,
      "loss": 0.7366,
      "step": 150
    },
    {
      "epoch": 0.02692242974928487,
      "grad_norm": 2.709550398238738,
      "learning_rate": 2.9978617420100692e-05,
      "loss": 0.7258,
      "step": 160
    },
    {
      "epoch": 0.028605081608615177,
      "grad_norm": 1.543550019689774,
      "learning_rate": 2.9974128369533805e-05,
      "loss": 0.7372,
      "step": 170
    },
    {
      "epoch": 0.03028773346794548,
      "grad_norm": 3.3453966881155504,
      "learning_rate": 2.9969212304520034e-05,
      "loss": 0.743,
      "step": 180
    },
    {
      "epoch": 0.03197038532727579,
      "grad_norm": 1.922001656181265,
      "learning_rate": 2.9963869365249895e-05,
      "loss": 0.7819,
      "step": 190
    },
    {
      "epoch": 0.03365303718660609,
      "grad_norm": 2.0611188483400036,
      "learning_rate": 2.995809970408699e-05,
      "loss": 0.7155,
      "step": 200
    },
    {
      "epoch": 0.0353356890459364,
      "grad_norm": 1.5313041833127259,
      "learning_rate": 2.9951903485563685e-05,
      "loss": 0.7322,
      "step": 210
    },
    {
      "epoch": 0.0370183409052667,
      "grad_norm": 2.0124191694435085,
      "learning_rate": 2.99452808863764e-05,
      "loss": 0.6759,
      "step": 220
    },
    {
      "epoch": 0.03870099276459701,
      "grad_norm": 3.182123324389477,
      "learning_rate": 2.993823209538056e-05,
      "loss": 0.6953,
      "step": 230
    },
    {
      "epoch": 0.04038364462392731,
      "grad_norm": 1.6122782177661379,
      "learning_rate": 2.9930757313585238e-05,
      "loss": 0.6953,
      "step": 240
    },
    {
      "epoch": 0.04206629648325761,
      "grad_norm": 2.2027482596695647,
      "learning_rate": 2.9922856754147406e-05,
      "loss": 0.7301,
      "step": 250
    },
    {
      "epoch": 0.04374894834258792,
      "grad_norm": 2.6782477155989213,
      "learning_rate": 2.9914530642365852e-05,
      "loss": 0.6891,
      "step": 260
    },
    {
      "epoch": 0.04543160020191822,
      "grad_norm": 1.9740401144541417,
      "learning_rate": 2.990577921567476e-05,
      "loss": 0.7231,
      "step": 270
    },
    {
      "epoch": 0.04711425206124853,
      "grad_norm": 1.719874620968932,
      "learning_rate": 2.989660272363696e-05,
      "loss": 0.7505,
      "step": 280
    },
    {
      "epoch": 0.04879690392057883,
      "grad_norm": 1.3138364164203409,
      "learning_rate": 2.988700142793676e-05,
      "loss": 0.7116,
      "step": 290
    },
    {
      "epoch": 0.05047955577990914,
      "grad_norm": 5.853627389344256,
      "learning_rate": 2.9876975602372536e-05,
      "loss": 0.719,
      "step": 300
    },
    {
      "epoch": 0.05216220763923944,
      "grad_norm": 2.347259437170711,
      "learning_rate": 2.9866525532848906e-05,
      "loss": 0.6803,
      "step": 310
    },
    {
      "epoch": 0.05384485949856974,
      "grad_norm": 1.937679220955038,
      "learning_rate": 2.9855651517368567e-05,
      "loss": 0.7461,
      "step": 320
    },
    {
      "epoch": 0.05552751135790005,
      "grad_norm": 1.6661300351569575,
      "learning_rate": 2.9844353866023802e-05,
      "loss": 0.7472,
      "step": 330
    },
    {
      "epoch": 0.05721016321723035,
      "grad_norm": 2.357915869204484,
      "learning_rate": 2.9832632900987642e-05,
      "loss": 0.7148,
      "step": 340
    },
    {
      "epoch": 0.05889281507656066,
      "grad_norm": 4.398815186243292,
      "learning_rate": 2.982048895650468e-05,
      "loss": 0.6992,
      "step": 350
    },
    {
      "epoch": 0.06057546693589096,
      "grad_norm": 12.662682224480092,
      "learning_rate": 2.9807922378881537e-05,
      "loss": 0.7539,
      "step": 360
    },
    {
      "epoch": 0.06225811879522127,
      "grad_norm": 0.8642696401357872,
      "learning_rate": 2.979493352647697e-05,
      "loss": 0.7212,
      "step": 370
    },
    {
      "epoch": 0.06394077065455157,
      "grad_norm": 27.047937858232604,
      "learning_rate": 2.9781522769691686e-05,
      "loss": 0.722,
      "step": 380
    },
    {
      "epoch": 0.06562342251388188,
      "grad_norm": 2.598805292448644,
      "learning_rate": 2.9767690490957758e-05,
      "loss": 0.7065,
      "step": 390
    },
    {
      "epoch": 0.06730607437321218,
      "grad_norm": 1.2314762895092763,
      "learning_rate": 2.9753437084727713e-05,
      "loss": 0.7498,
      "step": 400
    },
    {
      "epoch": 0.06898872623254249,
      "grad_norm": 1.6421909669790502,
      "learning_rate": 2.9738762957463292e-05,
      "loss": 0.6992,
      "step": 410
    },
    {
      "epoch": 0.0706713780918728,
      "grad_norm": 2.023552968622588,
      "learning_rate": 2.9723668527623877e-05,
      "loss": 0.6943,
      "step": 420
    },
    {
      "epoch": 0.0723540299512031,
      "grad_norm": 1.5172337910969138,
      "learning_rate": 2.9708154225654526e-05,
      "loss": 0.6987,
      "step": 430
    },
    {
      "epoch": 0.0740366818105334,
      "grad_norm": 1.197852135730745,
      "learning_rate": 2.9692220493973712e-05,
      "loss": 0.7302,
      "step": 440
    },
    {
      "epoch": 0.0757193336698637,
      "grad_norm": 2.4396443837967183,
      "learning_rate": 2.9675867786960718e-05,
      "loss": 0.7318,
      "step": 450
    },
    {
      "epoch": 0.07740198552919401,
      "grad_norm": 1.4599851880563282,
      "learning_rate": 2.9659096570942654e-05,
      "loss": 0.6941,
      "step": 460
    },
    {
      "epoch": 0.07908463738852431,
      "grad_norm": 1.117755825364562,
      "learning_rate": 2.9641907324181194e-05,
      "loss": 0.7399,
      "step": 470
    },
    {
      "epoch": 0.08076728924785462,
      "grad_norm": 2.9235378164576242,
      "learning_rate": 2.96243005368589e-05,
      "loss": 0.7207,
      "step": 480
    },
    {
      "epoch": 0.08244994110718493,
      "grad_norm": 7.308883163781362,
      "learning_rate": 2.960627671106527e-05,
      "loss": 0.682,
      "step": 490
    },
    {
      "epoch": 0.08413259296651522,
      "grad_norm": 3.4394827932955234,
      "learning_rate": 2.9587836360782405e-05,
      "loss": 0.708,
      "step": 500
    },
    {
      "epoch": 0.08581524482584553,
      "grad_norm": 3.2314529856927634,
      "learning_rate": 2.9568980011870357e-05,
      "loss": 0.7335,
      "step": 510
    },
    {
      "epoch": 0.08749789668517584,
      "grad_norm": 1.825724533695325,
      "learning_rate": 2.954970820205214e-05,
      "loss": 0.6951,
      "step": 520
    },
    {
      "epoch": 0.08918054854450615,
      "grad_norm": 3.3231741746640076,
      "learning_rate": 2.9530021480898393e-05,
      "loss": 0.7793,
      "step": 530
    },
    {
      "epoch": 0.09086320040383644,
      "grad_norm": 1.3097651462571123,
      "learning_rate": 2.9509920409811696e-05,
      "loss": 0.7087,
      "step": 540
    },
    {
      "epoch": 0.09254585226316675,
      "grad_norm": 6.685911471215255,
      "learning_rate": 2.9489405562010565e-05,
      "loss": 0.6906,
      "step": 550
    },
    {
      "epoch": 0.09422850412249706,
      "grad_norm": 2.870746617513948,
      "learning_rate": 2.9468477522513132e-05,
      "loss": 0.7028,
      "step": 560
    },
    {
      "epoch": 0.09591115598182735,
      "grad_norm": 1.782555352805469,
      "learning_rate": 2.9447136888120408e-05,
      "loss": 0.6901,
      "step": 570
    },
    {
      "epoch": 0.09759380784115766,
      "grad_norm": 2.336519711000487,
      "learning_rate": 2.9425384267399327e-05,
      "loss": 0.7779,
      "step": 580
    },
    {
      "epoch": 0.09927645970048797,
      "grad_norm": 8.935574410818228,
      "learning_rate": 2.940322028066534e-05,
      "loss": 0.7503,
      "step": 590
    },
    {
      "epoch": 0.10095911155981828,
      "grad_norm": 2.754713786882031,
      "learning_rate": 2.938064555996476e-05,
      "loss": 0.7208,
      "step": 600
    },
    {
      "epoch": 0.10264176341914857,
      "grad_norm": 1.5082503557652136,
      "learning_rate": 2.9357660749056713e-05,
      "loss": 0.7169,
      "step": 610
    },
    {
      "epoch": 0.10432441527847888,
      "grad_norm": 9.04522194526273,
      "learning_rate": 2.9334266503394803e-05,
      "loss": 0.6927,
      "step": 620
    },
    {
      "epoch": 0.10600706713780919,
      "grad_norm": 55.28278686388287,
      "learning_rate": 2.9310463490108397e-05,
      "loss": 0.7107,
      "step": 630
    },
    {
      "epoch": 0.10768971899713949,
      "grad_norm": 3.721916069105249,
      "learning_rate": 2.928625238798362e-05,
      "loss": 0.6951,
      "step": 640
    },
    {
      "epoch": 0.1093723708564698,
      "grad_norm": 2.5040797323750112,
      "learning_rate": 2.9261633887443993e-05,
      "loss": 0.6916,
      "step": 650
    },
    {
      "epoch": 0.1110550227158001,
      "grad_norm": 3.5468924769840617,
      "learning_rate": 2.9236608690530738e-05,
      "loss": 0.7077,
      "step": 660
    },
    {
      "epoch": 0.11273767457513041,
      "grad_norm": 3.0266819778200746,
      "learning_rate": 2.921117751088276e-05,
      "loss": 0.6952,
      "step": 670
    },
    {
      "epoch": 0.1144203264344607,
      "grad_norm": 1.634743894298146,
      "learning_rate": 2.91853410737163e-05,
      "loss": 0.6936,
      "step": 680
    },
    {
      "epoch": 0.11610297829379101,
      "grad_norm": 1.0925365801520501,
      "learning_rate": 2.915910011580426e-05,
      "loss": 0.7317,
      "step": 690
    },
    {
      "epoch": 0.11778563015312132,
      "grad_norm": 1.6959112138540386,
      "learning_rate": 2.9132455385455176e-05,
      "loss": 0.6917,
      "step": 700
    },
    {
      "epoch": 0.11946828201245162,
      "grad_norm": 1.9723433746891168,
      "learning_rate": 2.9105407642491895e-05,
      "loss": 0.7209,
      "step": 710
    },
    {
      "epoch": 0.12115093387178193,
      "grad_norm": 2.1537215293733833,
      "learning_rate": 2.907795765822989e-05,
      "loss": 0.7488,
      "step": 720
    },
    {
      "epoch": 0.12283358573111224,
      "grad_norm": 3.227101869737169,
      "learning_rate": 2.9050106215455283e-05,
      "loss": 0.7152,
      "step": 730
    },
    {
      "epoch": 0.12451623759044254,
      "grad_norm": 2.7222358893572554,
      "learning_rate": 2.9021854108402516e-05,
      "loss": 0.708,
      "step": 740
    },
    {
      "epoch": 0.12619888944977284,
      "grad_norm": 2.1054843767538136,
      "learning_rate": 2.8993202142731693e-05,
      "loss": 0.7251,
      "step": 750
    },
    {
      "epoch": 0.12788154130910315,
      "grad_norm": 2.11845883419618,
      "learning_rate": 2.8964151135505616e-05,
      "loss": 0.7405,
      "step": 760
    },
    {
      "epoch": 0.12956419316843346,
      "grad_norm": 13.171512404187755,
      "learning_rate": 2.8934701915166477e-05,
      "loss": 0.6844,
      "step": 770
    },
    {
      "epoch": 0.13124684502776376,
      "grad_norm": 2.7633375632879127,
      "learning_rate": 2.890485532151225e-05,
      "loss": 0.6766,
      "step": 780
    },
    {
      "epoch": 0.13292949688709407,
      "grad_norm": 1.8420785342693768,
      "learning_rate": 2.887461220567271e-05,
      "loss": 0.7037,
      "step": 790
    },
    {
      "epoch": 0.13461214874642435,
      "grad_norm": 1.5557447509529954,
      "learning_rate": 2.8843973430085204e-05,
      "loss": 0.6991,
      "step": 800
    },
    {
      "epoch": 0.13629480060575466,
      "grad_norm": 1.9295826624758823,
      "learning_rate": 2.8812939868470016e-05,
      "loss": 0.6956,
      "step": 810
    },
    {
      "epoch": 0.13797745246508497,
      "grad_norm": 3.3211216557707126,
      "learning_rate": 2.878151240580548e-05,
      "loss": 0.6774,
      "step": 820
    },
    {
      "epoch": 0.13966010432441528,
      "grad_norm": 4.196064403930616,
      "learning_rate": 2.874969193830274e-05,
      "loss": 0.6752,
      "step": 830
    },
    {
      "epoch": 0.1413427561837456,
      "grad_norm": 5.574976270137628,
      "learning_rate": 2.871747937338016e-05,
      "loss": 0.6553,
      "step": 840
    },
    {
      "epoch": 0.1430254080430759,
      "grad_norm": 1.6494038718740478,
      "learning_rate": 2.8684875629637505e-05,
      "loss": 0.7152,
      "step": 850
    },
    {
      "epoch": 0.1447080599024062,
      "grad_norm": 1.3061892609414858,
      "learning_rate": 2.8651881636829698e-05,
      "loss": 0.7462,
      "step": 860
    },
    {
      "epoch": 0.1463907117617365,
      "grad_norm": 4.321044418392694,
      "learning_rate": 2.861849833584032e-05,
      "loss": 0.6902,
      "step": 870
    },
    {
      "epoch": 0.1480733636210668,
      "grad_norm": 2.9444722968009764,
      "learning_rate": 2.8584726678654787e-05,
      "loss": 0.6813,
      "step": 880
    },
    {
      "epoch": 0.1497560154803971,
      "grad_norm": 1.4940245340163587,
      "learning_rate": 2.85505676283332e-05,
      "loss": 0.689,
      "step": 890
    },
    {
      "epoch": 0.1514386673397274,
      "grad_norm": 3.3704010040589565,
      "learning_rate": 2.851602215898287e-05,
      "loss": 0.6953,
      "step": 900
    },
    {
      "epoch": 0.15312131919905772,
      "grad_norm": 1.6597144402924948,
      "learning_rate": 2.8481091255730552e-05,
      "loss": 0.7277,
      "step": 910
    },
    {
      "epoch": 0.15480397105838803,
      "grad_norm": 10.969872224353953,
      "learning_rate": 2.844577591469435e-05,
      "loss": 0.7142,
      "step": 920
    },
    {
      "epoch": 0.15648662291771834,
      "grad_norm": 8.45616831264245,
      "learning_rate": 2.8410077142955304e-05,
      "loss": 0.7197,
      "step": 930
    },
    {
      "epoch": 0.15816927477704862,
      "grad_norm": 2.9594258901214427,
      "learning_rate": 2.8373995958528683e-05,
      "loss": 0.7351,
      "step": 940
    },
    {
      "epoch": 0.15985192663637893,
      "grad_norm": 2.168676312428759,
      "learning_rate": 2.8337533390334942e-05,
      "loss": 0.7544,
      "step": 950
    },
    {
      "epoch": 0.16153457849570924,
      "grad_norm": 7.898767360662744,
      "learning_rate": 2.8300690478170388e-05,
      "loss": 0.7015,
      "step": 960
    },
    {
      "epoch": 0.16321723035503954,
      "grad_norm": 16.83650212945308,
      "learning_rate": 2.826346827267753e-05,
      "loss": 0.7139,
      "step": 970
    },
    {
      "epoch": 0.16489988221436985,
      "grad_norm": 2.3791337429068977,
      "learning_rate": 2.8225867835315114e-05,
      "loss": 0.7053,
      "step": 980
    },
    {
      "epoch": 0.16658253407370016,
      "grad_norm": 1.9679363325295285,
      "learning_rate": 2.8187890238327842e-05,
      "loss": 0.7313,
      "step": 990
    },
    {
      "epoch": 0.16826518593303044,
      "grad_norm": 1.4822625638777076,
      "learning_rate": 2.814953656471583e-05,
      "loss": 0.7085,
      "step": 1000
    },
    {
      "epoch": 0.16994783779236075,
      "grad_norm": 2.647291447509443,
      "learning_rate": 2.8110807908203682e-05,
      "loss": 0.6638,
      "step": 1010
    },
    {
      "epoch": 0.17163048965169106,
      "grad_norm": 2.969379719654364,
      "learning_rate": 2.8071705373209328e-05,
      "loss": 0.6884,
      "step": 1020
    },
    {
      "epoch": 0.17331314151102137,
      "grad_norm": 1.1163745403124403,
      "learning_rate": 2.803223007481252e-05,
      "loss": 0.6885,
      "step": 1030
    },
    {
      "epoch": 0.17499579337035168,
      "grad_norm": 1.2686557979094786,
      "learning_rate": 2.7992383138723034e-05,
      "loss": 0.7037,
      "step": 1040
    },
    {
      "epoch": 0.17667844522968199,
      "grad_norm": 4.648945448875594,
      "learning_rate": 2.7952165701248573e-05,
      "loss": 0.6933,
      "step": 1050
    },
    {
      "epoch": 0.1783610970890123,
      "grad_norm": 4.723564874595428,
      "learning_rate": 2.7911578909262353e-05,
      "loss": 0.7144,
      "step": 1060
    },
    {
      "epoch": 0.18004374894834257,
      "grad_norm": 5.211806926801946,
      "learning_rate": 2.787062392017041e-05,
      "loss": 0.7266,
      "step": 1070
    },
    {
      "epoch": 0.18172640080767288,
      "grad_norm": 1.3725560316172503,
      "learning_rate": 2.7829301901878592e-05,
      "loss": 0.7445,
      "step": 1080
    },
    {
      "epoch": 0.1834090526670032,
      "grad_norm": 0.9012241436004484,
      "learning_rate": 2.7787614032759243e-05,
      "loss": 0.6986,
      "step": 1090
    },
    {
      "epoch": 0.1850917045263335,
      "grad_norm": 2.912544243603394,
      "learning_rate": 2.7745561501617605e-05,
      "loss": 0.7173,
      "step": 1100
    },
    {
      "epoch": 0.1867743563856638,
      "grad_norm": 1.4248442614931247,
      "learning_rate": 2.7703145507657923e-05,
      "loss": 0.7035,
      "step": 1110
    },
    {
      "epoch": 0.18845700824499412,
      "grad_norm": 2.186609904533333,
      "learning_rate": 2.766036726044926e-05,
      "loss": 0.7371,
      "step": 1120
    },
    {
      "epoch": 0.19013966010432443,
      "grad_norm": 2.0524595532166603,
      "learning_rate": 2.7617227979890957e-05,
      "loss": 0.6986,
      "step": 1130
    },
    {
      "epoch": 0.1918223119636547,
      "grad_norm": 1.8227045280907195,
      "learning_rate": 2.7573728896177897e-05,
      "loss": 0.7075,
      "step": 1140
    },
    {
      "epoch": 0.19350496382298502,
      "grad_norm": 1.8425998009576734,
      "learning_rate": 2.7529871249765397e-05,
      "loss": 0.6897,
      "step": 1150
    },
    {
      "epoch": 0.19518761568231532,
      "grad_norm": 5.3035191638420836,
      "learning_rate": 2.7485656291333845e-05,
      "loss": 0.7027,
      "step": 1160
    },
    {
      "epoch": 0.19687026754164563,
      "grad_norm": 3.3228474353685504,
      "learning_rate": 2.7441085281753028e-05,
      "loss": 0.7091,
      "step": 1170
    },
    {
      "epoch": 0.19855291940097594,
      "grad_norm": 3.5016968564731283,
      "learning_rate": 2.739615949204617e-05,
      "loss": 0.7241,
      "step": 1180
    },
    {
      "epoch": 0.20023557126030625,
      "grad_norm": 1.7190048028902127,
      "learning_rate": 2.7350880203353703e-05,
      "loss": 0.7192,
      "step": 1190
    },
    {
      "epoch": 0.20191822311963656,
      "grad_norm": 3.7186824247487515,
      "learning_rate": 2.7305248706896722e-05,
      "loss": 0.7063,
      "step": 1200
    },
    {
      "epoch": 0.20360087497896684,
      "grad_norm": 4.1717869895766935,
      "learning_rate": 2.7259266303940164e-05,
      "loss": 0.7088,
      "step": 1210
    },
    {
      "epoch": 0.20528352683829715,
      "grad_norm": 2.5124857963805804,
      "learning_rate": 2.7212934305755697e-05,
      "loss": 0.7198,
      "step": 1220
    },
    {
      "epoch": 0.20696617869762746,
      "grad_norm": 2.095136268936366,
      "learning_rate": 2.7166254033584343e-05,
      "loss": 0.753,
      "step": 1230
    },
    {
      "epoch": 0.20864883055695777,
      "grad_norm": 3.2661098868577256,
      "learning_rate": 2.7119226818598784e-05,
      "loss": 0.6779,
      "step": 1240
    },
    {
      "epoch": 0.21033148241628807,
      "grad_norm": 3.055506603735091,
      "learning_rate": 2.7071854001865402e-05,
      "loss": 0.7013,
      "step": 1250
    },
    {
      "epoch": 0.21201413427561838,
      "grad_norm": 12.522953778477769,
      "learning_rate": 2.702413693430604e-05,
      "loss": 0.7088,
      "step": 1260
    },
    {
      "epoch": 0.2136967861349487,
      "grad_norm": 3.476240301739368,
      "learning_rate": 2.697607697665948e-05,
      "loss": 0.689,
      "step": 1270
    },
    {
      "epoch": 0.21537943799427897,
      "grad_norm": 1.1862686197570156,
      "learning_rate": 2.6927675499442648e-05,
      "loss": 0.7243,
      "step": 1280
    },
    {
      "epoch": 0.21706208985360928,
      "grad_norm": 1.6505042403801382,
      "learning_rate": 2.68789338829115e-05,
      "loss": 0.7083,
      "step": 1290
    },
    {
      "epoch": 0.2187447417129396,
      "grad_norm": 4.74071740077375,
      "learning_rate": 2.6829853517021698e-05,
      "loss": 0.7016,
      "step": 1300
    },
    {
      "epoch": 0.2204273935722699,
      "grad_norm": 4.124079283639458,
      "learning_rate": 2.6780435801388945e-05,
      "loss": 0.7077,
      "step": 1310
    },
    {
      "epoch": 0.2221100454316002,
      "grad_norm": 1.9487864410536297,
      "learning_rate": 2.6730682145249093e-05,
      "loss": 0.7355,
      "step": 1320
    },
    {
      "epoch": 0.22379269729093051,
      "grad_norm": 2.4839241050514733,
      "learning_rate": 2.668059396741795e-05,
      "loss": 0.7092,
      "step": 1330
    },
    {
      "epoch": 0.22547534915026082,
      "grad_norm": 2.841913657394254,
      "learning_rate": 2.6630172696250804e-05,
      "loss": 0.7303,
      "step": 1340
    },
    {
      "epoch": 0.2271580010095911,
      "grad_norm": 2.7442870185873347,
      "learning_rate": 2.6579419769601715e-05,
      "loss": 0.6739,
      "step": 1350
    },
    {
      "epoch": 0.2288406528689214,
      "grad_norm": 1.3854365909071105,
      "learning_rate": 2.6528336634782493e-05,
      "loss": 0.7073,
      "step": 1360
    },
    {
      "epoch": 0.23052330472825172,
      "grad_norm": 3.115941001607779,
      "learning_rate": 2.6476924748521443e-05,
      "loss": 0.7267,
      "step": 1370
    },
    {
      "epoch": 0.23220595658758203,
      "grad_norm": 6.9185951332741,
      "learning_rate": 2.6425185576921812e-05,
      "loss": 0.7456,
      "step": 1380
    },
    {
      "epoch": 0.23388860844691234,
      "grad_norm": 2.378601355345996,
      "learning_rate": 2.637312059541997e-05,
      "loss": 0.6912,
      "step": 1390
    },
    {
      "epoch": 0.23557126030624265,
      "grad_norm": 2.7929947858543906,
      "learning_rate": 2.632073128874336e-05,
      "loss": 0.7184,
      "step": 1400
    },
    {
      "epoch": 0.23725391216557296,
      "grad_norm": 1.5382855773213957,
      "learning_rate": 2.6268019150868144e-05,
      "loss": 0.7099,
      "step": 1410
    },
    {
      "epoch": 0.23893656402490324,
      "grad_norm": 6.1010563795570025,
      "learning_rate": 2.62149856849766e-05,
      "loss": 0.6895,
      "step": 1420
    },
    {
      "epoch": 0.24061921588423354,
      "grad_norm": 5.999491987974443,
      "learning_rate": 2.616163240341426e-05,
      "loss": 0.7493,
      "step": 1430
    },
    {
      "epoch": 0.24230186774356385,
      "grad_norm": 2.837037600849311,
      "learning_rate": 2.6107960827646774e-05,
      "loss": 0.7176,
      "step": 1440
    },
    {
      "epoch": 0.24398451960289416,
      "grad_norm": 1.7029089834427125,
      "learning_rate": 2.6053972488216538e-05,
      "loss": 0.6852,
      "step": 1450
    },
    {
      "epoch": 0.24566717146222447,
      "grad_norm": 1.382189249222589,
      "learning_rate": 2.5999668924699035e-05,
      "loss": 0.685,
      "step": 1460
    },
    {
      "epoch": 0.24734982332155478,
      "grad_norm": 1.9496045543050813,
      "learning_rate": 2.5945051685658923e-05,
      "loss": 0.6591,
      "step": 1470
    },
    {
      "epoch": 0.2490324751808851,
      "grad_norm": 5.479390805764353,
      "learning_rate": 2.5890122328605908e-05,
      "loss": 0.7085,
      "step": 1480
    },
    {
      "epoch": 0.25071512704021537,
      "grad_norm": 1.7567995670915637,
      "learning_rate": 2.5834882419950295e-05,
      "loss": 0.7091,
      "step": 1490
    },
    {
      "epoch": 0.2523977788995457,
      "grad_norm": 1.9685911084195309,
      "learning_rate": 2.577933353495833e-05,
      "loss": 0.7218,
      "step": 1500
    },
    {
      "epoch": 0.254080430758876,
      "grad_norm": 3.400633915540874,
      "learning_rate": 2.5723477257707293e-05,
      "loss": 0.7148,
      "step": 1510
    },
    {
      "epoch": 0.2557630826182063,
      "grad_norm": 1.2116738326443663,
      "learning_rate": 2.566731518104029e-05,
      "loss": 0.7321,
      "step": 1520
    },
    {
      "epoch": 0.2574457344775366,
      "grad_norm": 1.3376343864594256,
      "learning_rate": 2.5610848906520878e-05,
      "loss": 0.748,
      "step": 1530
    },
    {
      "epoch": 0.2591283863368669,
      "grad_norm": 2.6089861003232055,
      "learning_rate": 2.5554080044387344e-05,
      "loss": 0.7127,
      "step": 1540
    },
    {
      "epoch": 0.2608110381961972,
      "grad_norm": 3.2047926120640526,
      "learning_rate": 2.5497010213506825e-05,
      "loss": 0.7262,
      "step": 1550
    },
    {
      "epoch": 0.26249369005552753,
      "grad_norm": 1.4899957348295265,
      "learning_rate": 2.5439641041329128e-05,
      "loss": 0.7122,
      "step": 1560
    },
    {
      "epoch": 0.26417634191485784,
      "grad_norm": 3.595968473922136,
      "learning_rate": 2.5381974163840313e-05,
      "loss": 0.7092,
      "step": 1570
    },
    {
      "epoch": 0.26585899377418815,
      "grad_norm": 3.5232117574234003,
      "learning_rate": 2.532401122551605e-05,
      "loss": 0.6924,
      "step": 1580
    },
    {
      "epoch": 0.2675416456335184,
      "grad_norm": 2.618947453668302,
      "learning_rate": 2.526575387927473e-05,
      "loss": 0.7067,
      "step": 1590
    },
    {
      "epoch": 0.2692242974928487,
      "grad_norm": 3.6282673284589566,
      "learning_rate": 2.52072037864303e-05,
      "loss": 0.6945,
      "step": 1600
    },
    {
      "epoch": 0.270906949352179,
      "grad_norm": 2.2274379147013,
      "learning_rate": 2.5148362616644926e-05,
      "loss": 0.6727,
      "step": 1610
    },
    {
      "epoch": 0.2725896012115093,
      "grad_norm": 2.823867881580523,
      "learning_rate": 2.508923204788135e-05,
      "loss": 0.7158,
      "step": 1620
    },
    {
      "epoch": 0.27427225307083963,
      "grad_norm": 2.0118901151982245,
      "learning_rate": 2.5029813766355062e-05,
      "loss": 0.7422,
      "step": 1630
    },
    {
      "epoch": 0.27595490493016994,
      "grad_norm": 1.2843584175617246,
      "learning_rate": 2.4970109466486202e-05,
      "loss": 0.7099,
      "step": 1640
    },
    {
      "epoch": 0.27763755678950025,
      "grad_norm": 3.5059277881120914,
      "learning_rate": 2.491012085085122e-05,
      "loss": 0.7164,
      "step": 1650
    },
    {
      "epoch": 0.27932020864883056,
      "grad_norm": 1.7458993688338285,
      "learning_rate": 2.4849849630134384e-05,
      "loss": 0.6901,
      "step": 1660
    },
    {
      "epoch": 0.28100286050816087,
      "grad_norm": 5.813346226937464,
      "learning_rate": 2.4789297523078924e-05,
      "loss": 0.7181,
      "step": 1670
    },
    {
      "epoch": 0.2826855123674912,
      "grad_norm": 2.0515286491489237,
      "learning_rate": 2.4728466256438072e-05,
      "loss": 0.7431,
      "step": 1680
    },
    {
      "epoch": 0.2843681642268215,
      "grad_norm": 2.6702746679350375,
      "learning_rate": 2.4667357564925798e-05,
      "loss": 0.701,
      "step": 1690
    },
    {
      "epoch": 0.2860508160861518,
      "grad_norm": 2.707565805299449,
      "learning_rate": 2.460597319116735e-05,
      "loss": 0.6725,
      "step": 1700
    },
    {
      "epoch": 0.2877334679454821,
      "grad_norm": 1.7994267796032153,
      "learning_rate": 2.4544314885649552e-05,
      "loss": 0.7043,
      "step": 1710
    },
    {
      "epoch": 0.2894161198048124,
      "grad_norm": 2.240627477157692,
      "learning_rate": 2.4482384406670883e-05,
      "loss": 0.7337,
      "step": 1720
    },
    {
      "epoch": 0.29109877166414266,
      "grad_norm": 1.4093208691675285,
      "learning_rate": 2.4420183520291354e-05,
      "loss": 0.706,
      "step": 1730
    },
    {
      "epoch": 0.292781423523473,
      "grad_norm": 1.5799653304195502,
      "learning_rate": 2.4357714000282127e-05,
      "loss": 0.7254,
      "step": 1740
    },
    {
      "epoch": 0.2944640753828033,
      "grad_norm": 1.8282839714116759,
      "learning_rate": 2.4294977628074938e-05,
      "loss": 0.68,
      "step": 1750
    },
    {
      "epoch": 0.2961467272421336,
      "grad_norm": 13.490769798309381,
      "learning_rate": 2.42319761927113e-05,
      "loss": 0.6984,
      "step": 1760
    },
    {
      "epoch": 0.2978293791014639,
      "grad_norm": 1.1660842236351188,
      "learning_rate": 2.4168711490791484e-05,
      "loss": 0.6893,
      "step": 1770
    },
    {
      "epoch": 0.2995120309607942,
      "grad_norm": 1.4880113732457052,
      "learning_rate": 2.4105185326423286e-05,
      "loss": 0.7371,
      "step": 1780
    },
    {
      "epoch": 0.3011946828201245,
      "grad_norm": 1.9796491202207207,
      "learning_rate": 2.4041399511170574e-05,
      "loss": 0.7372,
      "step": 1790
    },
    {
      "epoch": 0.3028773346794548,
      "grad_norm": 3.2861914347482846,
      "learning_rate": 2.3977355864001635e-05,
      "loss": 0.7145,
      "step": 1800
    },
    {
      "epoch": 0.30455998653878513,
      "grad_norm": 3.8536888582450595,
      "learning_rate": 2.3913056211237304e-05,
      "loss": 0.7244,
      "step": 1810
    },
    {
      "epoch": 0.30624263839811544,
      "grad_norm": 2.250827213388724,
      "learning_rate": 2.3848502386498866e-05,
      "loss": 0.7444,
      "step": 1820
    },
    {
      "epoch": 0.30792529025744575,
      "grad_norm": 1.6760548188250846,
      "learning_rate": 2.3783696230655802e-05,
      "loss": 0.7415,
      "step": 1830
    },
    {
      "epoch": 0.30960794211677606,
      "grad_norm": 2.83690011157284,
      "learning_rate": 2.371863959177326e-05,
      "loss": 0.6769,
      "step": 1840
    },
    {
      "epoch": 0.31129059397610637,
      "grad_norm": 3.6586666108883037,
      "learning_rate": 2.365333432505937e-05,
      "loss": 0.6981,
      "step": 1850
    },
    {
      "epoch": 0.3129732458354367,
      "grad_norm": 2.967916913846329,
      "learning_rate": 2.3587782292812323e-05,
      "loss": 0.7235,
      "step": 1860
    },
    {
      "epoch": 0.31465589769476693,
      "grad_norm": 2.7607388194454607,
      "learning_rate": 2.35219853643673e-05,
      "loss": 0.7202,
      "step": 1870
    },
    {
      "epoch": 0.31633854955409724,
      "grad_norm": 2.5793375573884925,
      "learning_rate": 2.3455945416043132e-05,
      "loss": 0.7437,
      "step": 1880
    },
    {
      "epoch": 0.31802120141342755,
      "grad_norm": 1.6474727320404343,
      "learning_rate": 2.338966433108879e-05,
      "loss": 0.6664,
      "step": 1890
    },
    {
      "epoch": 0.31970385327275785,
      "grad_norm": 2.8252072958720102,
      "learning_rate": 2.3323143999629712e-05,
      "loss": 0.6641,
      "step": 1900
    },
    {
      "epoch": 0.32138650513208816,
      "grad_norm": 1.8240997471681801,
      "learning_rate": 2.3256386318613877e-05,
      "loss": 0.7029,
      "step": 1910
    },
    {
      "epoch": 0.32306915699141847,
      "grad_norm": 1.7867386563705459,
      "learning_rate": 2.318939319175771e-05,
      "loss": 0.6806,
      "step": 1920
    },
    {
      "epoch": 0.3247518088507488,
      "grad_norm": 2.519605910503542,
      "learning_rate": 2.3122166529491822e-05,
      "loss": 0.6837,
      "step": 1930
    },
    {
      "epoch": 0.3264344607100791,
      "grad_norm": 1.5090617010699425,
      "learning_rate": 2.3054708248906483e-05,
      "loss": 0.7201,
      "step": 1940
    },
    {
      "epoch": 0.3281171125694094,
      "grad_norm": 1.85373627743108,
      "learning_rate": 2.2987020273696996e-05,
      "loss": 0.7007,
      "step": 1950
    },
    {
      "epoch": 0.3297997644287397,
      "grad_norm": 3.1668783585579714,
      "learning_rate": 2.2919104534108825e-05,
      "loss": 0.6827,
      "step": 1960
    },
    {
      "epoch": 0.33148241628807,
      "grad_norm": 2.802801151344103,
      "learning_rate": 2.2850962966882547e-05,
      "loss": 0.733,
      "step": 1970
    },
    {
      "epoch": 0.3331650681474003,
      "grad_norm": 4.351080547606847,
      "learning_rate": 2.278259751519861e-05,
      "loss": 0.7125,
      "step": 1980
    },
    {
      "epoch": 0.33484772000673063,
      "grad_norm": 1.4284076903376268,
      "learning_rate": 2.2714010128621957e-05,
      "loss": 0.7166,
      "step": 1990
    },
    {
      "epoch": 0.3365303718660609,
      "grad_norm": 1.4047557097137526,
      "learning_rate": 2.2645202763046385e-05,
      "loss": 0.7306,
      "step": 2000
    },
    {
      "epoch": 0.3382130237253912,
      "grad_norm": 1.7792478988054032,
      "learning_rate": 2.2576177380638808e-05,
      "loss": 0.6819,
      "step": 2010
    },
    {
      "epoch": 0.3398956755847215,
      "grad_norm": 1.541280861427235,
      "learning_rate": 2.2506935949783277e-05,
      "loss": 0.7188,
      "step": 2020
    },
    {
      "epoch": 0.3415783274440518,
      "grad_norm": 3.1010621866032717,
      "learning_rate": 2.243748044502485e-05,
      "loss": 0.7262,
      "step": 2030
    },
    {
      "epoch": 0.3432609793033821,
      "grad_norm": 2.3564311476470245,
      "learning_rate": 2.236781284701332e-05,
      "loss": 0.6862,
      "step": 2040
    },
    {
      "epoch": 0.3449436311627124,
      "grad_norm": 2.277474191898381,
      "learning_rate": 2.229793514244666e-05,
      "loss": 0.7086,
      "step": 2050
    },
    {
      "epoch": 0.34662628302204274,
      "grad_norm": 1.9189792309740905,
      "learning_rate": 2.222784932401445e-05,
      "loss": 0.6785,
      "step": 2060
    },
    {
      "epoch": 0.34830893488137304,
      "grad_norm": 8.557409847942546,
      "learning_rate": 2.2157557390341e-05,
      "loss": 0.7162,
      "step": 2070
    },
    {
      "epoch": 0.34999158674070335,
      "grad_norm": 2.0580462138378324,
      "learning_rate": 2.2087061345928375e-05,
      "loss": 0.663,
      "step": 2080
    },
    {
      "epoch": 0.35167423860003366,
      "grad_norm": 11.262172640870563,
      "learning_rate": 2.2016363201099205e-05,
      "loss": 0.7363,
      "step": 2090
    },
    {
      "epoch": 0.35335689045936397,
      "grad_norm": 12.967220942427355,
      "learning_rate": 2.1945464971939424e-05,
      "loss": 0.7169,
      "step": 2100
    },
    {
      "epoch": 0.3550395423186943,
      "grad_norm": 31.131995950709616,
      "learning_rate": 2.1874368680240692e-05,
      "loss": 0.7109,
      "step": 2110
    },
    {
      "epoch": 0.3567221941780246,
      "grad_norm": 3.759967527046392,
      "learning_rate": 2.1803076353442806e-05,
      "loss": 0.72,
      "step": 2120
    },
    {
      "epoch": 0.3584048460373549,
      "grad_norm": 2.0854209231683494,
      "learning_rate": 2.1731590024575848e-05,
      "loss": 0.7443,
      "step": 2130
    },
    {
      "epoch": 0.36008749789668515,
      "grad_norm": 1.550491190575623,
      "learning_rate": 2.165991173220223e-05,
      "loss": 0.7272,
      "step": 2140
    },
    {
      "epoch": 0.36177014975601546,
      "grad_norm": 3.184378797474582,
      "learning_rate": 2.158804352035855e-05,
      "loss": 0.6796,
      "step": 2150
    },
    {
      "epoch": 0.36345280161534577,
      "grad_norm": 2.2017388277936623,
      "learning_rate": 2.1515987438497295e-05,
      "loss": 0.7079,
      "step": 2160
    },
    {
      "epoch": 0.3651354534746761,
      "grad_norm": 1.8785352439752243,
      "learning_rate": 2.1443745541428416e-05,
      "loss": 0.7157,
      "step": 2170
    },
    {
      "epoch": 0.3668181053340064,
      "grad_norm": 5.799376408169486,
      "learning_rate": 2.137131988926072e-05,
      "loss": 0.7147,
      "step": 2180
    },
    {
      "epoch": 0.3685007571933367,
      "grad_norm": 1.463705775425984,
      "learning_rate": 2.129871254734312e-05,
      "loss": 0.6965,
      "step": 2190
    },
    {
      "epoch": 0.370183409052667,
      "grad_norm": 2.8778655656985435,
      "learning_rate": 2.122592558620575e-05,
      "loss": 0.6853,
      "step": 2200
    },
    {
      "epoch": 0.3718660609119973,
      "grad_norm": 2.830232793716089,
      "learning_rate": 2.1152961081500906e-05,
      "loss": 0.6682,
      "step": 2210
    },
    {
      "epoch": 0.3735487127713276,
      "grad_norm": 7.790226305121679,
      "learning_rate": 2.1079821113943863e-05,
      "loss": 0.6855,
      "step": 2220
    },
    {
      "epoch": 0.3752313646306579,
      "grad_norm": 1.635629077245975,
      "learning_rate": 2.100650776925353e-05,
      "loss": 0.7156,
      "step": 2230
    },
    {
      "epoch": 0.37691401648998824,
      "grad_norm": 1.6353332190474046,
      "learning_rate": 2.0933023138092995e-05,
      "loss": 0.7083,
      "step": 2240
    },
    {
      "epoch": 0.37859666834931854,
      "grad_norm": 2.4020219788955237,
      "learning_rate": 2.0859369316009877e-05,
      "loss": 0.6873,
      "step": 2250
    },
    {
      "epoch": 0.38027932020864885,
      "grad_norm": 1.5183766349594623,
      "learning_rate": 2.0785548403376592e-05,
      "loss": 0.6882,
      "step": 2260
    },
    {
      "epoch": 0.38196197206797916,
      "grad_norm": 3.9509980884297313,
      "learning_rate": 2.0711562505330437e-05,
      "loss": 0.7037,
      "step": 2270
    },
    {
      "epoch": 0.3836446239273094,
      "grad_norm": 1.080804508141519,
      "learning_rate": 2.063741373171357e-05,
      "loss": 0.7124,
      "step": 2280
    },
    {
      "epoch": 0.3853272757866397,
      "grad_norm": 2.4771901968792975,
      "learning_rate": 2.0563104197012847e-05,
      "loss": 0.7348,
      "step": 2290
    },
    {
      "epoch": 0.38700992764597003,
      "grad_norm": 1.4012276716265397,
      "learning_rate": 2.048863602029951e-05,
      "loss": 0.7157,
      "step": 2300
    },
    {
      "epoch": 0.38869257950530034,
      "grad_norm": 1.8764316814985353,
      "learning_rate": 2.0414011325168777e-05,
      "loss": 0.6932,
      "step": 2310
    },
    {
      "epoch": 0.39037523136463065,
      "grad_norm": 3.2158348437646183,
      "learning_rate": 2.0339232239679252e-05,
      "loss": 0.6789,
      "step": 2320
    },
    {
      "epoch": 0.39205788322396096,
      "grad_norm": 2.468508068736803,
      "learning_rate": 2.026430089629229e-05,
      "loss": 0.7163,
      "step": 2330
    },
    {
      "epoch": 0.39374053508329127,
      "grad_norm": 2.0179845911955656,
      "learning_rate": 2.0189219431811123e-05,
      "loss": 0.6566,
      "step": 2340
    },
    {
      "epoch": 0.3954231869426216,
      "grad_norm": 0.9653345936734927,
      "learning_rate": 2.0113989987319988e-05,
      "loss": 0.6939,
      "step": 2350
    },
    {
      "epoch": 0.3971058388019519,
      "grad_norm": 1.568403155522187,
      "learning_rate": 2.0038614708123023e-05,
      "loss": 0.7288,
      "step": 2360
    },
    {
      "epoch": 0.3987884906612822,
      "grad_norm": 5.098482519541986,
      "learning_rate": 1.996309574368311e-05,
      "loss": 0.7217,
      "step": 2370
    },
    {
      "epoch": 0.4004711425206125,
      "grad_norm": 1.656770228609968,
      "learning_rate": 1.9887435247560586e-05,
      "loss": 0.743,
      "step": 2380
    },
    {
      "epoch": 0.4021537943799428,
      "grad_norm": 1.679661595344395,
      "learning_rate": 1.981163537735181e-05,
      "loss": 0.724,
      "step": 2390
    },
    {
      "epoch": 0.4038364462392731,
      "grad_norm": 2.057009585071239,
      "learning_rate": 1.9735698294627644e-05,
      "loss": 0.7228,
      "step": 2400
    }
  ],
  "logging_steps": 10,
  "max_steps": 5943,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 400,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.0934282360979456e+19,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}
|
|