{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9956108266276518,
  "eval_steps": 500,
  "global_step": 1023,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.029261155815654718,
      "grad_norm": 9.167642141908177,
      "learning_rate": 9.615384615384617e-07,
      "loss": 0.8861,
      "step": 10
    },
    {
      "epoch": 0.058522311631309436,
      "grad_norm": 2.1214939165936095,
      "learning_rate": 1.9230769230769234e-06,
      "loss": 0.8326,
      "step": 20
    },
    {
      "epoch": 0.08778346744696415,
      "grad_norm": 1.4860143285284373,
      "learning_rate": 2.8846153846153845e-06,
      "loss": 0.7582,
      "step": 30
    },
    {
      "epoch": 0.11704462326261887,
      "grad_norm": 1.2064803930790473,
      "learning_rate": 3.846153846153847e-06,
      "loss": 0.7319,
      "step": 40
    },
    {
      "epoch": 0.14630577907827358,
      "grad_norm": 1.2325754199204868,
      "learning_rate": 4.807692307692308e-06,
      "loss": 0.715,
      "step": 50
    },
    {
      "epoch": 0.1755669348939283,
      "grad_norm": 1.1527080287428282,
      "learning_rate": 4.999162611434756e-06,
      "loss": 0.6993,
      "step": 60
    },
    {
      "epoch": 0.20482809070958302,
      "grad_norm": 1.0684614331267772,
      "learning_rate": 4.995761681803464e-06,
      "loss": 0.6873,
      "step": 70
    },
    {
      "epoch": 0.23408924652523774,
      "grad_norm": 0.7959876001332168,
      "learning_rate": 4.989748431344101e-06,
      "loss": 0.6767,
      "step": 80
    },
    {
      "epoch": 0.26335040234089246,
      "grad_norm": 0.5559455782011308,
      "learning_rate": 4.981129154142974e-06,
      "loss": 0.669,
      "step": 90
    },
    {
      "epoch": 0.29261155815654716,
      "grad_norm": 0.5142664344625067,
      "learning_rate": 4.969912872021965e-06,
      "loss": 0.6592,
      "step": 100
    },
    {
      "epoch": 0.3218727139722019,
      "grad_norm": 0.4068010226081863,
      "learning_rate": 4.956111325095368e-06,
      "loss": 0.667,
      "step": 110
    },
    {
      "epoch": 0.3511338697878566,
      "grad_norm": 0.3655542210299479,
      "learning_rate": 4.9397389594814805e-06,
      "loss": 0.6714,
      "step": 120
    },
    {
      "epoch": 0.38039502560351135,
      "grad_norm": 0.3768366422263747,
      "learning_rate": 4.920812912181809e-06,
      "loss": 0.6549,
      "step": 130
    },
    {
      "epoch": 0.40965618141916604,
      "grad_norm": 0.37566474114009984,
      "learning_rate": 4.899352993143717e-06,
      "loss": 0.6553,
      "step": 140
    },
    {
      "epoch": 0.4389173372348208,
      "grad_norm": 0.3612379084735751,
      "learning_rate": 4.875381664525304e-06,
      "loss": 0.6504,
      "step": 150
    },
    {
      "epoch": 0.4681784930504755,
      "grad_norm": 0.38526971126907095,
      "learning_rate": 4.848924017184196e-06,
      "loss": 0.6521,
      "step": 160
    },
    {
      "epoch": 0.49743964886613024,
      "grad_norm": 0.360842741539703,
      "learning_rate": 4.820007744414875e-06,
      "loss": 0.6571,
      "step": 170
    },
    {
      "epoch": 0.5267008046817849,
      "grad_norm": 0.34250107685671755,
      "learning_rate": 4.788663112962028e-06,
      "loss": 0.6566,
      "step": 180
    },
    {
      "epoch": 0.5559619604974396,
      "grad_norm": 0.35953985379031617,
      "learning_rate": 4.7549229313402636e-06,
      "loss": 0.645,
      "step": 190
    },
    {
      "epoch": 0.5852231163130943,
      "grad_norm": 0.3405632867922337,
      "learning_rate": 4.71882251549334e-06,
      "loss": 0.6461,
      "step": 200
    },
    {
      "epoch": 0.6144842721287491,
      "grad_norm": 0.3372605973780747,
      "learning_rate": 4.680399651828873e-06,
      "loss": 0.6526,
      "step": 210
    },
    {
      "epoch": 0.6437454279444038,
      "grad_norm": 0.3509569638126989,
      "learning_rate": 4.639694557667194e-06,
      "loss": 0.6404,
      "step": 220
    },
    {
      "epoch": 0.6730065837600585,
      "grad_norm": 0.34631382508847197,
      "learning_rate": 4.596749839145763e-06,
      "loss": 0.6403,
      "step": 230
    },
    {
      "epoch": 0.7022677395757132,
      "grad_norm": 0.32023219081524656,
      "learning_rate": 4.551610446623209e-06,
      "loss": 0.6472,
      "step": 240
    },
    {
      "epoch": 0.731528895391368,
      "grad_norm": 0.3457771028484765,
      "learning_rate": 4.504323627629662e-06,
      "loss": 0.6455,
      "step": 250
    },
    {
      "epoch": 0.7607900512070227,
      "grad_norm": 0.34127281622446637,
      "learning_rate": 4.454938877412623e-06,
      "loss": 0.6481,
      "step": 260
    },
    {
      "epoch": 0.7900512070226774,
      "grad_norm": 0.3765210731050284,
      "learning_rate": 4.4035078871301565e-06,
      "loss": 0.6503,
      "step": 270
    },
    {
      "epoch": 0.8193123628383321,
      "grad_norm": 0.3469100901523423,
      "learning_rate": 4.350084489745601e-06,
      "loss": 0.6491,
      "step": 280
    },
    {
      "epoch": 0.8485735186539868,
      "grad_norm": 0.3308234776132991,
      "learning_rate": 4.294724603680461e-06,
      "loss": 0.6413,
      "step": 290
    },
    {
      "epoch": 0.8778346744696416,
      "grad_norm": 0.3435672110063973,
      "learning_rate": 4.23748617428443e-06,
      "loss": 0.6331,
      "step": 300
    },
    {
      "epoch": 0.9070958302852963,
      "grad_norm": 0.35464153574051116,
      "learning_rate": 4.178429113183835e-06,
      "loss": 0.6463,
      "step": 310
    },
    {
      "epoch": 0.936356986100951,
      "grad_norm": 0.3250073753317316,
      "learning_rate": 4.117615235571962e-06,
      "loss": 0.6381,
      "step": 320
    },
    {
      "epoch": 0.9656181419166057,
      "grad_norm": 0.37939565000110126,
      "learning_rate": 4.05510819550692e-06,
      "loss": 0.6414,
      "step": 330
    },
    {
      "epoch": 0.9948792977322605,
      "grad_norm": 0.3580142785620277,
      "learning_rate": 3.990973419284758e-06,
      "loss": 0.6374,
      "step": 340
    },
    {
      "epoch": 0.9978054133138259,
      "eval_loss": 0.641979992389679,
      "eval_runtime": 346.288,
      "eval_samples_per_second": 26.593,
      "eval_steps_per_second": 0.416,
      "step": 341
    },
    {
      "epoch": 1.025237746891002,
      "grad_norm": 0.3644926759716025,
      "learning_rate": 3.925278036957578e-06,
      "loss": 0.6553,
      "step": 350
    },
    {
      "epoch": 1.054498902706657,
      "grad_norm": 0.38301527457304524,
      "learning_rate": 3.858090812068314e-06,
      "loss": 0.6099,
      "step": 360
    },
    {
      "epoch": 1.0837600585223117,
      "grad_norm": 0.3546790345224804,
      "learning_rate": 3.789482069675738e-06,
      "loss": 0.6133,
      "step": 370
    },
    {
      "epoch": 1.1130212143379663,
      "grad_norm": 0.3146730032039869,
      "learning_rate": 3.719523622745026e-06,
      "loss": 0.6105,
      "step": 380
    },
    {
      "epoch": 1.142282370153621,
      "grad_norm": 0.38139306525912997,
      "learning_rate": 3.6482886969809204e-06,
      "loss": 0.6158,
      "step": 390
    },
    {
      "epoch": 1.1715435259692757,
      "grad_norm": 0.3468751817352307,
      "learning_rate": 3.575851854182175e-06,
      "loss": 0.6172,
      "step": 400
    },
    {
      "epoch": 1.2008046817849305,
      "grad_norm": 0.36354294198162557,
      "learning_rate": 3.502288914197513e-06,
      "loss": 0.6169,
      "step": 410
    },
    {
      "epoch": 1.2300658376005853,
      "grad_norm": 0.32988905808120284,
      "learning_rate": 3.4276768755647748e-06,
      "loss": 0.6049,
      "step": 420
    },
    {
      "epoch": 1.2593269934162399,
      "grad_norm": 0.3173866656888502,
      "learning_rate": 3.352093834916329e-06,
      "loss": 0.615,
      "step": 430
    },
    {
      "epoch": 1.2885881492318947,
      "grad_norm": 0.39499650219748045,
      "learning_rate": 3.275618905235109e-06,
      "loss": 0.6075,
      "step": 440
    },
    {
      "epoch": 1.3178493050475493,
      "grad_norm": 0.34196576749131313,
      "learning_rate": 3.1983321330468235e-06,
      "loss": 0.6114,
      "step": 450
    },
    {
      "epoch": 1.347110460863204,
      "grad_norm": 0.323009005146513,
      "learning_rate": 3.1203144146350396e-06,
      "loss": 0.6089,
      "step": 460
    },
    {
      "epoch": 1.3763716166788589,
      "grad_norm": 0.31817976264323716,
      "learning_rate": 3.0416474113668055e-06,
      "loss": 0.6062,
      "step": 470
    },
    {
      "epoch": 1.4056327724945135,
      "grad_norm": 0.3262356503952236,
      "learning_rate": 2.9624134642174706e-06,
      "loss": 0.6068,
      "step": 480
    },
    {
      "epoch": 1.4348939283101683,
      "grad_norm": 0.3457424267563388,
      "learning_rate": 2.8826955075841527e-06,
      "loss": 0.6131,
      "step": 490
    },
    {
      "epoch": 1.464155084125823,
      "grad_norm": 0.3307935925728294,
      "learning_rate": 2.802576982478068e-06,
      "loss": 0.6016,
      "step": 500
    },
    {
      "epoch": 1.4934162399414777,
      "grad_norm": 0.3311781576210982,
      "learning_rate": 2.7221417491865885e-06,
      "loss": 0.6096,
      "step": 510
    },
    {
      "epoch": 1.5226773957571325,
      "grad_norm": 0.34216041994362073,
      "learning_rate": 2.6414739994964416e-06,
      "loss": 0.6119,
      "step": 520
    },
    {
      "epoch": 1.5519385515727873,
      "grad_norm": 0.33367655978352256,
      "learning_rate": 2.56065816856993e-06,
      "loss": 0.6105,
      "step": 530
    },
    {
      "epoch": 1.5811997073884418,
      "grad_norm": 0.33131489612390447,
      "learning_rate": 2.4797788465664047e-06,
      "loss": 0.6171,
      "step": 540
    },
    {
      "epoch": 1.6104608632040964,
      "grad_norm": 0.3386564765273768,
      "learning_rate": 2.398920690101506e-06,
      "loss": 0.6079,
      "step": 550
    },
    {
      "epoch": 1.6397220190197512,
      "grad_norm": 0.3422865962147514,
      "learning_rate": 2.318168333636849e-06,
      "loss": 0.6097,
      "step": 560
    },
    {
      "epoch": 1.668983174835406,
      "grad_norm": 0.33015186307929917,
      "learning_rate": 2.2376063008928832e-06,
      "loss": 0.6002,
      "step": 570
    },
    {
      "epoch": 1.6982443306510606,
      "grad_norm": 0.330356253324613,
      "learning_rate": 2.1573189163776857e-06,
      "loss": 0.6098,
      "step": 580
    },
    {
      "epoch": 1.7275054864667154,
      "grad_norm": 0.32143316840124014,
      "learning_rate": 2.0773902171242485e-06,
      "loss": 0.6095,
      "step": 590
    },
    {
      "epoch": 1.7567666422823702,
      "grad_norm": 0.33148914076976616,
      "learning_rate": 1.997903864728688e-06,
      "loss": 0.6066,
      "step": 600
    },
    {
      "epoch": 1.7860277980980248,
      "grad_norm": 0.3480703652832865,
      "learning_rate": 1.9189430577814063e-06,
      "loss": 0.609,
      "step": 610
    },
    {
      "epoch": 1.8152889539136796,
      "grad_norm": 0.31334239803694514,
      "learning_rate": 1.8405904447828982e-06,
      "loss": 0.6093,
      "step": 620
    },
    {
      "epoch": 1.8445501097293344,
      "grad_norm": 0.31827545220106257,
      "learning_rate": 1.762928037635325e-06,
      "loss": 0.6086,
      "step": 630
    },
    {
      "epoch": 1.873811265544989,
      "grad_norm": 0.3155798256896876,
      "learning_rate": 1.6860371258004293e-06,
      "loss": 0.6033,
      "step": 640
    },
    {
      "epoch": 1.9030724213606436,
      "grad_norm": 0.3390645406779253,
      "learning_rate": 1.6099981912136216e-06,
      "loss": 0.609,
      "step": 650
    },
    {
      "epoch": 1.9323335771762986,
      "grad_norm": 0.30392526393365127,
      "learning_rate": 1.5348908240433125e-06,
      "loss": 0.6096,
      "step": 660
    },
    {
      "epoch": 1.9615947329919532,
      "grad_norm": 0.3261239230385701,
      "learning_rate": 1.4607936393836542e-06,
      "loss": 0.5984,
      "step": 670
    },
    {
      "epoch": 1.9908558888076078,
      "grad_norm": 0.3115182329109519,
      "learning_rate": 1.387784194967904e-06,
      "loss": 0.6129,
      "step": 680
    },
    {
      "epoch": 1.9967081199707388,
      "eval_loss": 0.6351735591888428,
      "eval_runtime": 347.6382,
      "eval_samples_per_second": 26.49,
      "eval_steps_per_second": 0.414,
      "step": 682
    },
    {
      "epoch": 2.0212143379663496,
      "grad_norm": 0.32036924437042374,
      "learning_rate": 1.3159389099885194e-06,
      "loss": 0.633,
      "step": 690
    },
    {
      "epoch": 2.050475493782004,
      "grad_norm": 0.324600013295968,
      "learning_rate": 1.2453329851089787e-06,
      "loss": 0.5844,
      "step": 700
    },
    {
      "epoch": 2.0797366495976592,
      "grad_norm": 0.3107112923712249,
      "learning_rate": 1.1760403237510255e-06,
      "loss": 0.5866,
      "step": 710
    },
    {
      "epoch": 2.108997805413314,
      "grad_norm": 0.3270752834403749,
      "learning_rate": 1.108133454739757e-06,
      "loss": 0.5876,
      "step": 720
    },
    {
      "epoch": 2.1382589612289684,
      "grad_norm": 0.31301220771612853,
      "learning_rate": 1.0416834563874873e-06,
      "loss": 0.5892,
      "step": 730
    },
    {
      "epoch": 2.1675201170446234,
      "grad_norm": 0.33747585616696024,
      "learning_rate": 9.767598820958859e-07,
      "loss": 0.5939,
      "step": 740
    },
    {
      "epoch": 2.196781272860278,
      "grad_norm": 0.3224610267552276,
      "learning_rate": 9.134306875542273e-07,
      "loss": 0.5927,
      "step": 750
    },
    {
      "epoch": 2.2260424286759326,
      "grad_norm": 0.3274463198805806,
      "learning_rate": 8.517621596099837e-07,
      "loss": 0.5898,
      "step": 760
    },
    {
      "epoch": 2.255303584491587,
      "grad_norm": 0.3096086967508753,
      "learning_rate": 7.918188468861892e-07,
      "loss": 0.588,
      "step": 770
    },
    {
      "epoch": 2.284564740307242,
      "grad_norm": 0.3134498246737174,
      "learning_rate": 7.336634922182184e-07,
      "loss": 0.5875,
      "step": 780
    },
    {
      "epoch": 2.313825896122897,
      "grad_norm": 0.3003387704727999,
      "learning_rate": 6.773569669806765e-07,
      "loss": 0.5854,
      "step": 790
    },
    {
      "epoch": 2.3430870519385514,
      "grad_norm": 0.31046829316377283,
      "learning_rate": 6.22958207373166e-07,
      "loss": 0.5931,
      "step": 800
    },
    {
      "epoch": 2.3723482077542064,
      "grad_norm": 0.3058504002323456,
      "learning_rate": 5.705241527315924e-07,
      "loss": 0.5881,
      "step": 810
    },
    {
      "epoch": 2.401609363569861,
      "grad_norm": 0.3148001544813671,
      "learning_rate": 5.201096859296056e-07,
      "loss": 0.5943,
      "step": 820
    },
    {
      "epoch": 2.4308705193855156,
      "grad_norm": 0.29314718822976743,
      "learning_rate": 4.717675759325363e-07,
      "loss": 0.5866,
      "step": 830
    },
    {
      "epoch": 2.4601316752011706,
      "grad_norm": 0.30348786991224425,
      "learning_rate": 4.255484225639739e-07,
      "loss": 0.5869,
      "step": 840
    },
    {
      "epoch": 2.489392831016825,
      "grad_norm": 0.31048470531749484,
      "learning_rate": 3.8150060354278373e-07,
      "loss": 0.5886,
      "step": 850
    },
    {
      "epoch": 2.5186539868324798,
      "grad_norm": 0.31385687702363135,
      "learning_rate": 3.396702238460167e-07,
      "loss": 0.5818,
      "step": 860
    },
    {
      "epoch": 2.547915142648135,
      "grad_norm": 0.3077001922479862,
      "learning_rate": 3.001010674506932e-07,
      "loss": 0.5842,
      "step": 870
    },
    {
      "epoch": 2.5771762984637894,
      "grad_norm": 0.2926705998755526,
      "learning_rate": 2.628345515049954e-07,
      "loss": 0.5882,
      "step": 880
    },
    {
      "epoch": 2.606437454279444,
      "grad_norm": 0.2995165809811577,
      "learning_rate": 2.2790968297681792e-07,
      "loss": 0.5875,
      "step": 890
    },
    {
      "epoch": 2.6356986100950985,
      "grad_norm": 0.30466800666878485,
      "learning_rate": 1.9536301782506373e-07,
      "loss": 0.5921,
      "step": 900
    },
    {
      "epoch": 2.6649597659107536,
      "grad_norm": 0.3053293282467882,
      "learning_rate": 1.652286227364211e-07,
      "loss": 0.5924,
      "step": 910
    },
    {
      "epoch": 2.694220921726408,
      "grad_norm": 0.3137494141095099,
      "learning_rate": 1.375380394676623e-07,
      "loss": 0.5864,
      "step": 920
    },
    {
      "epoch": 2.723482077542063,
      "grad_norm": 0.31676507013297805,
      "learning_rate": 1.1232025183080141e-07,
      "loss": 0.5898,
      "step": 930
    },
    {
      "epoch": 2.7527432333577178,
      "grad_norm": 0.30895835779314046,
      "learning_rate": 8.960165535565168e-08,
      "loss": 0.5888,
      "step": 940
    },
    {
      "epoch": 2.7820043891733723,
      "grad_norm": 0.2949131063367962,
      "learning_rate": 6.940602966155341e-08,
      "loss": 0.5875,
      "step": 950
    },
    {
      "epoch": 2.811265544989027,
      "grad_norm": 0.3073000958086021,
      "learning_rate": 5.175451356717737e-08,
      "loss": 0.5914,
      "step": 960
    },
    {
      "epoch": 2.840526700804682,
      "grad_norm": 0.2952136468986014,
      "learning_rate": 3.6665582964462986e-08,
      "loss": 0.5871,
      "step": 970
    },
    {
      "epoch": 2.8697878566203365,
      "grad_norm": 0.30029138721190923,
      "learning_rate": 2.41550314798511e-08,
      "loss": 0.5869,
      "step": 980
    },
    {
      "epoch": 2.899049012435991,
      "grad_norm": 0.29808129794254773,
      "learning_rate": 1.4235953943048942e-08,
      "loss": 0.5796,
      "step": 990
    },
    {
      "epoch": 2.928310168251646,
      "grad_norm": 0.3005338199882765,
      "learning_rate": 6.918732680635809e-09,
      "loss": 0.5876,
      "step": 1000
    },
    {
      "epoch": 2.9575713240673007,
      "grad_norm": 0.3005090950716719,
      "learning_rate": 2.21102664885231e-09,
      "loss": 0.5789,
      "step": 1010
    },
    {
      "epoch": 2.9868324798829553,
      "grad_norm": 0.30127576183009913,
      "learning_rate": 1.177634169488573e-10,
      "loss": 0.591,
      "step": 1020
    },
    {
      "epoch": 2.9956108266276518,
      "eval_loss": 0.635776937007904,
      "eval_runtime": 344.6936,
      "eval_samples_per_second": 26.716,
      "eval_steps_per_second": 0.418,
      "step": 1023
    },
    {
      "epoch": 2.9956108266276518,
      "step": 1023,
      "total_flos": 2144987064041472.0,
      "train_loss": 0.6243289487685859,
      "train_runtime": 55081.5376,
      "train_samples_per_second": 9.529,
      "train_steps_per_second": 0.019
    }
  ],
  "logging_steps": 10,
  "max_steps": 1023,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2144987064041472.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}