{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 11.0, |
|
"eval_steps": 500, |
|
"global_step": 55154, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0997207818109294, |
|
"grad_norm": 2.346997022628784, |
|
"learning_rate": 4.5023932987634625e-05, |
|
"loss": 8.2424, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.1994415636218588, |
|
"grad_norm": 2.3684158325195312, |
|
"learning_rate": 4.0037893897088155e-05, |
|
"loss": 7.6851, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.2991623454327882, |
|
"grad_norm": 3.409303665161133, |
|
"learning_rate": 3.5051854806541686e-05, |
|
"loss": 7.4872, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.3988831272437176, |
|
"grad_norm": 2.615360975265503, |
|
"learning_rate": 3.0065815715995216e-05, |
|
"loss": 7.344, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.49860390905464697, |
|
"grad_norm": 3.5242176055908203, |
|
"learning_rate": 2.5079776625448743e-05, |
|
"loss": 7.2749, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.5983246908655764, |
|
"grad_norm": 3.690262794494629, |
|
"learning_rate": 2.0093737534902273e-05, |
|
"loss": 7.1657, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.6980454726765057, |
|
"grad_norm": 2.940692663192749, |
|
"learning_rate": 1.5107698444355806e-05, |
|
"loss": 7.1298, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.7977662544874352, |
|
"grad_norm": 2.9132378101348877, |
|
"learning_rate": 1.0121659353809334e-05, |
|
"loss": 7.0938, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.8974870362983646, |
|
"grad_norm": 3.101921558380127, |
|
"learning_rate": 5.135620263262864e-06, |
|
"loss": 7.0715, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.9972078181092939, |
|
"grad_norm": 3.2258358001708984, |
|
"learning_rate": 1.495811727163941e-07, |
|
"loss": 7.0478, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 1.0969285999202234, |
|
"grad_norm": 3.2722208499908447, |
|
"learning_rate": 3.903270841643399e-05, |
|
"loss": 7.0374, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 1.1966493817311528, |
|
"grad_norm": 5.218217849731445, |
|
"learning_rate": 3.803550059832469e-05, |
|
"loss": 7.0289, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 1.2963701635420821, |
|
"grad_norm": 3.466571807861328, |
|
"learning_rate": 3.70382927802154e-05, |
|
"loss": 6.9595, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 1.3960909453530115, |
|
"grad_norm": 3.688443183898926, |
|
"learning_rate": 3.6041084962106106e-05, |
|
"loss": 6.9267, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 1.4958117271639408, |
|
"grad_norm": 3.0426700115203857, |
|
"learning_rate": 3.504387714399681e-05, |
|
"loss": 6.8954, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 1.5955325089748702, |
|
"grad_norm": 3.7769949436187744, |
|
"learning_rate": 3.404666932588751e-05, |
|
"loss": 6.8657, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 1.6952532907857998, |
|
"grad_norm": 3.0776305198669434, |
|
"learning_rate": 3.304946150777822e-05, |
|
"loss": 6.8285, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 1.7949740725967291, |
|
"grad_norm": 3.350515604019165, |
|
"learning_rate": 3.2052253689668926e-05, |
|
"loss": 6.7948, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 1.8946948544076585, |
|
"grad_norm": 3.393035411834717, |
|
"learning_rate": 3.1055045871559636e-05, |
|
"loss": 6.7725, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 1.994415636218588, |
|
"grad_norm": 3.438401222229004, |
|
"learning_rate": 3.0057838053450336e-05, |
|
"loss": 6.7484, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 2.0941364180295174, |
|
"grad_norm": 4.042023181915283, |
|
"learning_rate": 2.9060630235341047e-05, |
|
"loss": 6.6939, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 2.193857199840447, |
|
"grad_norm": 3.3481028079986572, |
|
"learning_rate": 2.8063422417231757e-05, |
|
"loss": 6.6854, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 2.293577981651376, |
|
"grad_norm": 3.266961097717285, |
|
"learning_rate": 2.706820901475868e-05, |
|
"loss": 6.6555, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 2.3932987634623055, |
|
"grad_norm": 3.215405225753784, |
|
"learning_rate": 2.607100119664938e-05, |
|
"loss": 6.6713, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 2.493019545273235, |
|
"grad_norm": 3.380500316619873, |
|
"learning_rate": 2.507379337854009e-05, |
|
"loss": 6.6581, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 2.5927403270841642, |
|
"grad_norm": 3.536166191101074, |
|
"learning_rate": 2.4076585560430796e-05, |
|
"loss": 6.5945, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 2.6924611088950936, |
|
"grad_norm": 3.9319474697113037, |
|
"learning_rate": 2.30793777423215e-05, |
|
"loss": 6.6057, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 2.792181890706023, |
|
"grad_norm": 4.334239482879639, |
|
"learning_rate": 2.2084164339848425e-05, |
|
"loss": 6.5818, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 2.8919026725169523, |
|
"grad_norm": 4.093286514282227, |
|
"learning_rate": 2.1086956521739132e-05, |
|
"loss": 6.5732, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 2.9916234543278817, |
|
"grad_norm": 4.026576995849609, |
|
"learning_rate": 2.008974870362984e-05, |
|
"loss": 6.5627, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 3.0913442361388115, |
|
"grad_norm": 3.7285637855529785, |
|
"learning_rate": 1.9092540885520542e-05, |
|
"loss": 6.5268, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 3.191065017949741, |
|
"grad_norm": 3.7349226474761963, |
|
"learning_rate": 1.809533306741125e-05, |
|
"loss": 6.5388, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 3.29078579976067, |
|
"grad_norm": 3.5330066680908203, |
|
"learning_rate": 1.7098125249301956e-05, |
|
"loss": 6.5141, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 3.3905065815715996, |
|
"grad_norm": 3.6961631774902344, |
|
"learning_rate": 1.6100917431192662e-05, |
|
"loss": 6.5013, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 3.490227363382529, |
|
"grad_norm": 3.413053274154663, |
|
"learning_rate": 1.5103709613083367e-05, |
|
"loss": 6.4932, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 3.5899481451934583, |
|
"grad_norm": 4.584457874298096, |
|
"learning_rate": 1.4108496210610292e-05, |
|
"loss": 6.4695, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 3.6896689270043876, |
|
"grad_norm": 3.3078787326812744, |
|
"learning_rate": 1.3111288392500998e-05, |
|
"loss": 6.4711, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 3.789389708815317, |
|
"grad_norm": 3.6679279804229736, |
|
"learning_rate": 1.2114080574391703e-05, |
|
"loss": 6.466, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 3.8891104906262464, |
|
"grad_norm": 4.358784198760986, |
|
"learning_rate": 1.1116872756282408e-05, |
|
"loss": 6.4568, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 3.988831272437176, |
|
"grad_norm": 4.014244556427002, |
|
"learning_rate": 1.0119664938173115e-05, |
|
"loss": 6.4536, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 4.0885520542481055, |
|
"grad_norm": 3.8396079540252686, |
|
"learning_rate": 9.122457120063822e-06, |
|
"loss": 6.443, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 4.188272836059035, |
|
"grad_norm": 3.850647449493408, |
|
"learning_rate": 8.125249301954529e-06, |
|
"loss": 6.4186, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 4.287993617869964, |
|
"grad_norm": 3.829951047897339, |
|
"learning_rate": 7.128041483845234e-06, |
|
"loss": 6.4178, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 4.387714399680894, |
|
"grad_norm": 3.5512278079986572, |
|
"learning_rate": 6.132828081372159e-06, |
|
"loss": 6.4055, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 4.487435181491823, |
|
"grad_norm": 3.568665027618408, |
|
"learning_rate": 5.135620263262864e-06, |
|
"loss": 6.4076, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 4.587155963302752, |
|
"grad_norm": 3.71463942527771, |
|
"learning_rate": 4.13841244515357e-06, |
|
"loss": 6.4086, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 4.686876745113682, |
|
"grad_norm": 3.9615983963012695, |
|
"learning_rate": 3.1412046270442757e-06, |
|
"loss": 6.4061, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 4.786597526924611, |
|
"grad_norm": 4.0287909507751465, |
|
"learning_rate": 2.1459912245712007e-06, |
|
"loss": 6.3772, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 4.88631830873554, |
|
"grad_norm": 4.012565612792969, |
|
"learning_rate": 1.1487834064619066e-06, |
|
"loss": 6.3956, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 4.98603909054647, |
|
"grad_norm": 4.36814022064209, |
|
"learning_rate": 1.515755883526127e-07, |
|
"loss": 6.3996, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"step": 25070, |
|
"total_flos": 2.639861525017728e+16, |
|
"train_loss": 5.285465436767286, |
|
"train_runtime": 6500.188, |
|
"train_samples_per_second": 61.705, |
|
"train_steps_per_second": 3.857 |
|
}, |
|
{ |
|
"epoch": 5.085759872357399, |
|
"grad_norm": 3.5418105125427246, |
|
"learning_rate": 4.946400079776626e-05, |
|
"loss": 6.5458, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 5.1854806541683285, |
|
"grad_norm": 4.323005676269531, |
|
"learning_rate": 4.884074591144795e-05, |
|
"loss": 6.5604, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 5.285201435979258, |
|
"grad_norm": 4.445618629455566, |
|
"learning_rate": 4.8217491025129644e-05, |
|
"loss": 6.5452, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 5.384922217790187, |
|
"grad_norm": 4.320890426635742, |
|
"learning_rate": 4.759423613881133e-05, |
|
"loss": 6.5239, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 5.484642999601117, |
|
"grad_norm": 3.8980209827423096, |
|
"learning_rate": 4.697098125249302e-05, |
|
"loss": 6.5278, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 5.584363781412046, |
|
"grad_norm": 4.074916362762451, |
|
"learning_rate": 4.6347726366174716e-05, |
|
"loss": 6.5044, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 5.684084563222975, |
|
"grad_norm": 4.465285778045654, |
|
"learning_rate": 4.572447147985641e-05, |
|
"loss": 6.472, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 5.783805345033905, |
|
"grad_norm": 4.351347923278809, |
|
"learning_rate": 4.5101216593538095e-05, |
|
"loss": 6.4504, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 5.883526126844835, |
|
"grad_norm": 4.14565372467041, |
|
"learning_rate": 4.447796170721978e-05, |
|
"loss": 6.4375, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 5.983246908655763, |
|
"grad_norm": 4.669959545135498, |
|
"learning_rate": 4.3854706820901474e-05, |
|
"loss": 6.4393, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 6.082967690466694, |
|
"grad_norm": 4.345717430114746, |
|
"learning_rate": 4.323145193458317e-05, |
|
"loss": 6.3808, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 6.182688472277623, |
|
"grad_norm": 4.040054798126221, |
|
"learning_rate": 4.260819704826486e-05, |
|
"loss": 6.3705, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 6.282409254088552, |
|
"grad_norm": 4.663171291351318, |
|
"learning_rate": 4.198618867171919e-05, |
|
"loss": 6.3803, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 6.382130035899482, |
|
"grad_norm": 4.45890474319458, |
|
"learning_rate": 4.136293378540088e-05, |
|
"loss": 6.3256, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 6.481850817710411, |
|
"grad_norm": 4.158110618591309, |
|
"learning_rate": 4.073967889908257e-05, |
|
"loss": 6.3351, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 6.58157159952134, |
|
"grad_norm": 4.460795879364014, |
|
"learning_rate": 4.0116424012764265e-05, |
|
"loss": 6.3137, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 6.68129238133227, |
|
"grad_norm": 4.767895221710205, |
|
"learning_rate": 3.949316912644596e-05, |
|
"loss": 6.2751, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 6.781013163143199, |
|
"grad_norm": 4.399994850158691, |
|
"learning_rate": 3.887116074990028e-05, |
|
"loss": 6.2345, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 6.8807339449541285, |
|
"grad_norm": 4.522914886474609, |
|
"learning_rate": 3.8247905863581976e-05, |
|
"loss": 6.218, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 6.980454726765058, |
|
"grad_norm": 4.697731018066406, |
|
"learning_rate": 3.762465097726366e-05, |
|
"loss": 6.1819, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 7.080175508575987, |
|
"grad_norm": 5.113608360290527, |
|
"learning_rate": 3.7001396090945355e-05, |
|
"loss": 6.1566, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 7.179896290386917, |
|
"grad_norm": 4.987142086029053, |
|
"learning_rate": 3.637814120462705e-05, |
|
"loss": 6.1504, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 7.279617072197846, |
|
"grad_norm": 4.797494888305664, |
|
"learning_rate": 3.5756132828081373e-05, |
|
"loss": 6.0915, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 7.379337854008775, |
|
"grad_norm": 5.114543437957764, |
|
"learning_rate": 3.5132877941763066e-05, |
|
"loss": 6.0859, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 7.479058635819705, |
|
"grad_norm": 5.5212721824646, |
|
"learning_rate": 3.450962305544476e-05, |
|
"loss": 6.0643, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 7.578779417630634, |
|
"grad_norm": 4.77981424331665, |
|
"learning_rate": 3.3886368169126446e-05, |
|
"loss": 6.038, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 7.678500199441563, |
|
"grad_norm": 5.6912760734558105, |
|
"learning_rate": 3.326311328280814e-05, |
|
"loss": 6.0327, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 7.778220981252493, |
|
"grad_norm": 5.021594524383545, |
|
"learning_rate": 3.2641104906262464e-05, |
|
"loss": 6.0089, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 7.877941763063422, |
|
"grad_norm": 4.9512410163879395, |
|
"learning_rate": 3.201785001994416e-05, |
|
"loss": 5.9914, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 7.9776625448743514, |
|
"grad_norm": 4.6659088134765625, |
|
"learning_rate": 3.139459513362585e-05, |
|
"loss": 5.9688, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 8.07738332668528, |
|
"grad_norm": 5.084179401397705, |
|
"learning_rate": 3.601552017986003e-05, |
|
"loss": 5.9368, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 8.177104108496211, |
|
"grad_norm": 5.475657939910889, |
|
"learning_rate": 3.556224389890126e-05, |
|
"loss": 5.9181, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 8.27682489030714, |
|
"grad_norm": 4.678411960601807, |
|
"learning_rate": 3.510896761794249e-05, |
|
"loss": 5.8795, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 8.37654567211807, |
|
"grad_norm": 5.502169132232666, |
|
"learning_rate": 3.465569133698372e-05, |
|
"loss": 5.8389, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 8.476266453928998, |
|
"grad_norm": 5.32131290435791, |
|
"learning_rate": 3.420241505602495e-05, |
|
"loss": 5.8329, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 8.575987235739928, |
|
"grad_norm": 5.6808552742004395, |
|
"learning_rate": 3.374913877506618e-05, |
|
"loss": 5.8001, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 8.675708017550857, |
|
"grad_norm": 4.988351821899414, |
|
"learning_rate": 3.329586249410741e-05, |
|
"loss": 5.7928, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 8.775428799361787, |
|
"grad_norm": 5.559896469116211, |
|
"learning_rate": 3.284258621314864e-05, |
|
"loss": 5.7488, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 8.875149581172716, |
|
"grad_norm": 6.084516525268555, |
|
"learning_rate": 3.238930993218987e-05, |
|
"loss": 5.7262, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 8.974870362983646, |
|
"grad_norm": 6.219081401824951, |
|
"learning_rate": 3.19360336512311e-05, |
|
"loss": 5.6925, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 9.074591144794574, |
|
"grad_norm": 6.170139789581299, |
|
"learning_rate": 3.1482757370272333e-05, |
|
"loss": 5.6491, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 9.174311926605505, |
|
"grad_norm": 5.830073356628418, |
|
"learning_rate": 3.102948108931356e-05, |
|
"loss": 5.6228, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 9.274032708416435, |
|
"grad_norm": 5.452333927154541, |
|
"learning_rate": 3.0577111360916706e-05, |
|
"loss": 5.5724, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 9.373753490227363, |
|
"grad_norm": 5.113864421844482, |
|
"learning_rate": 3.0123835079957935e-05, |
|
"loss": 5.5437, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 9.473474272038294, |
|
"grad_norm": 5.875530242919922, |
|
"learning_rate": 2.9670558798999164e-05, |
|
"loss": 5.525, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 9.573195053849222, |
|
"grad_norm": 5.342255592346191, |
|
"learning_rate": 2.9217282518040397e-05, |
|
"loss": 5.5145, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 9.672915835660152, |
|
"grad_norm": 6.1103644371032715, |
|
"learning_rate": 2.8764006237081626e-05, |
|
"loss": 5.4687, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 9.77263661747108, |
|
"grad_norm": 6.640170097351074, |
|
"learning_rate": 2.8310729956122855e-05, |
|
"loss": 5.4448, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 9.872357399282011, |
|
"grad_norm": 6.135842323303223, |
|
"learning_rate": 2.7858360227726005e-05, |
|
"loss": 5.4075, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 9.97207818109294, |
|
"grad_norm": 6.063602924346924, |
|
"learning_rate": 2.7405083946767234e-05, |
|
"loss": 5.374, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 10.07179896290387, |
|
"grad_norm": 6.689053535461426, |
|
"learning_rate": 2.6951807665808463e-05, |
|
"loss": 5.3459, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 10.171519744714798, |
|
"grad_norm": 6.488341331481934, |
|
"learning_rate": 2.6498531384849696e-05, |
|
"loss": 5.3185, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 10.271240526525728, |
|
"grad_norm": 6.589330673217773, |
|
"learning_rate": 2.6045255103890925e-05, |
|
"loss": 5.3019, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 10.370961308336657, |
|
"grad_norm": 6.61977481842041, |
|
"learning_rate": 2.5592885375494075e-05, |
|
"loss": 5.2792, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 10.470682090147587, |
|
"grad_norm": 6.396610736846924, |
|
"learning_rate": 2.5139609094535304e-05, |
|
"loss": 5.2347, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 10.570402871958516, |
|
"grad_norm": 7.000791549682617, |
|
"learning_rate": 2.4686332813576534e-05, |
|
"loss": 5.2252, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 10.670123653769446, |
|
"grad_norm": 6.714987277984619, |
|
"learning_rate": 2.4233056532617763e-05, |
|
"loss": 5.1965, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 10.769844435580374, |
|
"grad_norm": 7.012180805206299, |
|
"learning_rate": 2.3779780251658992e-05, |
|
"loss": 5.1769, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 10.869565217391305, |
|
"grad_norm": 6.85835599899292, |
|
"learning_rate": 2.332650397070022e-05, |
|
"loss": 5.1442, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 10.969285999202233, |
|
"grad_norm": 6.789878845214844, |
|
"learning_rate": 2.2873227689741453e-05, |
|
"loss": 5.1071, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 11.0, |
|
"step": 55154, |
|
"total_flos": 5.807695355039002e+16, |
|
"train_loss": 1.5156397336923944, |
|
"train_runtime": 4860.501, |
|
"train_samples_per_second": 181.547, |
|
"train_steps_per_second": 11.347 |
|
} |
|
], |
|
"logging_steps": 500, |
|
"max_steps": 55154, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 11, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 5.807695355039002e+16, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |