{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9990645463049579,
  "eval_steps": 500,
  "global_step": 534,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0018709073900841909,
      "grad_norm": 0.5139586586877397,
      "learning_rate": 1.8518518518518518e-07,
      "loss": 1.4813,
      "step": 1
    },
    {
      "epoch": 0.009354536950420954,
      "grad_norm": 0.5084917331083768,
      "learning_rate": 9.259259259259259e-07,
      "loss": 1.4869,
      "step": 5
    },
    {
      "epoch": 0.018709073900841908,
      "grad_norm": 0.5086882763520935,
      "learning_rate": 1.8518518518518519e-06,
      "loss": 1.4912,
      "step": 10
    },
    {
      "epoch": 0.02806361085126286,
      "grad_norm": 0.49639126734512623,
      "learning_rate": 2.7777777777777783e-06,
      "loss": 1.4813,
      "step": 15
    },
    {
      "epoch": 0.037418147801683815,
      "grad_norm": 0.5064168113795362,
      "learning_rate": 3.7037037037037037e-06,
      "loss": 1.4849,
      "step": 20
    },
    {
      "epoch": 0.04677268475210477,
      "grad_norm": 0.474054321279664,
      "learning_rate": 4.62962962962963e-06,
      "loss": 1.4776,
      "step": 25
    },
    {
      "epoch": 0.05612722170252572,
      "grad_norm": 0.40667394555209146,
      "learning_rate": 5.555555555555557e-06,
      "loss": 1.4756,
      "step": 30
    },
    {
      "epoch": 0.06548175865294668,
      "grad_norm": 0.2821916256658075,
      "learning_rate": 6.481481481481482e-06,
      "loss": 1.4585,
      "step": 35
    },
    {
      "epoch": 0.07483629560336763,
      "grad_norm": 0.2369804283848295,
      "learning_rate": 7.4074074074074075e-06,
      "loss": 1.4529,
      "step": 40
    },
    {
      "epoch": 0.08419083255378859,
      "grad_norm": 0.23752287185591942,
      "learning_rate": 8.333333333333334e-06,
      "loss": 1.4356,
      "step": 45
    },
    {
      "epoch": 0.09354536950420954,
      "grad_norm": 0.233703618586824,
      "learning_rate": 9.25925925925926e-06,
      "loss": 1.4195,
      "step": 50
    },
    {
      "epoch": 0.1028999064546305,
      "grad_norm": 0.21334448181657395,
      "learning_rate": 9.999892908320647e-06,
      "loss": 1.4009,
      "step": 55
    },
    {
      "epoch": 0.11225444340505145,
      "grad_norm": 0.21146314339445296,
      "learning_rate": 9.996145181203616e-06,
      "loss": 1.3904,
      "step": 60
    },
    {
      "epoch": 0.1216089803554724,
      "grad_norm": 0.20882987047024354,
      "learning_rate": 9.98704745668676e-06,
      "loss": 1.3718,
      "step": 65
    },
    {
      "epoch": 0.13096351730589337,
      "grad_norm": 0.21207151944716085,
      "learning_rate": 9.972609476841368e-06,
      "loss": 1.3591,
      "step": 70
    },
    {
      "epoch": 0.1403180542563143,
      "grad_norm": 0.22054679673319932,
      "learning_rate": 9.952846702217886e-06,
      "loss": 1.342,
      "step": 75
    },
    {
      "epoch": 0.14967259120673526,
      "grad_norm": 0.27057760894999794,
      "learning_rate": 9.92778029529039e-06,
      "loss": 1.3213,
      "step": 80
    },
    {
      "epoch": 0.15902712815715622,
      "grad_norm": 0.29764481463424675,
      "learning_rate": 9.897437097795257e-06,
      "loss": 1.2966,
      "step": 85
    },
    {
      "epoch": 0.16838166510757718,
      "grad_norm": 0.3056628343357791,
      "learning_rate": 9.861849601988384e-06,
      "loss": 1.2748,
      "step": 90
    },
    {
      "epoch": 0.17773620205799812,
      "grad_norm": 0.3099000095239784,
      "learning_rate": 9.821055915851647e-06,
      "loss": 1.2542,
      "step": 95
    },
    {
      "epoch": 0.18709073900841908,
      "grad_norm": 0.2668477842603494,
      "learning_rate": 9.775099722285934e-06,
      "loss": 1.2217,
      "step": 100
    },
    {
      "epoch": 0.19644527595884004,
      "grad_norm": 0.2595557434588695,
      "learning_rate": 9.72403023233439e-06,
      "loss": 1.2122,
      "step": 105
    },
    {
      "epoch": 0.205799812909261,
      "grad_norm": 0.2099058809011832,
      "learning_rate": 9.667902132486009e-06,
      "loss": 1.1948,
      "step": 110
    },
    {
      "epoch": 0.21515434985968196,
      "grad_norm": 0.20126316643616676,
      "learning_rate": 9.606775526115963e-06,
      "loss": 1.1774,
      "step": 115
    },
    {
      "epoch": 0.2245088868101029,
      "grad_norm": 0.17345905255518454,
      "learning_rate": 9.540715869125407e-06,
      "loss": 1.1681,
      "step": 120
    },
    {
      "epoch": 0.23386342376052385,
      "grad_norm": 0.25572107838360714,
      "learning_rate": 9.469793899849663e-06,
      "loss": 1.1716,
      "step": 125
    },
    {
      "epoch": 0.2432179607109448,
      "grad_norm": 0.15018874182675532,
      "learning_rate": 9.394085563309827e-06,
      "loss": 1.1569,
      "step": 130
    },
    {
      "epoch": 0.25257249766136575,
      "grad_norm": 0.15353642479045185,
      "learning_rate": 9.31367192988896e-06,
      "loss": 1.148,
      "step": 135
    },
    {
      "epoch": 0.26192703461178674,
      "grad_norm": 0.16296485857684445,
      "learning_rate": 9.228639108519867e-06,
      "loss": 1.1504,
      "step": 140
    },
    {
      "epoch": 0.27128157156220767,
      "grad_norm": 0.14482935533275312,
      "learning_rate": 9.139078154477512e-06,
      "loss": 1.1423,
      "step": 145
    },
    {
      "epoch": 0.2806361085126286,
      "grad_norm": 0.1566349158870611,
      "learning_rate": 9.045084971874738e-06,
      "loss": 1.137,
      "step": 150
    },
    {
      "epoch": 0.2899906454630496,
      "grad_norm": 0.14554164847096335,
      "learning_rate": 8.94676021096575e-06,
      "loss": 1.1363,
      "step": 155
    },
    {
      "epoch": 0.2993451824134705,
      "grad_norm": 0.1508011037252777,
      "learning_rate": 8.844209160367298e-06,
      "loss": 1.1304,
      "step": 160
    },
    {
      "epoch": 0.3086997193638915,
      "grad_norm": 0.14045125665102134,
      "learning_rate": 8.737541634312985e-06,
      "loss": 1.1332,
      "step": 165
    },
    {
      "epoch": 0.31805425631431244,
      "grad_norm": 0.14175186310915225,
      "learning_rate": 8.626871855061438e-06,
      "loss": 1.1341,
      "step": 170
    },
    {
      "epoch": 0.3274087932647334,
      "grad_norm": 0.14507855935792988,
      "learning_rate": 8.51231833058426e-06,
      "loss": 1.1302,
      "step": 175
    },
    {
      "epoch": 0.33676333021515437,
      "grad_norm": 0.1444068343697475,
      "learning_rate": 8.39400372766471e-06,
      "loss": 1.1333,
      "step": 180
    },
    {
      "epoch": 0.3461178671655753,
      "grad_norm": 0.14780446060701052,
      "learning_rate": 8.272054740543053e-06,
      "loss": 1.1324,
      "step": 185
    },
    {
      "epoch": 0.35547240411599623,
      "grad_norm": 0.14569418896265066,
      "learning_rate": 8.146601955249187e-06,
      "loss": 1.1173,
      "step": 190
    },
    {
      "epoch": 0.3648269410664172,
      "grad_norm": 0.14203531449915593,
      "learning_rate": 8.017779709767857e-06,
      "loss": 1.115,
      "step": 195
    },
    {
      "epoch": 0.37418147801683815,
      "grad_norm": 0.13692219577509893,
      "learning_rate": 7.88572595018617e-06,
      "loss": 1.1135,
      "step": 200
    },
    {
      "epoch": 0.38353601496725914,
      "grad_norm": 0.14078263930837534,
      "learning_rate": 7.750582082977468e-06,
      "loss": 1.1159,
      "step": 205
    },
    {
      "epoch": 0.3928905519176801,
      "grad_norm": 0.15520280646196877,
      "learning_rate": 7.612492823579744e-06,
      "loss": 1.1162,
      "step": 210
    },
    {
      "epoch": 0.402245088868101,
      "grad_norm": 0.14752002744879342,
      "learning_rate": 7.471606041430724e-06,
      "loss": 1.1131,
      "step": 215
    },
    {
      "epoch": 0.411599625818522,
      "grad_norm": 0.1509280644564414,
      "learning_rate": 7.328072601625558e-06,
      "loss": 1.1118,
      "step": 220
    },
    {
      "epoch": 0.42095416276894293,
      "grad_norm": 0.1616348908118278,
      "learning_rate": 7.18204620336671e-06,
      "loss": 1.1044,
      "step": 225
    },
    {
      "epoch": 0.4303086997193639,
      "grad_norm": 0.16062290714439983,
      "learning_rate": 7.033683215379002e-06,
      "loss": 1.1049,
      "step": 230
    },
    {
      "epoch": 0.43966323666978485,
      "grad_norm": 0.14900564691058804,
      "learning_rate": 6.883142508466054e-06,
      "loss": 1.1072,
      "step": 235
    },
    {
      "epoch": 0.4490177736202058,
      "grad_norm": 0.14583649841999052,
      "learning_rate": 6.730585285387465e-06,
      "loss": 1.1062,
      "step": 240
    },
    {
      "epoch": 0.4583723105706268,
      "grad_norm": 0.15656333466816236,
      "learning_rate": 6.57617490823885e-06,
      "loss": 1.1038,
      "step": 245
    },
    {
      "epoch": 0.4677268475210477,
      "grad_norm": 0.16642188631242563,
      "learning_rate": 6.420076723519615e-06,
      "loss": 1.112,
      "step": 250
    },
    {
      "epoch": 0.47708138447146864,
      "grad_norm": 0.266837548538648,
      "learning_rate": 6.26245788507579e-06,
      "loss": 1.1104,
      "step": 255
    },
    {
      "epoch": 0.4864359214218896,
      "grad_norm": 0.15891960204556085,
      "learning_rate": 6.103487175107508e-06,
      "loss": 1.1013,
      "step": 260
    },
    {
      "epoch": 0.49579045837231056,
      "grad_norm": 0.15199420768584646,
      "learning_rate": 5.943334823432777e-06,
      "loss": 1.0965,
      "step": 265
    },
    {
      "epoch": 0.5051449953227315,
      "grad_norm": 0.1649301263523591,
      "learning_rate": 5.782172325201155e-06,
      "loss": 1.1015,
      "step": 270
    },
    {
      "epoch": 0.5144995322731525,
      "grad_norm": 0.17068364714449247,
      "learning_rate": 5.620172257252427e-06,
      "loss": 1.1048,
      "step": 275
    },
    {
      "epoch": 0.5238540692235735,
      "grad_norm": 0.1560964872722369,
      "learning_rate": 5.457508093317013e-06,
      "loss": 1.1048,
      "step": 280
    },
    {
      "epoch": 0.5332086061739943,
      "grad_norm": 0.15004408686462048,
      "learning_rate": 5.294354018255945e-06,
      "loss": 1.1001,
      "step": 285
    },
    {
      "epoch": 0.5425631431244153,
      "grad_norm": 0.1563742845945151,
      "learning_rate": 5.130884741539367e-06,
      "loss": 1.0959,
      "step": 290
    },
    {
      "epoch": 0.5519176800748363,
      "grad_norm": 0.15549310467355393,
      "learning_rate": 4.967275310163241e-06,
      "loss": 1.097,
      "step": 295
    },
    {
      "epoch": 0.5612722170252572,
      "grad_norm": 0.17033773254724513,
      "learning_rate": 4.803700921204659e-06,
      "loss": 1.0991,
      "step": 300
    },
    {
      "epoch": 0.5706267539756782,
      "grad_norm": 0.16793233585056505,
      "learning_rate": 4.640336734216403e-06,
      "loss": 1.1011,
      "step": 305
    },
    {
      "epoch": 0.5799812909260992,
      "grad_norm": 0.15858153553414958,
      "learning_rate": 4.477357683661734e-06,
      "loss": 1.0953,
      "step": 310
    },
    {
      "epoch": 0.5893358278765201,
      "grad_norm": 0.18459161802082374,
      "learning_rate": 4.314938291590161e-06,
      "loss": 1.0905,
      "step": 315
    },
    {
      "epoch": 0.598690364826941,
      "grad_norm": 0.16567092468891204,
      "learning_rate": 4.1532524807548776e-06,
      "loss": 1.0906,
      "step": 320
    },
    {
      "epoch": 0.608044901777362,
      "grad_norm": 0.16035972271379115,
      "learning_rate": 3.992473388371914e-06,
      "loss": 1.0924,
      "step": 325
    },
    {
      "epoch": 0.617399438727783,
      "grad_norm": 0.18108290101376773,
      "learning_rate": 3.832773180720475e-06,
      "loss": 1.092,
      "step": 330
    },
    {
      "epoch": 0.6267539756782039,
      "grad_norm": 0.16657617125834437,
      "learning_rate": 3.6743228687829596e-06,
      "loss": 1.0887,
      "step": 335
    },
    {
      "epoch": 0.6361085126286249,
      "grad_norm": 0.1783833220121851,
      "learning_rate": 3.517292125122146e-06,
      "loss": 1.0938,
      "step": 340
    },
    {
      "epoch": 0.6454630495790459,
      "grad_norm": 0.15395137633162537,
      "learning_rate": 3.3618491021915334e-06,
      "loss": 1.0902,
      "step": 345
    },
    {
      "epoch": 0.6548175865294668,
      "grad_norm": 0.16256299187537024,
      "learning_rate": 3.2081602522734987e-06,
      "loss": 1.0875,
      "step": 350
    },
    {
      "epoch": 0.6641721234798877,
      "grad_norm": 0.15781917797031997,
      "learning_rate": 3.056390149238022e-06,
      "loss": 1.0897,
      "step": 355
    },
    {
      "epoch": 0.6735266604303087,
      "grad_norm": 0.15450725652248593,
      "learning_rate": 2.906701312312861e-06,
      "loss": 1.0883,
      "step": 360
    },
    {
      "epoch": 0.6828811973807296,
      "grad_norm": 0.16092272691343973,
      "learning_rate": 2.759254032053888e-06,
      "loss": 1.0908,
      "step": 365
    },
    {
      "epoch": 0.6922357343311506,
      "grad_norm": 0.15881876352311045,
      "learning_rate": 2.614206198701958e-06,
      "loss": 1.0964,
      "step": 370
    },
    {
      "epoch": 0.7015902712815716,
      "grad_norm": 0.16123816798728421,
      "learning_rate": 2.471713133110078e-06,
      "loss": 1.0826,
      "step": 375
    },
    {
      "epoch": 0.7109448082319925,
      "grad_norm": 0.17326937796971717,
      "learning_rate": 2.3319274204219427e-06,
      "loss": 1.0857,
      "step": 380
    },
    {
      "epoch": 0.7202993451824135,
      "grad_norm": 0.15824306754559836,
      "learning_rate": 2.1949987466799524e-06,
      "loss": 1.0926,
      "step": 385
    },
    {
      "epoch": 0.7296538821328344,
      "grad_norm": 0.15676499958123513,
      "learning_rate": 2.061073738537635e-06,
      "loss": 1.091,
      "step": 390
    },
    {
      "epoch": 0.7390084190832554,
      "grad_norm": 0.15899394910820866,
      "learning_rate": 1.9302958062481673e-06,
      "loss": 1.0845,
      "step": 395
    },
    {
      "epoch": 0.7483629560336763,
      "grad_norm": 0.16579475502144433,
      "learning_rate": 1.8028049900970768e-06,
      "loss": 1.0854,
      "step": 400
    },
    {
      "epoch": 0.7577174929840973,
      "grad_norm": 0.15812690411171398,
      "learning_rate": 1.6787378104435931e-06,
      "loss": 1.0867,
      "step": 405
    },
    {
      "epoch": 0.7670720299345183,
      "grad_norm": 0.1562749678781465,
      "learning_rate": 1.5582271215312294e-06,
      "loss": 1.0869,
      "step": 410
    },
    {
      "epoch": 0.7764265668849392,
      "grad_norm": 0.1573904723461426,
      "learning_rate": 1.4414019692241437e-06,
      "loss": 1.0968,
      "step": 415
    },
    {
      "epoch": 0.7857811038353602,
      "grad_norm": 0.1663255000536237,
      "learning_rate": 1.3283874528215735e-06,
      "loss": 1.0896,
      "step": 420
    },
    {
      "epoch": 0.7951356407857811,
      "grad_norm": 0.1565901532094871,
      "learning_rate": 1.2193045910983864e-06,
      "loss": 1.0892,
      "step": 425
    },
    {
      "epoch": 0.804490177736202,
      "grad_norm": 0.15886041285660657,
      "learning_rate": 1.1142701927151456e-06,
      "loss": 1.086,
      "step": 430
    },
    {
      "epoch": 0.813844714686623,
      "grad_norm": 0.1653805801993096,
      "learning_rate": 1.013396731136465e-06,
      "loss": 1.0846,
      "step": 435
    },
    {
      "epoch": 0.823199251637044,
      "grad_norm": 0.1659489588893349,
      "learning_rate": 9.167922241916055e-07,
      "loss": 1.0882,
      "step": 440
    },
    {
      "epoch": 0.8325537885874649,
      "grad_norm": 0.14935331942375346,
      "learning_rate": 8.245601184062851e-07,
      "loss": 1.0919,
      "step": 445
    },
    {
      "epoch": 0.8419083255378859,
      "grad_norm": 0.1612238398597313,
      "learning_rate": 7.367991782295392e-07,
      "loss": 1.0956,
      "step": 450
    },
    {
      "epoch": 0.8512628624883068,
      "grad_norm": 0.16096449999893725,
      "learning_rate": 6.536033802742814e-07,
      "loss": 1.0892,
      "step": 455
    },
    {
      "epoch": 0.8606173994387278,
      "grad_norm": 0.18591172254707147,
      "learning_rate": 5.750618126847912e-07,
      "loss": 1.0814,
      "step": 460
    },
    {
      "epoch": 0.8699719363891487,
      "grad_norm": 0.16230740670761631,
      "learning_rate": 5.012585797388936e-07,
      "loss": 1.0863,
      "step": 465
    },
    {
      "epoch": 0.8793264733395697,
      "grad_norm": 0.16399177374939766,
      "learning_rate": 4.322727117869951e-07,
      "loss": 1.0836,
      "step": 470
    },
    {
      "epoch": 0.8886810102899907,
      "grad_norm": 0.19550575898363698,
      "learning_rate": 3.6817808062440953e-07,
      "loss": 1.091,
      "step": 475
    },
    {
      "epoch": 0.8980355472404116,
      "grad_norm": 0.15359012389254986,
      "learning_rate": 3.0904332038757977e-07,
      "loss": 1.0937,
      "step": 480
    },
    {
      "epoch": 0.9073900841908326,
      "grad_norm": 0.1606805442454799,
      "learning_rate": 2.5493175405893076e-07,
      "loss": 1.0848,
      "step": 485
    },
    {
      "epoch": 0.9167446211412535,
      "grad_norm": 0.16036880753390018,
      "learning_rate": 2.0590132565903475e-07,
      "loss": 1.0904,
      "step": 490
    },
    {
      "epoch": 0.9260991580916744,
      "grad_norm": 0.16046720313774515,
      "learning_rate": 1.6200453819870122e-07,
      "loss": 1.0839,
      "step": 495
    },
    {
      "epoch": 0.9354536950420954,
      "grad_norm": 0.16013022562616708,
      "learning_rate": 1.232883974574367e-07,
      "loss": 1.0876,
      "step": 500
    },
    {
      "epoch": 0.9448082319925164,
      "grad_norm": 0.1680369869804664,
      "learning_rate": 8.979436164848088e-08,
      "loss": 1.0854,
      "step": 505
    },
    {
      "epoch": 0.9541627689429373,
      "grad_norm": 0.1538401535295636,
      "learning_rate": 6.15582970243117e-08,
      "loss": 1.0904,
      "step": 510
    },
    {
      "epoch": 0.9635173058933583,
      "grad_norm": 0.1590652953665931,
      "learning_rate": 3.861043947016474e-08,
      "loss": 1.0783,
      "step": 515
    },
    {
      "epoch": 0.9728718428437793,
      "grad_norm": 0.15600780093245614,
      "learning_rate": 2.097536212669171e-08,
      "loss": 1.0857,
      "step": 520
    },
    {
      "epoch": 0.9822263797942001,
      "grad_norm": 0.1590897713521175,
      "learning_rate": 8.671949076420883e-09,
      "loss": 1.0859,
      "step": 525
    },
    {
      "epoch": 0.9915809167446211,
      "grad_norm": 0.15861216238412335,
      "learning_rate": 1.7133751222137007e-09,
      "loss": 1.0845,
      "step": 530
    },
    {
      "epoch": 0.9990645463049579,
      "eval_runtime": 3.2564,
      "eval_samples_per_second": 3.071,
      "eval_steps_per_second": 0.921,
      "step": 534
    },
    {
      "epoch": 0.9990645463049579,
      "step": 534,
      "total_flos": 2.2276586722453094e+17,
      "train_loss": 1.1599709373734863,
      "train_runtime": 17757.3527,
      "train_samples_per_second": 1.925,
      "train_steps_per_second": 0.03
    }
  ],
  "logging_steps": 5,
  "max_steps": 534,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.2276586722453094e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}