{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 3146,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01589319771137953,
      "grad_norm": 0.47528788447380066,
      "learning_rate": 1.1904761904761903e-05,
      "loss": 1.4053,
      "step": 25
    },
    {
      "epoch": 0.03178639542275906,
      "grad_norm": 0.3256166875362396,
      "learning_rate": 2.3809523809523807e-05,
      "loss": 1.1769,
      "step": 50
    },
    {
      "epoch": 0.04767959313413859,
      "grad_norm": 0.40220576524734497,
      "learning_rate": 3.571428571428571e-05,
      "loss": 1.1523,
      "step": 75
    },
    {
      "epoch": 0.06357279084551812,
      "grad_norm": 0.37274038791656494,
      "learning_rate": 4.7619047619047614e-05,
      "loss": 0.909,
      "step": 100
    },
    {
      "epoch": 0.07946598855689765,
      "grad_norm": 0.3363211452960968,
      "learning_rate": 5.952380952380952e-05,
      "loss": 0.7534,
      "step": 125
    },
    {
      "epoch": 0.09535918626827718,
      "grad_norm": 0.28489017486572266,
      "learning_rate": 7.142857142857142e-05,
      "loss": 0.7111,
      "step": 150
    },
    {
      "epoch": 0.11125238397965671,
      "grad_norm": 0.25532016158103943,
      "learning_rate": 8.333333333333333e-05,
      "loss": 0.8574,
      "step": 175
    },
    {
      "epoch": 0.12714558169103624,
      "grad_norm": 0.20530685782432556,
      "learning_rate": 9.523809523809523e-05,
      "loss": 0.7174,
      "step": 200
    },
    {
      "epoch": 0.14303877940241577,
      "grad_norm": 0.294625848531723,
      "learning_rate": 0.00010714285714285714,
      "loss": 0.7769,
      "step": 225
    },
    {
      "epoch": 0.1589319771137953,
      "grad_norm": 0.3870828151702881,
      "learning_rate": 0.00011904761904761903,
      "loss": 0.7005,
      "step": 250
    },
    {
      "epoch": 0.17482517482517482,
      "grad_norm": 0.26453226804733276,
      "learning_rate": 0.00013095238095238093,
      "loss": 0.7645,
      "step": 275
    },
    {
      "epoch": 0.19071837253655435,
      "grad_norm": 0.31448787450790405,
      "learning_rate": 0.00014285714285714284,
      "loss": 0.7961,
      "step": 300
    },
    {
      "epoch": 0.2066115702479339,
      "grad_norm": 0.25471773743629456,
      "learning_rate": 0.00014999538207561303,
      "loss": 0.8038,
      "step": 325
    },
    {
      "epoch": 0.22250476795931343,
      "grad_norm": 0.2693980634212494,
      "learning_rate": 0.0001499434369568889,
      "loss": 0.7773,
      "step": 350
    },
    {
      "epoch": 0.23839796567069294,
      "grad_norm": 0.21915049850940704,
      "learning_rate": 0.0001498338144248584,
      "loss": 0.7611,
      "step": 375
    },
    {
      "epoch": 0.25429116338207247,
      "grad_norm": 0.24487756192684174,
      "learning_rate": 0.00014966659884640333,
      "loss": 0.7344,
      "step": 400
    },
    {
      "epoch": 0.270184361093452,
      "grad_norm": 0.23428066074848175,
      "learning_rate": 0.00014944191891273962,
      "loss": 0.7511,
      "step": 425
    },
    {
      "epoch": 0.28607755880483154,
      "grad_norm": 0.15835721790790558,
      "learning_rate": 0.00014915994754037505,
      "loss": 0.6929,
      "step": 450
    },
    {
      "epoch": 0.3019707565162111,
      "grad_norm": 0.2219650149345398,
      "learning_rate": 0.00014882090173803036,
      "loss": 0.7666,
      "step": 475
    },
    {
      "epoch": 0.3178639542275906,
      "grad_norm": 0.24038617312908173,
      "learning_rate": 0.0001484250424396267,
      "loss": 0.8315,
      "step": 500
    },
    {
      "epoch": 0.3337571519389701,
      "grad_norm": 0.1785655915737152,
      "learning_rate": 0.00014797267430346768,
      "loss": 0.8097,
      "step": 525
    },
    {
      "epoch": 0.34965034965034963,
      "grad_norm": 0.2057177722454071,
      "learning_rate": 0.0001474641454777703,
      "loss": 0.8053,
      "step": 550
    },
    {
      "epoch": 0.36554354736172917,
      "grad_norm": 0.15342770516872406,
      "learning_rate": 0.00014689984733272585,
      "loss": 0.7279,
      "step": 575
    },
    {
      "epoch": 0.3814367450731087,
      "grad_norm": 0.26547038555145264,
      "learning_rate": 0.00014628021415929646,
      "loss": 0.7449,
      "step": 600
    },
    {
      "epoch": 0.39732994278448824,
      "grad_norm": 0.21101844310760498,
      "learning_rate": 0.00014560572283497925,
      "loss": 0.793,
      "step": 625
    },
    {
      "epoch": 0.4132231404958678,
      "grad_norm": 0.3606005311012268,
      "learning_rate": 0.00014487689245679545,
      "loss": 0.7311,
      "step": 650
    },
    {
      "epoch": 0.4291163382072473,
      "grad_norm": 0.20686589181423187,
      "learning_rate": 0.00014409428394178674,
      "loss": 0.7273,
      "step": 675
    },
    {
      "epoch": 0.44500953591862685,
      "grad_norm": 0.25700056552886963,
      "learning_rate": 0.00014325849959532654,
      "loss": 0.7998,
      "step": 700
    },
    {
      "epoch": 0.46090273363000633,
      "grad_norm": 0.19483058154582977,
      "learning_rate": 0.00014237018264757795,
      "loss": 0.7463,
      "step": 725
    },
    {
      "epoch": 0.47679593134138587,
      "grad_norm": 0.18222449719905853,
      "learning_rate": 0.0001414300167584561,
      "loss": 0.7365,
      "step": 750
    },
    {
      "epoch": 0.4926891290527654,
      "grad_norm": 0.1947900503873825,
      "learning_rate": 0.00014043872549147454,
      "loss": 0.747,
      "step": 775
    },
    {
      "epoch": 0.5085823267641449,
      "grad_norm": 0.1856011003255844,
      "learning_rate": 0.00013939707175688178,
      "loss": 0.7638,
      "step": 800
    },
    {
      "epoch": 0.5244755244755245,
      "grad_norm": 0.20004665851593018,
      "learning_rate": 0.00013830585722451587,
      "loss": 0.7243,
      "step": 825
    },
    {
      "epoch": 0.540368722186904,
      "grad_norm": 0.1734880954027176,
      "learning_rate": 0.0001371659217068294,
      "loss": 0.7505,
      "step": 850
    },
    {
      "epoch": 0.5562619198982836,
      "grad_norm": 0.24975039064884186,
      "learning_rate": 0.00013597814251255891,
      "loss": 0.7699,
      "step": 875
    },
    {
      "epoch": 0.5721551176096631,
      "grad_norm": 0.24918633699417114,
      "learning_rate": 0.0001347434337715374,
      "loss": 0.771,
      "step": 900
    },
    {
      "epoch": 0.5880483153210426,
      "grad_norm": 0.15272338688373566,
      "learning_rate": 0.00013346274573116824,
      "loss": 0.7405,
      "step": 925
    },
    {
      "epoch": 0.6039415130324222,
      "grad_norm": 0.19538848102092743,
      "learning_rate": 0.0001321370640251029,
      "loss": 0.7981,
      "step": 950
    },
    {
      "epoch": 0.6198347107438017,
      "grad_norm": 0.21340744197368622,
      "learning_rate": 0.0001307674089146848,
      "loss": 0.7714,
      "step": 975
    },
    {
      "epoch": 0.6357279084551812,
      "grad_norm": 0.2356826215982437,
      "learning_rate": 0.00012935483450374346,
      "loss": 0.7867,
      "step": 1000
    },
    {
      "epoch": 0.6516211061665607,
      "grad_norm": 0.2050427347421646,
      "learning_rate": 0.0001279004279273428,
      "loss": 0.7375,
      "step": 1025
    },
    {
      "epoch": 0.6675143038779402,
      "grad_norm": 0.28300225734710693,
      "learning_rate": 0.00012640530851510827,
      "loss": 0.7864,
      "step": 1050
    },
    {
      "epoch": 0.6834075015893197,
      "grad_norm": 0.2704091966152191,
      "learning_rate": 0.00012487062692977685,
      "loss": 0.7685,
      "step": 1075
    },
    {
      "epoch": 0.6993006993006993,
      "grad_norm": 0.18991053104400635,
      "learning_rate": 0.00012329756428163224,
      "loss": 0.7825,
      "step": 1100
    },
    {
      "epoch": 0.7151938970120788,
      "grad_norm": 0.20676083862781525,
      "learning_rate": 0.00012168733121950773,
      "loss": 0.6943,
      "step": 1125
    },
    {
      "epoch": 0.7310870947234583,
      "grad_norm": 0.22199320793151855,
      "learning_rate": 0.00012004116699905555,
      "loss": 0.7364,
      "step": 1150
    },
    {
      "epoch": 0.7469802924348379,
      "grad_norm": 0.29109102487564087,
      "learning_rate": 0.00011836033852900001,
      "loss": 0.7073,
      "step": 1175
    },
    {
      "epoch": 0.7628734901462174,
      "grad_norm": 0.18443694710731506,
      "learning_rate": 0.00011664613939610873,
      "loss": 0.7531,
      "step": 1200
    },
    {
      "epoch": 0.778766687857597,
      "grad_norm": 0.1905236691236496,
      "learning_rate": 0.00011489988886963181,
      "loss": 0.7124,
      "step": 1225
    },
    {
      "epoch": 0.7946598855689765,
      "grad_norm": 0.17691577970981598,
      "learning_rate": 0.00011312293088597576,
      "loss": 0.7461,
      "step": 1250
    },
    {
      "epoch": 0.810553083280356,
      "grad_norm": 0.1734876185655594,
      "learning_rate": 0.00011131663301439294,
      "loss": 0.7764,
      "step": 1275
    },
    {
      "epoch": 0.8264462809917356,
      "grad_norm": 0.17549951374530792,
      "learning_rate": 0.00010948238540448318,
      "loss": 0.7874,
      "step": 1300
    },
    {
      "epoch": 0.8423394787031151,
      "grad_norm": 0.19360962510108948,
      "learning_rate": 0.00010762159971631704,
      "loss": 0.7463,
      "step": 1325
    },
    {
      "epoch": 0.8582326764144946,
      "grad_norm": 0.18942518532276154,
      "learning_rate": 0.00010573570803400446,
      "loss": 0.777,
      "step": 1350
    },
    {
      "epoch": 0.8741258741258742,
      "grad_norm": 0.1779840737581253,
      "learning_rate": 0.00010382616176354481,
      "loss": 0.6899,
      "step": 1375
    },
    {
      "epoch": 0.8900190718372537,
      "grad_norm": 0.2987983226776123,
      "learning_rate": 0.00010189443051580653,
      "loss": 0.6906,
      "step": 1400
    },
    {
      "epoch": 0.9059122695486331,
      "grad_norm": 0.2795010209083557,
      "learning_rate": 9.994200097549602e-05,
      "loss": 0.7119,
      "step": 1425
    },
    {
      "epoch": 0.9218054672600127,
      "grad_norm": 0.21874257922172546,
      "learning_rate": 9.797037575698653e-05,
      "loss": 0.7284,
      "step": 1450
    },
    {
      "epoch": 0.9376986649713922,
      "grad_norm": 0.18118910491466522,
      "learning_rate": 9.598107224788701e-05,
      "loss": 0.6999,
      "step": 1475
    },
    {
      "epoch": 0.9535918626827717,
      "grad_norm": 0.2674807608127594,
      "learning_rate": 9.397562144124163e-05,
      "loss": 0.6853,
      "step": 1500
    },
    {
      "epoch": 0.9694850603941513,
      "grad_norm": 0.24154460430145264,
      "learning_rate": 9.195556675725821e-05,
      "loss": 0.7482,
      "step": 1525
    },
    {
      "epoch": 0.9853782581055308,
      "grad_norm": 0.2160235196352005,
      "learning_rate": 8.992246285547248e-05,
      "loss": 0.7444,
      "step": 1550
    },
    {
      "epoch": 1.0012714558169105,
      "grad_norm": 0.17261384427547455,
      "learning_rate": 8.787787443826268e-05,
      "loss": 0.822,
      "step": 1575
    },
    {
      "epoch": 1.0171646535282899,
      "grad_norm": 0.17116381227970123,
      "learning_rate": 8.582337504663479e-05,
      "loss": 0.6318,
      "step": 1600
    },
    {
      "epoch": 1.0330578512396693,
      "grad_norm": 0.18086880445480347,
      "learning_rate": 8.376054584920558e-05,
      "loss": 0.639,
      "step": 1625
    },
    {
      "epoch": 1.048951048951049,
      "grad_norm": 0.23205679655075073,
      "learning_rate": 8.169097442531531e-05,
      "loss": 0.7593,
      "step": 1650
    },
    {
      "epoch": 1.0648442466624284,
      "grad_norm": 0.2291744500398636,
      "learning_rate": 7.961625354320664e-05,
      "loss": 0.7277,
      "step": 1675
    },
    {
      "epoch": 1.080737444373808,
      "grad_norm": 0.2415177822113037,
      "learning_rate": 7.753797993421022e-05,
      "loss": 0.6691,
      "step": 1700
    },
    {
      "epoch": 1.0966306420851875,
      "grad_norm": 0.22513903677463531,
      "learning_rate": 7.545775306388e-05,
      "loss": 0.7055,
      "step": 1725
    },
    {
      "epoch": 1.112523839796567,
      "grad_norm": 0.21355651319026947,
      "learning_rate": 7.337717390102455e-05,
      "loss": 0.6853,
      "step": 1750
    },
    {
      "epoch": 1.1284170375079465,
      "grad_norm": 0.25458982586860657,
      "learning_rate": 7.129784368558108e-05,
      "loss": 0.68,
      "step": 1775
    },
    {
      "epoch": 1.1443102352193262,
      "grad_norm": 0.31313472986221313,
      "learning_rate": 6.922136269628126e-05,
      "loss": 0.6753,
      "step": 1800
    },
    {
      "epoch": 1.1602034329307056,
      "grad_norm": 0.18478913605213165,
      "learning_rate": 6.714932901905637e-05,
      "loss": 0.685,
      "step": 1825
    },
    {
      "epoch": 1.1760966306420853,
      "grad_norm": 0.22722911834716797,
      "learning_rate": 6.508333731713026e-05,
      "loss": 0.74,
      "step": 1850
    },
    {
      "epoch": 1.1919898283534647,
      "grad_norm": 0.23728103935718536,
      "learning_rate": 6.302497760374652e-05,
      "loss": 0.7204,
      "step": 1875
    },
    {
      "epoch": 1.2078830260648443,
      "grad_norm": 0.30603456497192383,
      "learning_rate": 6.097583401847412e-05,
      "loss": 0.6686,
      "step": 1900
    },
    {
      "epoch": 1.2237762237762237,
      "grad_norm": 0.27340492606163025,
      "learning_rate": 5.893748360803385e-05,
      "loss": 0.6608,
      "step": 1925
    },
    {
      "epoch": 1.2396694214876034,
      "grad_norm": 0.1994735151529312,
      "learning_rate": 5.691149511258303e-05,
      "loss": 0.6903,
      "step": 1950
    },
    {
      "epoch": 1.2555626191989828,
      "grad_norm": 0.2764354646205902,
      "learning_rate": 5.489942775839355e-05,
      "loss": 0.6587,
      "step": 1975
    },
    {
      "epoch": 1.2714558169103625,
      "grad_norm": 0.2392449527978897,
      "learning_rate": 5.290283005785156e-05,
      "loss": 0.7018,
      "step": 2000
    },
    {
      "epoch": 1.287349014621742,
      "grad_norm": 0.20800232887268066,
      "learning_rate": 5.0923238617702786e-05,
      "loss": 0.6528,
      "step": 2025
    },
    {
      "epoch": 1.3032422123331213,
      "grad_norm": 0.2834407389163971,
      "learning_rate": 4.896217695646081e-05,
      "loss": 0.6919,
      "step": 2050
    },
    {
      "epoch": 1.319135410044501,
      "grad_norm": 0.22680579125881195,
      "learning_rate": 4.702115433188782e-05,
      "loss": 0.668,
      "step": 2075
    },
    {
      "epoch": 1.3350286077558806,
      "grad_norm": 0.27213868498802185,
      "learning_rate": 4.5101664579450994e-05,
      "loss": 0.7536,
      "step": 2100
    },
    {
      "epoch": 1.35092180546726,
      "grad_norm": 0.2941715121269226,
      "learning_rate": 4.3205184962647714e-05,
      "loss": 0.8361,
      "step": 2125
    },
    {
      "epoch": 1.3668150031786395,
      "grad_norm": 0.28851884603500366,
      "learning_rate": 4.133317503608517e-05,
      "loss": 0.6659,
      "step": 2150
    },
    {
      "epoch": 1.3827082008900191,
      "grad_norm": 0.2723303735256195,
      "learning_rate": 3.948707552218857e-05,
      "loss": 0.6205,
      "step": 2175
    },
    {
      "epoch": 1.3986013986013985,
      "grad_norm": 0.34859615564346313,
      "learning_rate": 3.766830720240305e-05,
      "loss": 0.7091,
      "step": 2200
    },
    {
      "epoch": 1.4144945963127782,
      "grad_norm": 0.3292659521102905,
      "learning_rate": 3.5878269823742584e-05,
      "loss": 0.6266,
      "step": 2225
    },
    {
      "epoch": 1.4303877940241576,
      "grad_norm": 0.18559393286705017,
      "learning_rate": 3.411834102152669e-05,
      "loss": 0.7348,
      "step": 2250
    },
    {
      "epoch": 1.4462809917355373,
      "grad_norm": 0.2816512882709503,
      "learning_rate": 3.238987525913521e-05,
      "loss": 0.649,
      "step": 2275
    },
    {
      "epoch": 1.4621741894469167,
      "grad_norm": 0.23303620517253876,
      "learning_rate": 3.0694202785596056e-05,
      "loss": 0.7151,
      "step": 2300
    },
    {
      "epoch": 1.4780673871582963,
      "grad_norm": 0.2300049215555191,
      "learning_rate": 2.9032628611808934e-05,
      "loss": 0.7383,
      "step": 2325
    },
    {
      "epoch": 1.4939605848696758,
      "grad_norm": 0.2333756685256958,
      "learning_rate": 2.7406431506192453e-05,
      "loss": 0.7278,
      "step": 2350
    },
    {
      "epoch": 1.5098537825810552,
      "grad_norm": 0.285395085811615,
      "learning_rate": 2.5816863010527768e-05,
      "loss": 0.694,
      "step": 2375
    },
    {
      "epoch": 1.5257469802924348,
      "grad_norm": 0.24296920001506805,
      "learning_rate": 2.4265146476756423e-05,
      "loss": 0.6776,
      "step": 2400
    },
    {
      "epoch": 1.5416401780038145,
      "grad_norm": 0.27042725682258606,
      "learning_rate": 2.2752476125473068e-05,
      "loss": 0.717,
      "step": 2425
    },
    {
      "epoch": 1.557533375715194,
      "grad_norm": 0.27852827310562134,
      "learning_rate": 2.1280016126838474e-05,
      "loss": 0.7223,
      "step": 2450
    },
    {
      "epoch": 1.5734265734265733,
      "grad_norm": 0.3818005919456482,
      "learning_rate": 1.984889970461933e-05,
      "loss": 0.5883,
      "step": 2475
    },
    {
      "epoch": 1.589319771137953,
      "grad_norm": 0.22705549001693726,
      "learning_rate": 1.8460228264045072e-05,
      "loss": 0.6397,
      "step": 2500
    },
    {
      "epoch": 1.6052129688493326,
      "grad_norm": 0.233836829662323,
      "learning_rate": 1.711507054415244e-05,
      "loss": 0.667,
      "step": 2525
    },
    {
      "epoch": 1.621106166560712,
      "grad_norm": 0.21357496082782745,
      "learning_rate": 1.581446179527049e-05,
      "loss": 0.7254,
      "step": 2550
    },
    {
      "epoch": 1.6369993642720915,
      "grad_norm": 0.2109714299440384,
      "learning_rate": 1.4559402982278968e-05,
      "loss": 0.6232,
      "step": 2575
    },
    {
      "epoch": 1.6528925619834711,
      "grad_norm": 0.23249782621860504,
      "learning_rate": 1.3350860014253032e-05,
      "loss": 0.6821,
      "step": 2600
    },
    {
      "epoch": 1.6687857596948508,
      "grad_norm": 0.22555020451545715,
      "learning_rate": 1.2189763001087526e-05,
      "loss": 0.7022,
      "step": 2625
    },
    {
      "epoch": 1.6846789574062302,
      "grad_norm": 0.2770826518535614,
      "learning_rate": 1.1077005537672596e-05,
      "loss": 0.6495,
      "step": 2650
    },
    {
      "epoch": 1.7005721551176096,
      "grad_norm": 0.32234126329421997,
      "learning_rate": 1.001344401617192e-05,
      "loss": 0.6353,
      "step": 2675
    },
    {
      "epoch": 1.716465352828989,
      "grad_norm": 0.17805339395999908,
      "learning_rate": 8.999896966932396e-06,
      "loss": 0.7182,
      "step": 2700
    },
    {
      "epoch": 1.7323585505403687,
      "grad_norm": 0.2321351170539856,
      "learning_rate": 8.037144428532986e-06,
      "loss": 0.7443,
      "step": 2725
    },
    {
      "epoch": 1.7482517482517483,
      "grad_norm": 0.24086669087409973,
      "learning_rate": 7.1259273474571225e-06,
      "loss": 0.7209,
      "step": 2750
    },
    {
      "epoch": 1.7641449459631278,
      "grad_norm": 0.2397790104150772,
      "learning_rate": 6.266947007850945e-06,
      "loss": 0.7198,
      "step": 2775
    },
    {
      "epoch": 1.7800381436745072,
      "grad_norm": 0.27960214018821716,
      "learning_rate": 5.460864491806263e-06,
      "loss": 0.6878,
      "step": 2800
    },
    {
      "epoch": 1.7959313413858868,
      "grad_norm": 0.2695086598396301,
      "learning_rate": 4.7083001705833735e-06,
      "loss": 0.7547,
      "step": 2825
    },
    {
      "epoch": 1.8118245390972665,
      "grad_norm": 0.3778378963470459,
      "learning_rate": 4.009833227165573e-06,
      "loss": 0.5804,
      "step": 2850
    },
    {
      "epoch": 1.827717736808646,
      "grad_norm": 0.28731873631477356,
      "learning_rate": 3.3660012105126336e-06,
      "loss": 0.738,
      "step": 2875
    },
    {
      "epoch": 1.8436109345200253,
      "grad_norm": 0.2649593651294708,
      "learning_rate": 2.777299621856283e-06,
      "loss": 0.6167,
      "step": 2900
    },
    {
      "epoch": 1.859504132231405,
      "grad_norm": 0.28797072172164917,
      "learning_rate": 2.244181533356318e-06,
      "loss": 0.7371,
      "step": 2925
    },
    {
      "epoch": 1.8753973299427846,
      "grad_norm": 0.3841162323951721,
      "learning_rate": 1.7670572394104715e-06,
      "loss": 0.674,
      "step": 2950
    },
    {
      "epoch": 1.891290527654164,
      "grad_norm": 0.2641618847846985,
      "learning_rate": 1.346293940886739e-06,
      "loss": 0.6912,
      "step": 2975
    },
    {
      "epoch": 1.9071837253655435,
      "grad_norm": 0.25159719586372375,
      "learning_rate": 9.822154625209217e-07,
      "loss": 0.7177,
      "step": 3000
    },
    {
      "epoch": 1.9230769230769231,
      "grad_norm": 0.2949710488319397,
      "learning_rate": 6.751020036970728e-07,
      "loss": 0.7384,
      "step": 3025
    },
    {
      "epoch": 1.9389701207883026,
      "grad_norm": 0.29616832733154297,
      "learning_rate": 4.2518992280245465e-07,
      "loss": 0.6868,
      "step": 3050
    },
    {
      "epoch": 1.9548633184996822,
      "grad_norm": 0.2574808895587921,
      "learning_rate": 2.3267155532321335e-07,
      "loss": 0.7261,
      "step": 3075
    },
    {
      "epoch": 1.9707565162110616,
      "grad_norm": 0.18125727772712708,
      "learning_rate": 9.769506582052722e-08,
      "loss": 0.7009,
      "step": 3100
    },
    {
      "epoch": 1.986649713922441,
      "grad_norm": 0.29592961072921753,
      "learning_rate": 2.036433390130432e-08,
      "loss": 0.6884,
      "step": 3125
    }
  ],
  "logging_steps": 25,
  "max_steps": 3146,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 0,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.1800418501722112e+17,
  "train_batch_size": 3,
  "trial_name": null,
  "trial_params": null
}