| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 1.9993436884707942, |
| "eval_steps": 500, |
| "global_step": 4570, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0, |
| "eval_loss": 2.3424127101898193, |
| "eval_runtime": 1.1989, |
| "eval_samples_per_second": 16.682, |
| "eval_steps_per_second": 4.171, |
| "step": 0 |
| }, |
| { |
| "epoch": 0.0004375410194705754, |
| "grad_norm": 11.591988563537598, |
| "learning_rate": 3.6231884057971015e-07, |
| "loss": 1.7217, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.004375410194705754, |
| "grad_norm": 7.750391006469727, |
| "learning_rate": 3.6231884057971017e-06, |
| "loss": 1.6447, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.008750820389411508, |
| "grad_norm": 4.005843162536621, |
| "learning_rate": 7.246376811594203e-06, |
| "loss": 1.5732, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.01312623058411726, |
| "grad_norm": 2.8733792304992676, |
| "learning_rate": 1.0869565217391305e-05, |
| "loss": 1.4077, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.017501640778823015, |
| "grad_norm": 2.6543965339660645, |
| "learning_rate": 1.4492753623188407e-05, |
| "loss": 1.3353, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.021877050973528767, |
| "grad_norm": 2.3828840255737305, |
| "learning_rate": 1.8115942028985507e-05, |
| "loss": 1.2485, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.02625246116823452, |
| "grad_norm": 2.1744871139526367, |
| "learning_rate": 2.173913043478261e-05, |
| "loss": 1.2685, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.030627871362940276, |
| "grad_norm": 2.292287588119507, |
| "learning_rate": 2.5362318840579714e-05, |
| "loss": 1.1884, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.03500328155764603, |
| "grad_norm": 2.3060696125030518, |
| "learning_rate": 2.8985507246376814e-05, |
| "loss": 1.2874, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.039378691752351785, |
| "grad_norm": 2.0335893630981445, |
| "learning_rate": 3.260869565217392e-05, |
| "loss": 1.2227, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.04375410194705753, |
| "grad_norm": 2.4152441024780273, |
| "learning_rate": 3.6231884057971014e-05, |
| "loss": 1.192, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.04812951214176329, |
| "grad_norm": 2.216953992843628, |
| "learning_rate": 3.985507246376812e-05, |
| "loss": 1.1883, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.05250492233646904, |
| "grad_norm": 2.3724114894866943, |
| "learning_rate": 4.347826086956522e-05, |
| "loss": 1.1811, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.0568803325311748, |
| "grad_norm": 2.238722801208496, |
| "learning_rate": 4.710144927536232e-05, |
| "loss": 1.168, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.06125574272588055, |
| "grad_norm": 2.2097883224487305, |
| "learning_rate": 4.999997487707664e-05, |
| "loss": 1.1848, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.0656311529205863, |
| "grad_norm": 2.2063302993774414, |
| "learning_rate": 4.999909558006067e-05, |
| "loss": 1.2155, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.07000656311529206, |
| "grad_norm": 1.8397595882415771, |
| "learning_rate": 4.9996960187368994e-05, |
| "loss": 1.1507, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.07438197330999781, |
| "grad_norm": 1.8552066087722778, |
| "learning_rate": 4.9993568806295786e-05, |
| "loss": 1.2109, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.07875738350470357, |
| "grad_norm": 2.173321485519409, |
| "learning_rate": 4.998892160724318e-05, |
| "loss": 1.1784, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.08313279369940932, |
| "grad_norm": 2.0949854850769043, |
| "learning_rate": 4.998301882371268e-05, |
| "loss": 1.1967, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.08750820389411507, |
| "grad_norm": 1.8619498014450073, |
| "learning_rate": 4.997586075229346e-05, |
| "loss": 1.1761, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.09188361408882083, |
| "grad_norm": 1.9095852375030518, |
| "learning_rate": 4.996744775264743e-05, |
| "loss": 1.15, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.09625902428352658, |
| "grad_norm": 2.0714683532714844, |
| "learning_rate": 4.995778024749118e-05, |
| "loss": 1.1477, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.10063443447823234, |
| "grad_norm": 1.9555009603500366, |
| "learning_rate": 4.9946858722574743e-05, |
| "loss": 1.1602, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.10500984467293809, |
| "grad_norm": 1.9120110273361206, |
| "learning_rate": 4.993468372665718e-05, |
| "loss": 1.1516, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.10938525486764385, |
| "grad_norm": 1.8210362195968628, |
| "learning_rate": 4.9921255871479e-05, |
| "loss": 1.1404, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.1137606650623496, |
| "grad_norm": 1.8263871669769287, |
| "learning_rate": 4.990657583173146e-05, |
| "loss": 1.1629, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.11813607525705534, |
| "grad_norm": 1.906760811805725, |
| "learning_rate": 4.989064434502262e-05, |
| "loss": 1.1364, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.1225114854517611, |
| "grad_norm": 1.7693920135498047, |
| "learning_rate": 4.9873462211840296e-05, |
| "loss": 1.1396, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.12688689564646685, |
| "grad_norm": 1.8662511110305786, |
| "learning_rate": 4.985503029551184e-05, |
| "loss": 1.1859, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.1312623058411726, |
| "grad_norm": 1.7560139894485474, |
| "learning_rate": 4.983534952216079e-05, |
| "loss": 1.1419, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.13563771603587838, |
| "grad_norm": 1.8666057586669922, |
| "learning_rate": 4.981442088066029e-05, |
| "loss": 1.1686, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.14001312623058412, |
| "grad_norm": 1.7406998872756958, |
| "learning_rate": 4.9792245422583406e-05, |
| "loss": 1.1275, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.14438853642528987, |
| "grad_norm": 1.717785120010376, |
| "learning_rate": 4.9768824262150344e-05, |
| "loss": 1.1236, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.14876394661999562, |
| "grad_norm": 1.7829722166061401, |
| "learning_rate": 4.9744158576172406e-05, |
| "loss": 1.1297, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.15313935681470137, |
| "grad_norm": 1.8614799976348877, |
| "learning_rate": 4.971824960399289e-05, |
| "loss": 1.2018, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.15751476700940714, |
| "grad_norm": 1.9301059246063232, |
| "learning_rate": 4.9691098647424806e-05, |
| "loss": 1.1491, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.1618901772041129, |
| "grad_norm": 1.7714897394180298, |
| "learning_rate": 4.9662707070685476e-05, |
| "loss": 1.1393, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.16626558739881864, |
| "grad_norm": 1.7732449769973755, |
| "learning_rate": 4.963307630032797e-05, |
| "loss": 1.1029, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.17064099759352439, |
| "grad_norm": 1.6674808263778687, |
| "learning_rate": 4.960220782516946e-05, |
| "loss": 1.1385, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.17501640778823013, |
| "grad_norm": 1.8136391639709473, |
| "learning_rate": 4.957010319621638e-05, |
| "loss": 1.0898, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.1793918179829359, |
| "grad_norm": 1.7509042024612427, |
| "learning_rate": 4.95367640265865e-05, |
| "loss": 1.1914, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.18376722817764166, |
| "grad_norm": 1.7148741483688354, |
| "learning_rate": 4.950219199142791e-05, |
| "loss": 1.1228, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.1881426383723474, |
| "grad_norm": 1.8227111101150513, |
| "learning_rate": 4.9466388827834795e-05, |
| "loss": 1.1556, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.19251804856705315, |
| "grad_norm": 1.6873571872711182, |
| "learning_rate": 4.9429356334760205e-05, |
| "loss": 1.1551, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.19689345876175893, |
| "grad_norm": 1.6948832273483276, |
| "learning_rate": 4.9391096372925626e-05, |
| "loss": 1.1085, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.20126886895646467, |
| "grad_norm": 1.7749884128570557, |
| "learning_rate": 4.9351610864727504e-05, |
| "loss": 1.1421, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.20564427915117042, |
| "grad_norm": 1.607061743736267, |
| "learning_rate": 4.9310901794140665e-05, |
| "loss": 1.155, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.21001968934587617, |
| "grad_norm": 1.6496872901916504, |
| "learning_rate": 4.92689712066186e-05, |
| "loss": 1.1552, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.21439509954058192, |
| "grad_norm": 1.5627405643463135, |
| "learning_rate": 4.922582120899072e-05, |
| "loss": 1.1419, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.2187705097352877, |
| "grad_norm": 1.7042529582977295, |
| "learning_rate": 4.918145396935648e-05, |
| "loss": 1.1652, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.22314591992999344, |
| "grad_norm": 1.5909007787704468, |
| "learning_rate": 4.913587171697643e-05, |
| "loss": 1.1294, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.2275213301246992, |
| "grad_norm": 1.7357898950576782, |
| "learning_rate": 4.908907674216023e-05, |
| "loss": 1.1119, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.23189674031940494, |
| "grad_norm": 1.6394789218902588, |
| "learning_rate": 4.9041071396151585e-05, |
| "loss": 1.1626, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.23627215051411068, |
| "grad_norm": 1.5801527500152588, |
| "learning_rate": 4.899185809101003e-05, |
| "loss": 1.1211, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.24064756070881646, |
| "grad_norm": 1.8420217037200928, |
| "learning_rate": 4.894143929948982e-05, |
| "loss": 1.1103, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.2450229709035222, |
| "grad_norm": 1.671028971672058, |
| "learning_rate": 4.8889817554915645e-05, |
| "loss": 1.1461, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.24939838109822796, |
| "grad_norm": 1.6634349822998047, |
| "learning_rate": 4.8836995451055344e-05, |
| "loss": 1.1562, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.2537737912929337, |
| "grad_norm": 1.5025454759597778, |
| "learning_rate": 4.878297564198959e-05, |
| "loss": 1.0946, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.2581492014876395, |
| "grad_norm": 1.7138441801071167, |
| "learning_rate": 4.872776084197851e-05, |
| "loss": 1.1093, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.2625246116823452, |
| "grad_norm": 1.668101191520691, |
| "learning_rate": 4.867135382532533e-05, |
| "loss": 1.153, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.266900021877051, |
| "grad_norm": 1.706669569015503, |
| "learning_rate": 4.861375742623697e-05, |
| "loss": 1.1281, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.27127543207175675, |
| "grad_norm": 2.0017876625061035, |
| "learning_rate": 4.855497453868164e-05, |
| "loss": 1.1056, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.27565084226646247, |
| "grad_norm": 1.611232042312622, |
| "learning_rate": 4.849500811624343e-05, |
| "loss": 1.1271, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.28002625246116825, |
| "grad_norm": 1.463722586631775, |
| "learning_rate": 4.8433861171973896e-05, |
| "loss": 1.1402, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.28440166265587397, |
| "grad_norm": 1.5447578430175781, |
| "learning_rate": 4.837153677824067e-05, |
| "loss": 1.1493, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.28877707285057974, |
| "grad_norm": 1.496686339378357, |
| "learning_rate": 4.830803806657311e-05, |
| "loss": 1.1333, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.2931524830452855, |
| "grad_norm": 1.6186118125915527, |
| "learning_rate": 4.8243368227504924e-05, |
| "loss": 1.1545, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.29752789323999124, |
| "grad_norm": 1.5591745376586914, |
| "learning_rate": 4.817753051041387e-05, |
| "loss": 1.1189, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.301903303434697, |
| "grad_norm": 1.8602070808410645, |
| "learning_rate": 4.811052822335849e-05, |
| "loss": 1.1486, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.30627871362940273, |
| "grad_norm": 1.5043679475784302, |
| "learning_rate": 4.80423647329119e-05, |
| "loss": 1.0961, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.3106541238241085, |
| "grad_norm": 1.6095219850540161, |
| "learning_rate": 4.797304346399263e-05, |
| "loss": 1.141, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.3150295340188143, |
| "grad_norm": 1.628132939338684, |
| "learning_rate": 4.790256789969252e-05, |
| "loss": 1.1784, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.31940494421352, |
| "grad_norm": 1.5121166706085205, |
| "learning_rate": 4.783094158110174e-05, |
| "loss": 1.1193, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.3237803544082258, |
| "grad_norm": 1.5826560258865356, |
| "learning_rate": 4.775816810713086e-05, |
| "loss": 1.1206, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.3281557646029315, |
| "grad_norm": 1.5174522399902344, |
| "learning_rate": 4.768425113433e-05, |
| "loss": 1.1566, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.3325311747976373, |
| "grad_norm": 1.6296701431274414, |
| "learning_rate": 4.7609194376705105e-05, |
| "loss": 1.1388, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.33690658499234305, |
| "grad_norm": 1.798865556716919, |
| "learning_rate": 4.753300160553136e-05, |
| "loss": 1.1889, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.34128199518704877, |
| "grad_norm": 1.6122382879257202, |
| "learning_rate": 4.745567664916368e-05, |
| "loss": 1.125, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.34565740538175455, |
| "grad_norm": 1.6329957246780396, |
| "learning_rate": 4.737722339284436e-05, |
| "loss": 1.121, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.35003281557646027, |
| "grad_norm": 1.697094440460205, |
| "learning_rate": 4.7297645778507835e-05, |
| "loss": 1.0644, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.35440822577116604, |
| "grad_norm": 1.6578021049499512, |
| "learning_rate": 4.721694780458266e-05, |
| "loss": 1.1579, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.3587836359658718, |
| "grad_norm": 1.478832483291626, |
| "learning_rate": 4.7135133525790544e-05, |
| "loss": 1.1512, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.36315904616057754, |
| "grad_norm": 1.513938546180725, |
| "learning_rate": 4.7052207052942685e-05, |
| "loss": 1.0791, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.3675344563552833, |
| "grad_norm": 1.5836193561553955, |
| "learning_rate": 4.6968172552733185e-05, |
| "loss": 1.1259, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.3719098665499891, |
| "grad_norm": 1.617882490158081, |
| "learning_rate": 4.688303424752969e-05, |
| "loss": 1.1001, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.3762852767446948, |
| "grad_norm": 1.4709640741348267, |
| "learning_rate": 4.679679641516122e-05, |
| "loss": 1.1085, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.3806606869394006, |
| "grad_norm": 1.4871046543121338, |
| "learning_rate": 4.670946338870329e-05, |
| "loss": 1.0863, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.3850360971341063, |
| "grad_norm": 1.5609277486801147, |
| "learning_rate": 4.6621039556260095e-05, |
| "loss": 1.1255, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.3894115073288121, |
| "grad_norm": 1.513523817062378, |
| "learning_rate": 4.653152936074413e-05, |
| "loss": 1.1101, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.39378691752351785, |
| "grad_norm": 1.6045000553131104, |
| "learning_rate": 4.644093729965287e-05, |
| "loss": 1.1041, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.3981623277182236, |
| "grad_norm": 1.5579705238342285, |
| "learning_rate": 4.634926792484284e-05, |
| "loss": 1.1592, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.40253773791292935, |
| "grad_norm": 1.4306694269180298, |
| "learning_rate": 4.625652584230089e-05, |
| "loss": 1.1465, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.40691314810763507, |
| "grad_norm": 1.3425192832946777, |
| "learning_rate": 4.616271571191273e-05, |
| "loss": 1.1555, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.41128855830234085, |
| "grad_norm": 1.5686333179473877, |
| "learning_rate": 4.606784224722887e-05, |
| "loss": 1.1394, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.4156639684970466, |
| "grad_norm": 1.4096781015396118, |
| "learning_rate": 4.5971910215227684e-05, |
| "loss": 1.0751, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.42003937869175234, |
| "grad_norm": 1.3961308002471924, |
| "learning_rate": 4.5874924436075996e-05, |
| "loss": 1.1281, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.4244147888864581, |
| "grad_norm": 1.4717061519622803, |
| "learning_rate": 4.577688978288681e-05, |
| "loss": 1.1441, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.42879019908116384, |
| "grad_norm": 1.4169813394546509, |
| "learning_rate": 4.567781118147448e-05, |
| "loss": 1.1381, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.4331656092758696, |
| "grad_norm": 1.4888379573822021, |
| "learning_rate": 4.557769361010721e-05, |
| "loss": 1.0826, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.4375410194705754, |
| "grad_norm": 1.572547435760498, |
| "learning_rate": 4.547654209925693e-05, |
| "loss": 1.1131, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.4419164296652811, |
| "grad_norm": 1.5723587274551392, |
| "learning_rate": 4.5374361731346526e-05, |
| "loss": 1.1088, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.4462918398599869, |
| "grad_norm": 1.5646806955337524, |
| "learning_rate": 4.5271157640494456e-05, |
| "loss": 1.1313, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.4506672500546926, |
| "grad_norm": 1.6168869733810425, |
| "learning_rate": 4.51669350122568e-05, |
| "loss": 1.1452, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.4550426602493984, |
| "grad_norm": 1.565577745437622, |
| "learning_rate": 4.506169908336673e-05, |
| "loss": 1.1473, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.45941807044410415, |
| "grad_norm": 1.6016472578048706, |
| "learning_rate": 4.495545514147134e-05, |
| "loss": 1.1006, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.4637934806388099, |
| "grad_norm": 1.628869891166687, |
| "learning_rate": 4.484820852486598e-05, |
| "loss": 1.1327, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.46816889083351565, |
| "grad_norm": 1.4887852668762207, |
| "learning_rate": 4.473996462222606e-05, |
| "loss": 1.1011, |
| "step": 1070 |
| }, |
| { |
| "epoch": 0.47254430102822137, |
| "grad_norm": 1.6507240533828735, |
| "learning_rate": 4.463072887233625e-05, |
| "loss": 1.15, |
| "step": 1080 |
| }, |
| { |
| "epoch": 0.47691971122292715, |
| "grad_norm": 1.5163503885269165, |
| "learning_rate": 4.452050676381725e-05, |
| "loss": 1.1598, |
| "step": 1090 |
| }, |
| { |
| "epoch": 0.4812951214176329, |
| "grad_norm": 1.6033108234405518, |
| "learning_rate": 4.440930383484995e-05, |
| "loss": 1.118, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.48567053161233864, |
| "grad_norm": 1.4987646341323853, |
| "learning_rate": 4.429712567289722e-05, |
| "loss": 1.1352, |
| "step": 1110 |
| }, |
| { |
| "epoch": 0.4900459418070444, |
| "grad_norm": 1.51768159866333, |
| "learning_rate": 4.418397791442314e-05, |
| "loss": 1.1433, |
| "step": 1120 |
| }, |
| { |
| "epoch": 0.4944213520017502, |
| "grad_norm": 1.3354873657226562, |
| "learning_rate": 4.406986624460979e-05, |
| "loss": 1.147, |
| "step": 1130 |
| }, |
| { |
| "epoch": 0.4987967621964559, |
| "grad_norm": 1.5693355798721313, |
| "learning_rate": 4.395479639707158e-05, |
| "loss": 1.0808, |
| "step": 1140 |
| }, |
| { |
| "epoch": 0.5031721723911616, |
| "grad_norm": 1.5598881244659424, |
| "learning_rate": 4.3838774153567195e-05, |
| "loss": 1.1247, |
| "step": 1150 |
| }, |
| { |
| "epoch": 0.5075475825858674, |
| "grad_norm": 1.4272862672805786, |
| "learning_rate": 4.372180534370908e-05, |
| "loss": 1.1122, |
| "step": 1160 |
| }, |
| { |
| "epoch": 0.5119229927805732, |
| "grad_norm": 1.5112720727920532, |
| "learning_rate": 4.3603895844670495e-05, |
| "loss": 1.102, |
| "step": 1170 |
| }, |
| { |
| "epoch": 0.516298402975279, |
| "grad_norm": 1.589198350906372, |
| "learning_rate": 4.3485051580890236e-05, |
| "loss": 1.1435, |
| "step": 1180 |
| }, |
| { |
| "epoch": 0.5206738131699847, |
| "grad_norm": 1.4863295555114746, |
| "learning_rate": 4.336527852377497e-05, |
| "loss": 1.1374, |
| "step": 1190 |
| }, |
| { |
| "epoch": 0.5250492233646904, |
| "grad_norm": 1.4931763410568237, |
| "learning_rate": 4.324458269139916e-05, |
| "loss": 1.111, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.5294246335593962, |
| "grad_norm": 1.5186004638671875, |
| "learning_rate": 4.3122970148202744e-05, |
| "loss": 1.1213, |
| "step": 1210 |
| }, |
| { |
| "epoch": 0.533800043754102, |
| "grad_norm": 1.4999425411224365, |
| "learning_rate": 4.300044700468635e-05, |
| "loss": 1.1388, |
| "step": 1220 |
| }, |
| { |
| "epoch": 0.5381754539488077, |
| "grad_norm": 1.5555874109268188, |
| "learning_rate": 4.2877019417104334e-05, |
| "loss": 1.1028, |
| "step": 1230 |
| }, |
| { |
| "epoch": 0.5425508641435135, |
| "grad_norm": 1.4953700304031372, |
| "learning_rate": 4.27526935871554e-05, |
| "loss": 1.1342, |
| "step": 1240 |
| }, |
| { |
| "epoch": 0.5469262743382192, |
| "grad_norm": 1.4581255912780762, |
| "learning_rate": 4.262747576167106e-05, |
| "loss": 1.0921, |
| "step": 1250 |
| }, |
| { |
| "epoch": 0.5513016845329249, |
| "grad_norm": 1.552866816520691, |
| "learning_rate": 4.250137223230165e-05, |
| "loss": 1.1089, |
| "step": 1260 |
| }, |
| { |
| "epoch": 0.5556770947276307, |
| "grad_norm": 1.5412286520004272, |
| "learning_rate": 4.2374389335200335e-05, |
| "loss": 1.1261, |
| "step": 1270 |
| }, |
| { |
| "epoch": 0.5600525049223365, |
| "grad_norm": 1.64509117603302, |
| "learning_rate": 4.224653345070466e-05, |
| "loss": 1.0668, |
| "step": 1280 |
| }, |
| { |
| "epoch": 0.5644279151170423, |
| "grad_norm": 1.4940526485443115, |
| "learning_rate": 4.211781100301596e-05, |
| "loss": 1.1372, |
| "step": 1290 |
| }, |
| { |
| "epoch": 0.5688033253117479, |
| "grad_norm": 1.4879716634750366, |
| "learning_rate": 4.198822845987661e-05, |
| "loss": 1.1166, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.5731787355064537, |
| "grad_norm": 1.5581066608428955, |
| "learning_rate": 4.185779233224506e-05, |
| "loss": 1.0663, |
| "step": 1310 |
| }, |
| { |
| "epoch": 0.5775541457011595, |
| "grad_norm": 1.5502949953079224, |
| "learning_rate": 4.172650917396863e-05, |
| "loss": 1.165, |
| "step": 1320 |
| }, |
| { |
| "epoch": 0.5819295558958653, |
| "grad_norm": 1.4957443475723267, |
| "learning_rate": 4.1594385581454245e-05, |
| "loss": 1.1723, |
| "step": 1330 |
| }, |
| { |
| "epoch": 0.586304966090571, |
| "grad_norm": 1.4180186986923218, |
| "learning_rate": 4.146142819333702e-05, |
| "loss": 1.1181, |
| "step": 1340 |
| }, |
| { |
| "epoch": 0.5906803762852767, |
| "grad_norm": 1.5630731582641602, |
| "learning_rate": 4.132764369014662e-05, |
| "loss": 1.1731, |
| "step": 1350 |
| }, |
| { |
| "epoch": 0.5950557864799825, |
| "grad_norm": 1.5089911222457886, |
| "learning_rate": 4.119303879397169e-05, |
| "loss": 1.1199, |
| "step": 1360 |
| }, |
| { |
| "epoch": 0.5994311966746882, |
| "grad_norm": 1.4724007844924927, |
| "learning_rate": 4.1057620268122004e-05, |
| "loss": 1.1108, |
| "step": 1370 |
| }, |
| { |
| "epoch": 0.603806606869394, |
| "grad_norm": 1.6644493341445923, |
| "learning_rate": 4.092139491678873e-05, |
| "loss": 1.0948, |
| "step": 1380 |
| }, |
| { |
| "epoch": 0.6081820170640998, |
| "grad_norm": 1.598973035812378, |
| "learning_rate": 4.078436958470244e-05, |
| "loss": 1.0948, |
| "step": 1390 |
| }, |
| { |
| "epoch": 0.6125574272588055, |
| "grad_norm": 1.4203752279281616, |
| "learning_rate": 4.064655115678933e-05, |
| "loss": 1.0704, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.6169328374535112, |
| "grad_norm": 1.367942214012146, |
| "learning_rate": 4.050794655782515e-05, |
| "loss": 1.0958, |
| "step": 1410 |
| }, |
| { |
| "epoch": 0.621308247648217, |
| "grad_norm": 1.553614616394043, |
| "learning_rate": 4.036856275208736e-05, |
| "loss": 1.0756, |
| "step": 1420 |
| }, |
| { |
| "epoch": 0.6256836578429228, |
| "grad_norm": 1.4172255992889404, |
| "learning_rate": 4.022840674300512e-05, |
| "loss": 1.1166, |
| "step": 1430 |
| }, |
| { |
| "epoch": 0.6300590680376286, |
| "grad_norm": 1.6529884338378906, |
| "learning_rate": 4.0087485572807485e-05, |
| "loss": 1.069, |
| "step": 1440 |
| }, |
| { |
| "epoch": 0.6344344782323342, |
| "grad_norm": 1.5031507015228271, |
| "learning_rate": 3.994580632216952e-05, |
| "loss": 1.1368, |
| "step": 1450 |
| }, |
| { |
| "epoch": 0.63880988842704, |
| "grad_norm": 1.6318256855010986, |
| "learning_rate": 3.980337610985651e-05, |
| "loss": 1.1184, |
| "step": 1460 |
| }, |
| { |
| "epoch": 0.6431852986217458, |
| "grad_norm": 1.5290026664733887, |
| "learning_rate": 3.9660202092366316e-05, |
| "loss": 1.1252, |
| "step": 1470 |
| }, |
| { |
| "epoch": 0.6475607088164516, |
| "grad_norm": 1.4681955575942993, |
| "learning_rate": 3.951629146356975e-05, |
| "loss": 1.0866, |
| "step": 1480 |
| }, |
| { |
| "epoch": 0.6519361190111573, |
| "grad_norm": 1.4883171319961548, |
| "learning_rate": 3.937165145434914e-05, |
| "loss": 1.1292, |
| "step": 1490 |
| }, |
| { |
| "epoch": 0.656311529205863, |
| "grad_norm": 1.5238957405090332, |
| "learning_rate": 3.922628933223502e-05, |
| "loss": 1.1547, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.6606869394005688, |
| "grad_norm": 1.5324879884719849, |
| "learning_rate": 3.9080212401040914e-05, |
| "loss": 1.0668, |
| "step": 1510 |
| }, |
| { |
| "epoch": 0.6650623495952745, |
| "grad_norm": 1.4942597150802612, |
| "learning_rate": 3.8933428000496455e-05, |
| "loss": 1.1394, |
| "step": 1520 |
| }, |
| { |
| "epoch": 0.6694377597899803, |
| "grad_norm": 1.5268304347991943, |
| "learning_rate": 3.878594350587844e-05, |
| "loss": 1.0647, |
| "step": 1530 |
| }, |
| { |
| "epoch": 0.6738131699846861, |
| "grad_norm": 1.5157787799835205, |
| "learning_rate": 3.863776632764043e-05, |
| "loss": 1.1521, |
| "step": 1540 |
| }, |
| { |
| "epoch": 0.6781885801793918, |
| "grad_norm": 1.5063765048980713, |
| "learning_rate": 3.848890391104028e-05, |
| "loss": 1.0954, |
| "step": 1550 |
| }, |
| { |
| "epoch": 0.6825639903740975, |
| "grad_norm": 1.4812955856323242, |
| "learning_rate": 3.833936373576606e-05, |
| "loss": 1.1054, |
| "step": 1560 |
| }, |
| { |
| "epoch": 0.6869394005688033, |
| "grad_norm": 1.4586228132247925, |
| "learning_rate": 3.8189153315560324e-05, |
| "loss": 1.1125, |
| "step": 1570 |
| }, |
| { |
| "epoch": 0.6913148107635091, |
| "grad_norm": 1.3584743738174438, |
| "learning_rate": 3.803828019784248e-05, |
| "loss": 1.103, |
| "step": 1580 |
| }, |
| { |
| "epoch": 0.6956902209582149, |
| "grad_norm": 1.5196596384048462, |
| "learning_rate": 3.788675196332959e-05, |
| "loss": 1.1642, |
| "step": 1590 |
| }, |
| { |
| "epoch": 0.7000656311529205, |
| "grad_norm": 1.5605676174163818, |
| "learning_rate": 3.7734576225655504e-05, |
| "loss": 1.1656, |
| "step": 1600 |
| }, |
| { |
| "epoch": 0.7044410413476263, |
| "grad_norm": 1.4787431955337524, |
| "learning_rate": 3.758176063098829e-05, |
| "loss": 1.0855, |
| "step": 1610 |
| }, |
| { |
| "epoch": 0.7088164515423321, |
| "grad_norm": 1.4558417797088623, |
| "learning_rate": 3.742831285764602e-05, |
| "loss": 1.0855, |
| "step": 1620 |
| }, |
| { |
| "epoch": 0.7131918617370379, |
| "grad_norm": 1.552485466003418, |
| "learning_rate": 3.7274240615710986e-05, |
| "loss": 1.0949, |
| "step": 1630 |
| }, |
| { |
| "epoch": 0.7175672719317436, |
| "grad_norm": 1.5539509057998657, |
| "learning_rate": 3.711955164664235e-05, |
| "loss": 1.1285, |
| "step": 1640 |
| }, |
| { |
| "epoch": 0.7219426821264494, |
| "grad_norm": 1.4706348180770874, |
| "learning_rate": 3.6964253722887075e-05, |
| "loss": 1.1041, |
| "step": 1650 |
| }, |
| { |
| "epoch": 0.7263180923211551, |
| "grad_norm": 1.460062861442566, |
| "learning_rate": 3.680835464748949e-05, |
| "loss": 1.0845, |
| "step": 1660 |
| }, |
| { |
| "epoch": 0.7306935025158608, |
| "grad_norm": 1.4356744289398193, |
| "learning_rate": 3.6651862253699144e-05, |
| "loss": 1.0716, |
| "step": 1670 |
| }, |
| { |
| "epoch": 0.7350689127105666, |
| "grad_norm": 1.6479544639587402, |
| "learning_rate": 3.649478440457726e-05, |
| "loss": 1.1017, |
| "step": 1680 |
| }, |
| { |
| "epoch": 0.7394443229052724, |
| "grad_norm": 1.4607200622558594, |
| "learning_rate": 3.633712899260166e-05, |
| "loss": 1.0853, |
| "step": 1690 |
| }, |
| { |
| "epoch": 0.7438197330999782, |
| "grad_norm": 1.5237467288970947, |
| "learning_rate": 3.617890393927015e-05, |
| "loss": 1.1646, |
| "step": 1700 |
| }, |
| { |
| "epoch": 0.7481951432946838, |
| "grad_norm": 1.464272379875183, |
| "learning_rate": 3.6020117194702555e-05, |
| "loss": 1.0863, |
| "step": 1710 |
| }, |
| { |
| "epoch": 0.7525705534893896, |
| "grad_norm": 1.440658688545227, |
| "learning_rate": 3.586077673724124e-05, |
| "loss": 1.128, |
| "step": 1720 |
| }, |
| { |
| "epoch": 0.7569459636840954, |
| "grad_norm": 1.5667482614517212, |
| "learning_rate": 3.570089057305022e-05, |
| "loss": 1.1441, |
| "step": 1730 |
| }, |
| { |
| "epoch": 0.7613213738788012, |
| "grad_norm": 1.4171441793441772, |
| "learning_rate": 3.5540466735712906e-05, |
| "loss": 1.1002, |
| "step": 1740 |
| }, |
| { |
| "epoch": 0.7656967840735069, |
| "grad_norm": 1.412792444229126, |
| "learning_rate": 3.5379513285828433e-05, |
| "loss": 1.0983, |
| "step": 1750 |
| }, |
| { |
| "epoch": 0.7700721942682126, |
| "grad_norm": 1.468995213508606, |
| "learning_rate": 3.521803831060668e-05, |
| "loss": 1.0736, |
| "step": 1760 |
| }, |
| { |
| "epoch": 0.7744476044629184, |
| "grad_norm": 1.570688247680664, |
| "learning_rate": 3.505604992346187e-05, |
| "loss": 1.0935, |
| "step": 1770 |
| }, |
| { |
| "epoch": 0.7788230146576242, |
| "grad_norm": 1.4552085399627686, |
| "learning_rate": 3.489355626360497e-05, |
| "loss": 1.1388, |
| "step": 1780 |
| }, |
| { |
| "epoch": 0.7831984248523299, |
| "grad_norm": 1.6429483890533447, |
| "learning_rate": 3.473056549563469e-05, |
| "loss": 1.1082, |
| "step": 1790 |
| }, |
| { |
| "epoch": 0.7875738350470357, |
| "grad_norm": 1.6186859607696533, |
| "learning_rate": 3.456708580912725e-05, |
| "loss": 1.1182, |
| "step": 1800 |
| }, |
| { |
| "epoch": 0.7919492452417414, |
| "grad_norm": 1.3440533876419067, |
| "learning_rate": 3.44031254182249e-05, |
| "loss": 1.0769, |
| "step": 1810 |
| }, |
| { |
| "epoch": 0.7963246554364471, |
| "grad_norm": 1.3511815071105957, |
| "learning_rate": 3.42386925612232e-05, |
| "loss": 1.0761, |
| "step": 1820 |
| }, |
| { |
| "epoch": 0.8007000656311529, |
| "grad_norm": 1.4499377012252808, |
| "learning_rate": 3.407379550015707e-05, |
| "loss": 1.1344, |
| "step": 1830 |
| }, |
| { |
| "epoch": 0.8050754758258587, |
| "grad_norm": 1.4759228229522705, |
| "learning_rate": 3.390844252038565e-05, |
| "loss": 1.0874, |
| "step": 1840 |
| }, |
| { |
| "epoch": 0.8094508860205645, |
| "grad_norm": 1.4827808141708374, |
| "learning_rate": 3.3742641930176045e-05, |
| "loss": 1.0313, |
| "step": 1850 |
| }, |
| { |
| "epoch": 0.8138262962152701, |
| "grad_norm": 1.6069589853286743, |
| "learning_rate": 3.3576402060285786e-05, |
| "loss": 1.1089, |
| "step": 1860 |
| }, |
| { |
| "epoch": 0.8182017064099759, |
| "grad_norm": 1.4358799457550049, |
| "learning_rate": 3.3409731263544345e-05, |
| "loss": 1.0682, |
| "step": 1870 |
| }, |
| { |
| "epoch": 0.8225771166046817, |
| "grad_norm": 1.4865995645523071, |
| "learning_rate": 3.324263791443333e-05, |
| "loss": 1.0924, |
| "step": 1880 |
| }, |
| { |
| "epoch": 0.8269525267993875, |
| "grad_norm": 1.4410614967346191, |
| "learning_rate": 3.307513040866584e-05, |
| "loss": 1.0754, |
| "step": 1890 |
| }, |
| { |
| "epoch": 0.8313279369940932, |
| "grad_norm": 1.3776915073394775, |
| "learning_rate": 3.290721716276448e-05, |
| "loss": 1.097, |
| "step": 1900 |
| }, |
| { |
| "epoch": 0.8357033471887989, |
| "grad_norm": 1.5188299417495728, |
| "learning_rate": 3.273890661363855e-05, |
| "loss": 1.1616, |
| "step": 1910 |
| }, |
| { |
| "epoch": 0.8400787573835047, |
| "grad_norm": 1.5575170516967773, |
| "learning_rate": 3.257020721816013e-05, |
| "loss": 1.1246, |
| "step": 1920 |
| }, |
| { |
| "epoch": 0.8444541675782105, |
| "grad_norm": 1.5193071365356445, |
| "learning_rate": 3.240112745273911e-05, |
| "loss": 1.1162, |
| "step": 1930 |
| }, |
| { |
| "epoch": 0.8488295777729162, |
| "grad_norm": 1.451002836227417, |
| "learning_rate": 3.223167581289729e-05, |
| "loss": 1.1106, |
| "step": 1940 |
| }, |
| { |
| "epoch": 0.853204987967622, |
| "grad_norm": 1.54093599319458, |
| "learning_rate": 3.206186081284161e-05, |
| "loss": 1.071, |
| "step": 1950 |
| }, |
| { |
| "epoch": 0.8575803981623277, |
| "grad_norm": 1.5598371028900146, |
| "learning_rate": 3.1891690985036215e-05, |
| "loss": 1.0986, |
| "step": 1960 |
| }, |
| { |
| "epoch": 0.8619558083570334, |
| "grad_norm": 1.4229456186294556, |
| "learning_rate": 3.1721174879773826e-05, |
| "loss": 1.0839, |
| "step": 1970 |
| }, |
| { |
| "epoch": 0.8663312185517392, |
| "grad_norm": 1.5579663515090942, |
| "learning_rate": 3.1550321064746086e-05, |
| "loss": 1.0854, |
| "step": 1980 |
| }, |
| { |
| "epoch": 0.870706628746445, |
| "grad_norm": 1.4583122730255127, |
| "learning_rate": 3.1379138124613105e-05, |
| "loss": 1.0916, |
| "step": 1990 |
| }, |
| { |
| "epoch": 0.8750820389411508, |
| "grad_norm": 1.577874779701233, |
| "learning_rate": 3.120763466057206e-05, |
| "loss": 1.061, |
| "step": 2000 |
| }, |
| { |
| "epoch": 0.8794574491358564, |
| "grad_norm": 1.5133827924728394, |
| "learning_rate": 3.10358192899251e-05, |
| "loss": 1.1227, |
| "step": 2010 |
| }, |
| { |
| "epoch": 0.8838328593305622, |
| "grad_norm": 1.4519861936569214, |
| "learning_rate": 3.086370064564629e-05, |
| "loss": 1.086, |
| "step": 2020 |
| }, |
| { |
| "epoch": 0.888208269525268, |
| "grad_norm": 1.6563777923583984, |
| "learning_rate": 3.069128737594789e-05, |
| "loss": 1.1102, |
| "step": 2030 |
| }, |
| { |
| "epoch": 0.8925836797199738, |
| "grad_norm": 1.3476353883743286, |
| "learning_rate": 3.051858814384581e-05, |
| "loss": 1.0804, |
| "step": 2040 |
| }, |
| { |
| "epoch": 0.8969590899146795, |
| "grad_norm": 1.501389503479004, |
| "learning_rate": 3.0345611626724285e-05, |
| "loss": 1.1101, |
| "step": 2050 |
| }, |
| { |
| "epoch": 0.9013345001093852, |
| "grad_norm": 1.483244776725769, |
| "learning_rate": 3.0172366515899982e-05, |
| "loss": 1.0964, |
| "step": 2060 |
| }, |
| { |
| "epoch": 0.905709910304091, |
| "grad_norm": 1.4610023498535156, |
| "learning_rate": 2.9998861516185173e-05, |
| "loss": 1.0953, |
| "step": 2070 |
| }, |
| { |
| "epoch": 0.9100853204987968, |
| "grad_norm": 1.4304044246673584, |
| "learning_rate": 2.982510534545043e-05, |
| "loss": 1.0687, |
| "step": 2080 |
| }, |
| { |
| "epoch": 0.9144607306935025, |
| "grad_norm": 1.461287498474121, |
| "learning_rate": 2.9651106734186618e-05, |
| "loss": 1.0875, |
| "step": 2090 |
| }, |
| { |
| "epoch": 0.9188361408882083, |
| "grad_norm": 1.484709620475769, |
| "learning_rate": 2.9476874425066105e-05, |
| "loss": 1.1122, |
| "step": 2100 |
| }, |
| { |
| "epoch": 0.923211551082914, |
| "grad_norm": 1.4673666954040527, |
| "learning_rate": 2.9302417172503622e-05, |
| "loss": 1.1506, |
| "step": 2110 |
| }, |
| { |
| "epoch": 0.9275869612776197, |
| "grad_norm": 1.456113576889038, |
| "learning_rate": 2.9127743742216303e-05, |
| "loss": 1.0851, |
| "step": 2120 |
| }, |
| { |
| "epoch": 0.9319623714723255, |
| "grad_norm": 1.524404525756836, |
| "learning_rate": 2.8952862910783267e-05, |
| "loss": 1.1047, |
| "step": 2130 |
| }, |
| { |
| "epoch": 0.9363377816670313, |
| "grad_norm": 1.3824920654296875, |
| "learning_rate": 2.877778346520466e-05, |
| "loss": 1.069, |
| "step": 2140 |
| }, |
| { |
| "epoch": 0.9407131918617371, |
| "grad_norm": 1.3911488056182861, |
| "learning_rate": 2.860251420246009e-05, |
| "loss": 1.1128, |
| "step": 2150 |
| }, |
| { |
| "epoch": 0.9450886020564427, |
| "grad_norm": 1.6120619773864746, |
| "learning_rate": 2.842706392906669e-05, |
| "loss": 1.1468, |
| "step": 2160 |
| }, |
| { |
| "epoch": 0.9494640122511485, |
| "grad_norm": 1.4727811813354492, |
| "learning_rate": 2.825144146063654e-05, |
| "loss": 1.1405, |
| "step": 2170 |
| }, |
| { |
| "epoch": 0.9538394224458543, |
| "grad_norm": 1.4946926832199097, |
| "learning_rate": 2.8075655621433826e-05, |
| "loss": 1.0887, |
| "step": 2180 |
| }, |
| { |
| "epoch": 0.9582148326405601, |
| "grad_norm": 1.575395107269287, |
| "learning_rate": 2.7899715243931375e-05, |
| "loss": 1.077, |
| "step": 2190 |
| }, |
| { |
| "epoch": 0.9625902428352658, |
| "grad_norm": 1.483463168144226, |
| "learning_rate": 2.7723629168366883e-05, |
| "loss": 1.1137, |
| "step": 2200 |
| }, |
| { |
| "epoch": 0.9669656530299715, |
| "grad_norm": 1.5409109592437744, |
| "learning_rate": 2.754740624229875e-05, |
| "loss": 1.0749, |
| "step": 2210 |
| }, |
| { |
| "epoch": 0.9713410632246773, |
| "grad_norm": 1.4951428174972534, |
| "learning_rate": 2.737105532016152e-05, |
| "loss": 1.0779, |
| "step": 2220 |
| }, |
| { |
| "epoch": 0.9757164734193831, |
| "grad_norm": 1.6109696626663208, |
| "learning_rate": 2.719458526282099e-05, |
| "loss": 1.1006, |
| "step": 2230 |
| }, |
| { |
| "epoch": 0.9800918836140888, |
| "grad_norm": 1.4150285720825195, |
| "learning_rate": 2.7018004937128937e-05, |
| "loss": 1.0498, |
| "step": 2240 |
| }, |
| { |
| "epoch": 0.9844672938087946, |
| "grad_norm": 1.5306379795074463, |
| "learning_rate": 2.6841323215477714e-05, |
| "loss": 1.0943, |
| "step": 2250 |
| }, |
| { |
| "epoch": 0.9888427040035004, |
| "grad_norm": 1.3424153327941895, |
| "learning_rate": 2.6664548975354332e-05, |
| "loss": 1.079, |
| "step": 2260 |
| }, |
| { |
| "epoch": 0.993218114198206, |
| "grad_norm": 1.4553660154342651, |
| "learning_rate": 2.6487691098894447e-05, |
| "loss": 1.0663, |
| "step": 2270 |
| }, |
| { |
| "epoch": 0.9975935243929118, |
| "grad_norm": 1.5150882005691528, |
| "learning_rate": 2.6310758472436103e-05, |
| "loss": 1.0449, |
| "step": 2280 |
| }, |
| { |
| "epoch": 1.0017501640778823, |
| "grad_norm": 1.4436382055282593, |
| "learning_rate": 2.6133759986073197e-05, |
| "loss": 1.1167, |
| "step": 2290 |
| }, |
| { |
| "epoch": 1.006125574272588, |
| "grad_norm": 1.6366405487060547, |
| "learning_rate": 2.595670453320879e-05, |
| "loss": 1.0539, |
| "step": 2300 |
| }, |
| { |
| "epoch": 1.0105009844672939, |
| "grad_norm": 1.5061817169189453, |
| "learning_rate": 2.5779601010108273e-05, |
| "loss": 1.0694, |
| "step": 2310 |
| }, |
| { |
| "epoch": 1.0148763946619996, |
| "grad_norm": 1.5583983659744263, |
| "learning_rate": 2.5602458315452355e-05, |
| "loss": 1.0598, |
| "step": 2320 |
| }, |
| { |
| "epoch": 1.0192518048567054, |
| "grad_norm": 1.4948369264602661, |
| "learning_rate": 2.5425285349889938e-05, |
| "loss": 1.0323, |
| "step": 2330 |
| }, |
| { |
| "epoch": 1.023627215051411, |
| "grad_norm": 1.4352792501449585, |
| "learning_rate": 2.5248091015590907e-05, |
| "loss": 1.0418, |
| "step": 2340 |
| }, |
| { |
| "epoch": 1.0280026252461167, |
| "grad_norm": 1.5330169200897217, |
| "learning_rate": 2.507088421579884e-05, |
| "loss": 1.0375, |
| "step": 2350 |
| }, |
| { |
| "epoch": 1.0323780354408225, |
| "grad_norm": 1.4603830575942993, |
| "learning_rate": 2.489367385438365e-05, |
| "loss": 1.1019, |
| "step": 2360 |
| }, |
| { |
| "epoch": 1.0367534456355283, |
| "grad_norm": 1.3922033309936523, |
| "learning_rate": 2.4716468835394195e-05, |
| "loss": 1.0636, |
| "step": 2370 |
| }, |
| { |
| "epoch": 1.041128855830234, |
| "grad_norm": 1.389041543006897, |
| "learning_rate": 2.4539278062610928e-05, |
| "loss": 1.0634, |
| "step": 2380 |
| }, |
| { |
| "epoch": 1.0455042660249398, |
| "grad_norm": 1.577828049659729, |
| "learning_rate": 2.4362110439098443e-05, |
| "loss": 1.0551, |
| "step": 2390 |
| }, |
| { |
| "epoch": 1.0498796762196456, |
| "grad_norm": 1.4485417604446411, |
| "learning_rate": 2.4184974866758232e-05, |
| "loss": 1.0447, |
| "step": 2400 |
| }, |
| { |
| "epoch": 1.0542550864143514, |
| "grad_norm": 1.3846266269683838, |
| "learning_rate": 2.400788024588134e-05, |
| "loss": 1.0129, |
| "step": 2410 |
| }, |
| { |
| "epoch": 1.0586304966090572, |
| "grad_norm": 1.612506628036499, |
| "learning_rate": 2.3830835474701158e-05, |
| "loss": 1.0622, |
| "step": 2420 |
| }, |
| { |
| "epoch": 1.063005906803763, |
| "grad_norm": 1.3812724351882935, |
| "learning_rate": 2.365384944894638e-05, |
| "loss": 1.024, |
| "step": 2430 |
| }, |
| { |
| "epoch": 1.0673813169984685, |
| "grad_norm": 1.5689557790756226, |
| "learning_rate": 2.3476931061393968e-05, |
| "loss": 1.0753, |
| "step": 2440 |
| }, |
| { |
| "epoch": 1.0717567271931743, |
| "grad_norm": 1.5560890436172485, |
| "learning_rate": 2.33000892014224e-05, |
| "loss": 1.0436, |
| "step": 2450 |
| }, |
| { |
| "epoch": 1.07613213738788, |
| "grad_norm": 1.625240445137024, |
| "learning_rate": 2.312333275456492e-05, |
| "loss": 1.0584, |
| "step": 2460 |
| }, |
| { |
| "epoch": 1.0805075475825858, |
| "grad_norm": 1.5473369359970093, |
| "learning_rate": 2.2946670602063175e-05, |
| "loss": 1.0547, |
| "step": 2470 |
| }, |
| { |
| "epoch": 1.0848829577772916, |
| "grad_norm": 1.529844045639038, |
| "learning_rate": 2.2770111620420935e-05, |
| "loss": 1.0644, |
| "step": 2480 |
| }, |
| { |
| "epoch": 1.0892583679719974, |
| "grad_norm": 1.512666940689087, |
| "learning_rate": 2.2593664680958045e-05, |
| "loss": 1.0301, |
| "step": 2490 |
| }, |
| { |
| "epoch": 1.0936337781667032, |
| "grad_norm": 1.5139986276626587, |
| "learning_rate": 2.2417338649364765e-05, |
| "loss": 1.0677, |
| "step": 2500 |
| }, |
| { |
| "epoch": 1.098009188361409, |
| "grad_norm": 1.4566220045089722, |
| "learning_rate": 2.2241142385256237e-05, |
| "loss": 1.0807, |
| "step": 2510 |
| }, |
| { |
| "epoch": 1.1023845985561147, |
| "grad_norm": 1.644047737121582, |
| "learning_rate": 2.2065084741727337e-05, |
| "loss": 1.0405, |
| "step": 2520 |
| }, |
| { |
| "epoch": 1.1067600087508205, |
| "grad_norm": 1.5637203454971313, |
| "learning_rate": 2.1889174564907898e-05, |
| "loss": 1.0686, |
| "step": 2530 |
| }, |
| { |
| "epoch": 1.1111354189455263, |
| "grad_norm": 1.4541386365890503, |
| "learning_rate": 2.1713420693518155e-05, |
| "loss": 1.0723, |
| "step": 2540 |
| }, |
| { |
| "epoch": 1.1155108291402318, |
| "grad_norm": 1.5103000402450562, |
| "learning_rate": 2.153783195842472e-05, |
| "loss": 1.0749, |
| "step": 2550 |
| }, |
| { |
| "epoch": 1.1198862393349376, |
| "grad_norm": 1.3568867444992065, |
| "learning_rate": 2.136241718219677e-05, |
| "loss": 1.0348, |
| "step": 2560 |
| }, |
| { |
| "epoch": 1.1242616495296434, |
| "grad_norm": 1.4986484050750732, |
| "learning_rate": 2.118718517866286e-05, |
| "loss": 1.0317, |
| "step": 2570 |
| }, |
| { |
| "epoch": 1.1286370597243491, |
| "grad_norm": 1.5433015823364258, |
| "learning_rate": 2.101214475246798e-05, |
| "loss": 1.0577, |
| "step": 2580 |
| }, |
| { |
| "epoch": 1.133012469919055, |
| "grad_norm": 1.5786523818969727, |
| "learning_rate": 2.0837304698631195e-05, |
| "loss": 1.0226, |
| "step": 2590 |
| }, |
| { |
| "epoch": 1.1373878801137607, |
| "grad_norm": 1.6050409078598022, |
| "learning_rate": 2.066267380210375e-05, |
| "loss": 1.08, |
| "step": 2600 |
| }, |
| { |
| "epoch": 1.1417632903084665, |
| "grad_norm": 1.551988959312439, |
| "learning_rate": 2.0488260837327626e-05, |
| "loss": 1.0365, |
| "step": 2610 |
| }, |
| { |
| "epoch": 1.1461387005031722, |
| "grad_norm": 1.4551851749420166, |
| "learning_rate": 2.0314074567794704e-05, |
| "loss": 1.0489, |
| "step": 2620 |
| }, |
| { |
| "epoch": 1.150514110697878, |
| "grad_norm": 1.579973816871643, |
| "learning_rate": 2.0140123745606397e-05, |
| "loss": 1.0423, |
| "step": 2630 |
| }, |
| { |
| "epoch": 1.1548895208925836, |
| "grad_norm": 1.5528831481933594, |
| "learning_rate": 1.9966417111033913e-05, |
| "loss": 1.013, |
| "step": 2640 |
| }, |
| { |
| "epoch": 1.1592649310872893, |
| "grad_norm": 1.410082459449768, |
| "learning_rate": 1.979296339207912e-05, |
| "loss": 1.0464, |
| "step": 2650 |
| }, |
| { |
| "epoch": 1.1636403412819951, |
| "grad_norm": 1.5580497980117798, |
| "learning_rate": 1.961977130403595e-05, |
| "loss": 1.0405, |
| "step": 2660 |
| }, |
| { |
| "epoch": 1.168015751476701, |
| "grad_norm": 1.6045440435409546, |
| "learning_rate": 1.9446849549052542e-05, |
| "loss": 1.0094, |
| "step": 2670 |
| }, |
| { |
| "epoch": 1.1723911616714067, |
| "grad_norm": 1.4971415996551514, |
| "learning_rate": 1.9274206815693957e-05, |
| "loss": 1.0706, |
| "step": 2680 |
| }, |
| { |
| "epoch": 1.1767665718661124, |
| "grad_norm": 1.5157324075698853, |
| "learning_rate": 1.9101851778505664e-05, |
| "loss": 1.0354, |
| "step": 2690 |
| }, |
| { |
| "epoch": 1.1811419820608182, |
| "grad_norm": 1.4750152826309204, |
| "learning_rate": 1.8929793097577613e-05, |
| "loss": 1.0663, |
| "step": 2700 |
| }, |
| { |
| "epoch": 1.185517392255524, |
| "grad_norm": 1.6027971506118774, |
| "learning_rate": 1.8758039418109167e-05, |
| "loss": 1.0404, |
| "step": 2710 |
| }, |
| { |
| "epoch": 1.1898928024502298, |
| "grad_norm": 1.4709988832473755, |
| "learning_rate": 1.85865993699747e-05, |
| "loss": 0.996, |
| "step": 2720 |
| }, |
| { |
| "epoch": 1.1942682126449355, |
| "grad_norm": 1.4839427471160889, |
| "learning_rate": 1.8415481567289954e-05, |
| "loss": 1.0368, |
| "step": 2730 |
| }, |
| { |
| "epoch": 1.1986436228396413, |
| "grad_norm": 1.5311963558197021, |
| "learning_rate": 1.8244694607979265e-05, |
| "loss": 1.0569, |
| "step": 2740 |
| }, |
| { |
| "epoch": 1.2030190330343469, |
| "grad_norm": 1.4894174337387085, |
| "learning_rate": 1.8074247073343508e-05, |
| "loss": 1.0441, |
| "step": 2750 |
| }, |
| { |
| "epoch": 1.2073944432290526, |
| "grad_norm": 1.5051078796386719, |
| "learning_rate": 1.7904147527628978e-05, |
| "loss": 1.0631, |
| "step": 2760 |
| }, |
| { |
| "epoch": 1.2117698534237584, |
| "grad_norm": 1.6134063005447388, |
| "learning_rate": 1.7734404517597004e-05, |
| "loss": 1.0481, |
| "step": 2770 |
| }, |
| { |
| "epoch": 1.2161452636184642, |
| "grad_norm": 1.4523193836212158, |
| "learning_rate": 1.7565026572094563e-05, |
| "loss": 1.0614, |
| "step": 2780 |
| }, |
| { |
| "epoch": 1.22052067381317, |
| "grad_norm": 1.3861931562423706, |
| "learning_rate": 1.7396022201625755e-05, |
| "loss": 1.062, |
| "step": 2790 |
| }, |
| { |
| "epoch": 1.2248960840078758, |
| "grad_norm": 1.4329195022583008, |
| "learning_rate": 1.7227399897924128e-05, |
| "loss": 1.0014, |
| "step": 2800 |
| }, |
| { |
| "epoch": 1.2292714942025815, |
| "grad_norm": 1.4674146175384521, |
| "learning_rate": 1.7059168133526043e-05, |
| "loss": 1.0915, |
| "step": 2810 |
| }, |
| { |
| "epoch": 1.2336469043972873, |
| "grad_norm": 1.4468815326690674, |
| "learning_rate": 1.6891335361344978e-05, |
| "loss": 1.0788, |
| "step": 2820 |
| }, |
| { |
| "epoch": 1.238022314591993, |
| "grad_norm": 1.3832966089248657, |
| "learning_rate": 1.6723910014246798e-05, |
| "loss": 1.0282, |
| "step": 2830 |
| }, |
| { |
| "epoch": 1.2423977247866986, |
| "grad_norm": 1.4221124649047852, |
| "learning_rate": 1.6556900504625998e-05, |
| "loss": 1.0688, |
| "step": 2840 |
| }, |
| { |
| "epoch": 1.2467731349814044, |
| "grad_norm": 1.5301196575164795, |
| "learning_rate": 1.6390315223983066e-05, |
| "loss": 1.0247, |
| "step": 2850 |
| }, |
| { |
| "epoch": 1.2511485451761102, |
| "grad_norm": 1.434435248374939, |
| "learning_rate": 1.6224162542502857e-05, |
| "loss": 1.0071, |
| "step": 2860 |
| }, |
| { |
| "epoch": 1.255523955370816, |
| "grad_norm": 1.5031973123550415, |
| "learning_rate": 1.6058450808633958e-05, |
| "loss": 1.0898, |
| "step": 2870 |
| }, |
| { |
| "epoch": 1.2598993655655217, |
| "grad_norm": 1.5583535432815552, |
| "learning_rate": 1.589318834866928e-05, |
| "loss": 1.0726, |
| "step": 2880 |
| }, |
| { |
| "epoch": 1.2642747757602275, |
| "grad_norm": 1.5489450693130493, |
| "learning_rate": 1.5728383466327684e-05, |
| "loss": 1.0498, |
| "step": 2890 |
| }, |
| { |
| "epoch": 1.2686501859549333, |
| "grad_norm": 1.3748371601104736, |
| "learning_rate": 1.5564044442336755e-05, |
| "loss": 1.0071, |
| "step": 2900 |
| }, |
| { |
| "epoch": 1.273025596149639, |
| "grad_norm": 1.581483006477356, |
| "learning_rate": 1.54001795340167e-05, |
| "loss": 0.9992, |
| "step": 2910 |
| }, |
| { |
| "epoch": 1.2774010063443448, |
| "grad_norm": 1.5674552917480469, |
| "learning_rate": 1.5236796974865481e-05, |
| "loss": 1.0497, |
| "step": 2920 |
| }, |
| { |
| "epoch": 1.2817764165390506, |
| "grad_norm": 1.5625611543655396, |
| "learning_rate": 1.5073904974145144e-05, |
| "loss": 0.997, |
| "step": 2930 |
| }, |
| { |
| "epoch": 1.2861518267337564, |
| "grad_norm": 1.625352382659912, |
| "learning_rate": 1.4911511716469279e-05, |
| "loss": 1.026, |
| "step": 2940 |
| }, |
| { |
| "epoch": 1.2905272369284622, |
| "grad_norm": 1.4870202541351318, |
| "learning_rate": 1.4749625361391815e-05, |
| "loss": 1.0561, |
| "step": 2950 |
| }, |
| { |
| "epoch": 1.2949026471231677, |
| "grad_norm": 1.551835060119629, |
| "learning_rate": 1.4588254042997057e-05, |
| "loss": 1.0484, |
| "step": 2960 |
| }, |
| { |
| "epoch": 1.2992780573178735, |
| "grad_norm": 1.4249804019927979, |
| "learning_rate": 1.4427405869490923e-05, |
| "loss": 1.068, |
| "step": 2970 |
| }, |
| { |
| "epoch": 1.3036534675125793, |
| "grad_norm": 1.5101299285888672, |
| "learning_rate": 1.4267088922793615e-05, |
| "loss": 1.0761, |
| "step": 2980 |
| }, |
| { |
| "epoch": 1.308028877707285, |
| "grad_norm": 1.5725096464157104, |
| "learning_rate": 1.4107311258133485e-05, |
| "loss": 1.0459, |
| "step": 2990 |
| }, |
| { |
| "epoch": 1.3124042879019908, |
| "grad_norm": 1.4517625570297241, |
| "learning_rate": 1.3948080903642307e-05, |
| "loss": 1.0429, |
| "step": 3000 |
| }, |
| { |
| "epoch": 1.3167796980966966, |
| "grad_norm": 1.750517725944519, |
| "learning_rate": 1.3789405859951893e-05, |
| "loss": 1.0646, |
| "step": 3010 |
| }, |
| { |
| "epoch": 1.3211551082914024, |
| "grad_norm": 1.3727548122406006, |
| "learning_rate": 1.3631294099792136e-05, |
| "loss": 1.0217, |
| "step": 3020 |
| }, |
| { |
| "epoch": 1.3255305184861081, |
| "grad_norm": 1.5657787322998047, |
| "learning_rate": 1.3473753567590372e-05, |
| "loss": 1.0441, |
| "step": 3030 |
| }, |
| { |
| "epoch": 1.3299059286808137, |
| "grad_norm": 1.4173895120620728, |
| "learning_rate": 1.3316792179072202e-05, |
| "loss": 1.0571, |
| "step": 3040 |
| }, |
| { |
| "epoch": 1.3342813388755195, |
| "grad_norm": 1.4876666069030762, |
| "learning_rate": 1.3160417820863807e-05, |
| "loss": 1.1207, |
| "step": 3050 |
| }, |
| { |
| "epoch": 1.3386567490702252, |
| "grad_norm": 1.635214924812317, |
| "learning_rate": 1.3004638350095627e-05, |
| "loss": 1.0501, |
| "step": 3060 |
| }, |
| { |
| "epoch": 1.343032159264931, |
| "grad_norm": 1.4365153312683105, |
| "learning_rate": 1.2849461594007622e-05, |
| "loss": 1.0522, |
| "step": 3070 |
| }, |
| { |
| "epoch": 1.3474075694596368, |
| "grad_norm": 1.570642352104187, |
| "learning_rate": 1.2694895349555938e-05, |
| "loss": 1.1029, |
| "step": 3080 |
| }, |
| { |
| "epoch": 1.3517829796543426, |
| "grad_norm": 1.5404038429260254, |
| "learning_rate": 1.2540947383021196e-05, |
| "loss": 1.0097, |
| "step": 3090 |
| }, |
| { |
| "epoch": 1.3561583898490484, |
| "grad_norm": 1.4297966957092285, |
| "learning_rate": 1.2387625429618235e-05, |
| "loss": 1.0823, |
| "step": 3100 |
| }, |
| { |
| "epoch": 1.3605338000437541, |
| "grad_norm": 1.5374993085861206, |
| "learning_rate": 1.2234937193107449e-05, |
| "loss": 1.0511, |
| "step": 3110 |
| }, |
| { |
| "epoch": 1.36490921023846, |
| "grad_norm": 1.418187141418457, |
| "learning_rate": 1.208289034540774e-05, |
| "loss": 1.0097, |
| "step": 3120 |
| }, |
| { |
| "epoch": 1.3692846204331657, |
| "grad_norm": 1.599236249923706, |
| "learning_rate": 1.1931492526210988e-05, |
| "loss": 1.0431, |
| "step": 3130 |
| }, |
| { |
| "epoch": 1.3736600306278715, |
| "grad_norm": 1.4665392637252808, |
| "learning_rate": 1.1780751342598254e-05, |
| "loss": 1.0091, |
| "step": 3140 |
| }, |
| { |
| "epoch": 1.3780354408225772, |
| "grad_norm": 1.475703239440918, |
| "learning_rate": 1.1630674368657476e-05, |
| "loss": 1.0821, |
| "step": 3150 |
| }, |
| { |
| "epoch": 1.382410851017283, |
| "grad_norm": 1.515609622001648, |
| "learning_rate": 1.1481269145102985e-05, |
| "loss": 1.0217, |
| "step": 3160 |
| }, |
| { |
| "epoch": 1.3867862612119886, |
| "grad_norm": 1.577500820159912, |
| "learning_rate": 1.1332543178896577e-05, |
| "loss": 1.0153, |
| "step": 3170 |
| }, |
| { |
| "epoch": 1.3911616714066943, |
| "grad_norm": 1.5948362350463867, |
| "learning_rate": 1.1184503942870295e-05, |
| "loss": 1.0654, |
| "step": 3180 |
| }, |
| { |
| "epoch": 1.3955370816014, |
| "grad_norm": 1.543228268623352, |
| "learning_rate": 1.1037158875351026e-05, |
| "loss": 1.0484, |
| "step": 3190 |
| }, |
| { |
| "epoch": 1.3999124917961059, |
| "grad_norm": 1.5313066244125366, |
| "learning_rate": 1.0890515379786664e-05, |
| "loss": 1.0771, |
| "step": 3200 |
| }, |
| { |
| "epoch": 1.4042879019908117, |
| "grad_norm": 1.5253814458847046, |
| "learning_rate": 1.0744580824374217e-05, |
| "loss": 1.0013, |
| "step": 3210 |
| }, |
| { |
| "epoch": 1.4086633121855174, |
| "grad_norm": 1.5002059936523438, |
| "learning_rate": 1.0599362541689493e-05, |
| "loss": 1.0223, |
| "step": 3220 |
| }, |
| { |
| "epoch": 1.4130387223802232, |
| "grad_norm": 1.4917947053909302, |
| "learning_rate": 1.0454867828318742e-05, |
| "loss": 1.0595, |
| "step": 3230 |
| }, |
| { |
| "epoch": 1.4174141325749288, |
| "grad_norm": 1.52601158618927, |
| "learning_rate": 1.0311103944492015e-05, |
| "loss": 1.0335, |
| "step": 3240 |
| }, |
| { |
| "epoch": 1.4217895427696345, |
| "grad_norm": 1.5720328092575073, |
| "learning_rate": 1.0168078113718327e-05, |
| "loss": 1.0451, |
| "step": 3250 |
| }, |
| { |
| "epoch": 1.4261649529643403, |
| "grad_norm": 1.5480356216430664, |
| "learning_rate": 1.0025797522422778e-05, |
| "loss": 1.021, |
| "step": 3260 |
| }, |
| { |
| "epoch": 1.430540363159046, |
| "grad_norm": 1.5697144269943237, |
| "learning_rate": 9.884269319585402e-06, |
| "loss": 1.0263, |
| "step": 3270 |
| }, |
| { |
| "epoch": 1.4349157733537519, |
| "grad_norm": 1.5155223608016968, |
| "learning_rate": 9.743500616382012e-06, |
| "loss": 1.0562, |
| "step": 3280 |
| }, |
| { |
| "epoch": 1.4392911835484576, |
| "grad_norm": 1.5573937892913818, |
| "learning_rate": 9.603498485826848e-06, |
| "loss": 1.0509, |
| "step": 3290 |
| }, |
| { |
| "epoch": 1.4436665937431634, |
| "grad_norm": 1.3795148134231567, |
| "learning_rate": 9.464269962417233e-06, |
| "loss": 1.0498, |
| "step": 3300 |
| }, |
| { |
| "epoch": 1.4480420039378692, |
| "grad_norm": 1.496437430381775, |
| "learning_rate": 9.325822041780105e-06, |
| "loss": 1.0061, |
| "step": 3310 |
| }, |
| { |
| "epoch": 1.452417414132575, |
| "grad_norm": 1.526340126991272, |
| "learning_rate": 9.188161680320486e-06, |
| "loss": 1.0291, |
| "step": 3320 |
| }, |
| { |
| "epoch": 1.4567928243272807, |
| "grad_norm": 1.4637991189956665, |
| "learning_rate": 9.051295794872008e-06, |
| "loss": 1.0587, |
| "step": 3330 |
| }, |
| { |
| "epoch": 1.4611682345219865, |
| "grad_norm": 1.5240123271942139, |
| "learning_rate": 8.915231262349316e-06, |
| "loss": 1.0013, |
| "step": 3340 |
| }, |
| { |
| "epoch": 1.4655436447166923, |
| "grad_norm": 1.3594683408737183, |
| "learning_rate": 8.77997491940259e-06, |
| "loss": 0.9967, |
| "step": 3350 |
| }, |
| { |
| "epoch": 1.469919054911398, |
| "grad_norm": 1.6217098236083984, |
| "learning_rate": 8.645533562073971e-06, |
| "loss": 1.0683, |
| "step": 3360 |
| }, |
| { |
| "epoch": 1.4742944651061036, |
| "grad_norm": 1.5475493669509888, |
| "learning_rate": 8.51191394545615e-06, |
| "loss": 1.0166, |
| "step": 3370 |
| }, |
| { |
| "epoch": 1.4786698753008094, |
| "grad_norm": 1.4708356857299805, |
| "learning_rate": 8.379122783352927e-06, |
| "loss": 1.0105, |
| "step": 3380 |
| }, |
| { |
| "epoch": 1.4830452854955152, |
| "grad_norm": 1.5092549324035645, |
| "learning_rate": 8.247166747941846e-06, |
| "loss": 1.0416, |
| "step": 3390 |
| }, |
| { |
| "epoch": 1.487420695690221, |
| "grad_norm": 1.5428940057754517, |
| "learning_rate": 8.116052469438979e-06, |
| "loss": 1.0268, |
| "step": 3400 |
| }, |
| { |
| "epoch": 1.4917961058849267, |
| "grad_norm": 1.4949884414672852, |
| "learning_rate": 7.985786535765794e-06, |
| "loss": 1.037, |
| "step": 3410 |
| }, |
| { |
| "epoch": 1.4961715160796325, |
| "grad_norm": 1.4505797624588013, |
| "learning_rate": 7.856375492218127e-06, |
| "loss": 1.0374, |
| "step": 3420 |
| }, |
| { |
| "epoch": 1.500546926274338, |
| "grad_norm": 1.6311450004577637, |
| "learning_rate": 7.727825841137282e-06, |
| "loss": 1.0337, |
| "step": 3430 |
| }, |
| { |
| "epoch": 1.5049223364690438, |
| "grad_norm": 1.4141907691955566, |
| "learning_rate": 7.6001440415833705e-06, |
| "loss": 0.9676, |
| "step": 3440 |
| }, |
| { |
| "epoch": 1.5092977466637496, |
| "grad_norm": 1.499178171157837, |
| "learning_rate": 7.473336509010742e-06, |
| "loss": 1.0763, |
| "step": 3450 |
| }, |
| { |
| "epoch": 1.5136731568584554, |
| "grad_norm": 1.3310422897338867, |
| "learning_rate": 7.347409614945621e-06, |
| "loss": 0.9934, |
| "step": 3460 |
| }, |
| { |
| "epoch": 1.5180485670531612, |
| "grad_norm": 1.5936180353164673, |
| "learning_rate": 7.2223696866659865e-06, |
| "loss": 1.0169, |
| "step": 3470 |
| }, |
| { |
| "epoch": 1.522423977247867, |
| "grad_norm": 1.4071223735809326, |
| "learning_rate": 7.098223006883667e-06, |
| "loss": 1.0532, |
| "step": 3480 |
| }, |
| { |
| "epoch": 1.5267993874425727, |
| "grad_norm": 1.381949543952942, |
| "learning_rate": 6.974975813428622e-06, |
| "loss": 0.949, |
| "step": 3490 |
| }, |
| { |
| "epoch": 1.5311747976372785, |
| "grad_norm": 1.548047661781311, |
| "learning_rate": 6.852634298935554e-06, |
| "loss": 1.0571, |
| "step": 3500 |
| }, |
| { |
| "epoch": 1.5355502078319843, |
| "grad_norm": 1.6835612058639526, |
| "learning_rate": 6.73120461053274e-06, |
| "loss": 1.0665, |
| "step": 3510 |
| }, |
| { |
| "epoch": 1.53992561802669, |
| "grad_norm": 1.5767420530319214, |
| "learning_rate": 6.610692849533176e-06, |
| "loss": 1.0711, |
| "step": 3520 |
| }, |
| { |
| "epoch": 1.5443010282213958, |
| "grad_norm": 1.6286526918411255, |
| "learning_rate": 6.491105071127984e-06, |
| "loss": 1.0837, |
| "step": 3530 |
| }, |
| { |
| "epoch": 1.5486764384161016, |
| "grad_norm": 1.5266960859298706, |
| "learning_rate": 6.372447284082186e-06, |
| "loss": 1.0329, |
| "step": 3540 |
| }, |
| { |
| "epoch": 1.5530518486108074, |
| "grad_norm": 1.5030903816223145, |
| "learning_rate": 6.254725450432819e-06, |
| "loss": 1.0385, |
| "step": 3550 |
| }, |
| { |
| "epoch": 1.5574272588055131, |
| "grad_norm": 1.542777419090271, |
| "learning_rate": 6.137945485189292e-06, |
| "loss": 1.0784, |
| "step": 3560 |
| }, |
| { |
| "epoch": 1.561802669000219, |
| "grad_norm": 1.48823082447052, |
| "learning_rate": 6.022113256036268e-06, |
| "loss": 1.0194, |
| "step": 3570 |
| }, |
| { |
| "epoch": 1.5661780791949247, |
| "grad_norm": 1.651016116142273, |
| "learning_rate": 5.907234583038781e-06, |
| "loss": 1.0996, |
| "step": 3580 |
| }, |
| { |
| "epoch": 1.5705534893896302, |
| "grad_norm": 1.4115656614303589, |
| "learning_rate": 5.793315238349834e-06, |
| "loss": 1.0681, |
| "step": 3590 |
| }, |
| { |
| "epoch": 1.574928899584336, |
| "grad_norm": 1.5791130065917969, |
| "learning_rate": 5.68036094592034e-06, |
| "loss": 1.048, |
| "step": 3600 |
| }, |
| { |
| "epoch": 1.5793043097790418, |
| "grad_norm": 1.6106353998184204, |
| "learning_rate": 5.568377381211548e-06, |
| "loss": 1.0411, |
| "step": 3610 |
| }, |
| { |
| "epoch": 1.5836797199737476, |
| "grad_norm": 1.461083173751831, |
| "learning_rate": 5.457370170909878e-06, |
| "loss": 1.0249, |
| "step": 3620 |
| }, |
| { |
| "epoch": 1.5880551301684533, |
| "grad_norm": 1.4923583269119263, |
| "learning_rate": 5.347344892644171e-06, |
| "loss": 1.0462, |
| "step": 3630 |
| }, |
| { |
| "epoch": 1.592430540363159, |
| "grad_norm": 1.4730093479156494, |
| "learning_rate": 5.238307074705481e-06, |
| "loss": 0.9946, |
| "step": 3640 |
| }, |
| { |
| "epoch": 1.5968059505578647, |
| "grad_norm": 1.626094937324524, |
| "learning_rate": 5.130262195769273e-06, |
| "loss": 1.0611, |
| "step": 3650 |
| }, |
| { |
| "epoch": 1.6011813607525704, |
| "grad_norm": 1.3962546586990356, |
| "learning_rate": 5.023215684620159e-06, |
| "loss": 1.0505, |
| "step": 3660 |
| }, |
| { |
| "epoch": 1.6055567709472762, |
| "grad_norm": 1.587723970413208, |
| "learning_rate": 4.917172919879098e-06, |
| "loss": 0.9735, |
| "step": 3670 |
| }, |
| { |
| "epoch": 1.609932181141982, |
| "grad_norm": 1.5360171794891357, |
| "learning_rate": 4.812139229733179e-06, |
| "loss": 1.0593, |
| "step": 3680 |
| }, |
| { |
| "epoch": 1.6143075913366878, |
| "grad_norm": 1.462829351425171, |
| "learning_rate": 4.708119891667892e-06, |
| "loss": 1.0435, |
| "step": 3690 |
| }, |
| { |
| "epoch": 1.6186830015313936, |
| "grad_norm": 1.5362554788589478, |
| "learning_rate": 4.605120132201932e-06, |
| "loss": 1.0653, |
| "step": 3700 |
| }, |
| { |
| "epoch": 1.6230584117260993, |
| "grad_norm": 1.4747408628463745, |
| "learning_rate": 4.503145126624631e-06, |
| "loss": 1.0539, |
| "step": 3710 |
| }, |
| { |
| "epoch": 1.627433821920805, |
| "grad_norm": 1.511655330657959, |
| "learning_rate": 4.402199998735896e-06, |
| "loss": 1.0921, |
| "step": 3720 |
| }, |
| { |
| "epoch": 1.6318092321155109, |
| "grad_norm": 1.5355191230773926, |
| "learning_rate": 4.302289820588762e-06, |
| "loss": 1.0182, |
| "step": 3730 |
| }, |
| { |
| "epoch": 1.6361846423102167, |
| "grad_norm": 1.3255157470703125, |
| "learning_rate": 4.203419612234536e-06, |
| "loss": 1.0586, |
| "step": 3740 |
| }, |
| { |
| "epoch": 1.6405600525049224, |
| "grad_norm": 1.4897997379302979, |
| "learning_rate": 4.105594341470592e-06, |
| "loss": 1.0022, |
| "step": 3750 |
| }, |
| { |
| "epoch": 1.6449354626996282, |
| "grad_norm": 1.5675077438354492, |
| "learning_rate": 4.00881892359074e-06, |
| "loss": 1.0394, |
| "step": 3760 |
| }, |
| { |
| "epoch": 1.649310872894334, |
| "grad_norm": 1.46114182472229, |
| "learning_rate": 3.913098221138237e-06, |
| "loss": 1.0464, |
| "step": 3770 |
| }, |
| { |
| "epoch": 1.6536862830890398, |
| "grad_norm": 1.4704811573028564, |
| "learning_rate": 3.818437043661497e-06, |
| "loss": 1.0428, |
| "step": 3780 |
| }, |
| { |
| "epoch": 1.6580616932837453, |
| "grad_norm": 1.493815302848816, |
| "learning_rate": 3.724840147472422e-06, |
| "loss": 1.0717, |
| "step": 3790 |
| }, |
| { |
| "epoch": 1.662437103478451, |
| "grad_norm": 1.392871618270874, |
| "learning_rate": 3.6323122354074033e-06, |
| "loss": 0.9971, |
| "step": 3800 |
| }, |
| { |
| "epoch": 1.6668125136731569, |
| "grad_norm": 1.5616687536239624, |
| "learning_rate": 3.5408579565910344e-06, |
| "loss": 1.0498, |
| "step": 3810 |
| }, |
| { |
| "epoch": 1.6711879238678626, |
| "grad_norm": 1.5294009447097778, |
| "learning_rate": 3.450481906202524e-06, |
| "loss": 1.0721, |
| "step": 3820 |
| }, |
| { |
| "epoch": 1.6755633340625684, |
| "grad_norm": 1.541881799697876, |
| "learning_rate": 3.3611886252448017e-06, |
| "loss": 1.0331, |
| "step": 3830 |
| }, |
| { |
| "epoch": 1.679938744257274, |
| "grad_norm": 1.4998458623886108, |
| "learning_rate": 3.2729826003163323e-06, |
| "loss": 0.9996, |
| "step": 3840 |
| }, |
| { |
| "epoch": 1.6843141544519797, |
| "grad_norm": 1.4179540872573853, |
| "learning_rate": 3.1858682633857105e-06, |
| "loss": 1.0467, |
| "step": 3850 |
| }, |
| { |
| "epoch": 1.6886895646466855, |
| "grad_norm": 1.5790250301361084, |
| "learning_rate": 3.0998499915689733e-06, |
| "loss": 1.0415, |
| "step": 3860 |
| }, |
| { |
| "epoch": 1.6930649748413913, |
| "grad_norm": 1.5594815015792847, |
| "learning_rate": 3.0149321069096375e-06, |
| "loss": 1.0491, |
| "step": 3870 |
| }, |
| { |
| "epoch": 1.697440385036097, |
| "grad_norm": 1.5254184007644653, |
| "learning_rate": 2.9311188761615703e-06, |
| "loss": 1.0132, |
| "step": 3880 |
| }, |
| { |
| "epoch": 1.7018157952308028, |
| "grad_norm": 1.5314863920211792, |
| "learning_rate": 2.8484145105745903e-06, |
| "loss": 1.0049, |
| "step": 3890 |
| }, |
| { |
| "epoch": 1.7061912054255086, |
| "grad_norm": 1.608102798461914, |
| "learning_rate": 2.7668231656828775e-06, |
| "loss": 1.0627, |
| "step": 3900 |
| }, |
| { |
| "epoch": 1.7105666156202144, |
| "grad_norm": 1.4776244163513184, |
| "learning_rate": 2.6863489410961505e-06, |
| "loss": 1.0374, |
| "step": 3910 |
| }, |
| { |
| "epoch": 1.7149420258149202, |
| "grad_norm": 1.6031432151794434, |
| "learning_rate": 2.606995880293717e-06, |
| "loss": 1.1048, |
| "step": 3920 |
| }, |
| { |
| "epoch": 1.719317436009626, |
| "grad_norm": 1.5214728116989136, |
| "learning_rate": 2.5287679704212836e-06, |
| "loss": 1.0272, |
| "step": 3930 |
| }, |
| { |
| "epoch": 1.7236928462043317, |
| "grad_norm": 1.4273405075073242, |
| "learning_rate": 2.451669142090615e-06, |
| "loss": 1.0509, |
| "step": 3940 |
| }, |
| { |
| "epoch": 1.7280682563990375, |
| "grad_norm": 1.5345888137817383, |
| "learning_rate": 2.3757032691820476e-06, |
| "loss": 1.0378, |
| "step": 3950 |
| }, |
| { |
| "epoch": 1.7324436665937433, |
| "grad_norm": 1.635636329650879, |
| "learning_rate": 2.3008741686498498e-06, |
| "loss": 1.0308, |
| "step": 3960 |
| }, |
| { |
| "epoch": 1.736819076788449, |
| "grad_norm": 1.5404716730117798, |
| "learning_rate": 2.2271856003304253e-06, |
| "loss": 1.054, |
| "step": 3970 |
| }, |
| { |
| "epoch": 1.7411944869831548, |
| "grad_norm": 1.5047839879989624, |
| "learning_rate": 2.154641266753393e-06, |
| "loss": 1.0234, |
| "step": 3980 |
| }, |
| { |
| "epoch": 1.7455698971778604, |
| "grad_norm": 1.4826009273529053, |
| "learning_rate": 2.083244812955573e-06, |
| "loss": 1.064, |
| "step": 3990 |
| }, |
| { |
| "epoch": 1.7499453073725662, |
| "grad_norm": 1.5155521631240845, |
| "learning_rate": 2.012999826297823e-06, |
| "loss": 1.0188, |
| "step": 4000 |
| }, |
| { |
| "epoch": 1.754320717567272, |
| "grad_norm": 1.5202479362487793, |
| "learning_rate": 1.9439098362847825e-06, |
| "loss": 1.0179, |
| "step": 4010 |
| }, |
| { |
| "epoch": 1.7586961277619777, |
| "grad_norm": 1.3785141706466675, |
| "learning_rate": 1.8759783143875443e-06, |
| "loss": 1.0683, |
| "step": 4020 |
| }, |
| { |
| "epoch": 1.7630715379566835, |
| "grad_norm": 1.4348698854446411, |
| "learning_rate": 1.8092086738692282e-06, |
| "loss": 1.0171, |
| "step": 4030 |
| }, |
| { |
| "epoch": 1.767446948151389, |
| "grad_norm": 1.4248477220535278, |
| "learning_rate": 1.7436042696134747e-06, |
| "loss": 1.0062, |
| "step": 4040 |
| }, |
| { |
| "epoch": 1.7718223583460948, |
| "grad_norm": 1.50611412525177, |
| "learning_rate": 1.6791683979558686e-06, |
| "loss": 1.0302, |
| "step": 4050 |
| }, |
| { |
| "epoch": 1.7761977685408006, |
| "grad_norm": 1.5289087295532227, |
| "learning_rate": 1.6159042965183335e-06, |
| "loss": 1.006, |
| "step": 4060 |
| }, |
| { |
| "epoch": 1.7805731787355064, |
| "grad_norm": 1.319069266319275, |
| "learning_rate": 1.5538151440464332e-06, |
| "loss": 0.9973, |
| "step": 4070 |
| }, |
| { |
| "epoch": 1.7849485889302121, |
| "grad_norm": 1.4898905754089355, |
| "learning_rate": 1.4929040602496586e-06, |
| "loss": 1.047, |
| "step": 4080 |
| }, |
| { |
| "epoch": 1.789323999124918, |
| "grad_norm": 1.5255054235458374, |
| "learning_rate": 1.4331741056446968e-06, |
| "loss": 1.0525, |
| "step": 4090 |
| }, |
| { |
| "epoch": 1.7936994093196237, |
| "grad_norm": 1.536674976348877, |
| "learning_rate": 1.3746282814016242e-06, |
| "loss": 1.0171, |
| "step": 4100 |
| }, |
| { |
| "epoch": 1.7980748195143295, |
| "grad_norm": 1.4357112646102905, |
| "learning_rate": 1.3172695291931353e-06, |
| "loss": 1.0532, |
| "step": 4110 |
| }, |
| { |
| "epoch": 1.8024502297090352, |
| "grad_norm": 1.4620466232299805, |
| "learning_rate": 1.2611007310467049e-06, |
| "loss": 1.0177, |
| "step": 4120 |
| }, |
| { |
| "epoch": 1.806825639903741, |
| "grad_norm": 1.5015560388565063, |
| "learning_rate": 1.2061247091998211e-06, |
| "loss": 1.0575, |
| "step": 4130 |
| }, |
| { |
| "epoch": 1.8112010500984468, |
| "grad_norm": 1.4859633445739746, |
| "learning_rate": 1.1523442259581435e-06, |
| "loss": 1.0373, |
| "step": 4140 |
| }, |
| { |
| "epoch": 1.8155764602931526, |
| "grad_norm": 1.6235337257385254, |
| "learning_rate": 1.0997619835567241e-06, |
| "loss": 1.0727, |
| "step": 4150 |
| }, |
| { |
| "epoch": 1.8199518704878583, |
| "grad_norm": 1.6077338457107544, |
| "learning_rate": 1.0483806240242366e-06, |
| "loss": 1.0183, |
| "step": 4160 |
| }, |
| { |
| "epoch": 1.8243272806825641, |
| "grad_norm": 1.5733646154403687, |
| "learning_rate": 9.982027290502238e-07, |
| "loss": 1.0867, |
| "step": 4170 |
| }, |
| { |
| "epoch": 1.8287026908772699, |
| "grad_norm": 1.4715242385864258, |
| "learning_rate": 9.492308198553707e-07, |
| "loss": 1.0763, |
| "step": 4180 |
| }, |
| { |
| "epoch": 1.8330781010719757, |
| "grad_norm": 1.5703119039535522, |
| "learning_rate": 9.014673570648253e-07, |
| "loss": 1.0661, |
| "step": 4190 |
| }, |
| { |
| "epoch": 1.8374535112666812, |
| "grad_norm": 1.4015430212020874, |
| "learning_rate": 8.549147405845781e-07, |
| "loss": 1.0542, |
| "step": 4200 |
| }, |
| { |
| "epoch": 1.841828921461387, |
| "grad_norm": 1.6461395025253296, |
| "learning_rate": 8.095753094808506e-07, |
| "loss": 1.0412, |
| "step": 4210 |
| }, |
| { |
| "epoch": 1.8462043316560928, |
| "grad_norm": 1.4549918174743652, |
| "learning_rate": 7.654513418625941e-07, |
| "loss": 0.9894, |
| "step": 4220 |
| }, |
| { |
| "epoch": 1.8505797418507985, |
| "grad_norm": 1.447482705116272, |
| "learning_rate": 7.225450547670126e-07, |
| "loss": 1.0665, |
| "step": 4230 |
| }, |
| { |
| "epoch": 1.8549551520455043, |
| "grad_norm": 1.479117512702942, |
| "learning_rate": 6.808586040481651e-07, |
| "loss": 1.0133, |
| "step": 4240 |
| }, |
| { |
| "epoch": 1.8593305622402099, |
| "grad_norm": 1.3185451030731201, |
| "learning_rate": 6.403940842686474e-07, |
| "loss": 1.0564, |
| "step": 4250 |
| }, |
| { |
| "epoch": 1.8637059724349156, |
| "grad_norm": 1.5025280714035034, |
| "learning_rate": 6.011535285943392e-07, |
| "loss": 0.9987, |
| "step": 4260 |
| }, |
| { |
| "epoch": 1.8680813826296214, |
| "grad_norm": 1.4831477403640747, |
| "learning_rate": 5.631389086922678e-07, |
| "loss": 1.055, |
| "step": 4270 |
| }, |
| { |
| "epoch": 1.8724567928243272, |
| "grad_norm": 1.4341115951538086, |
| "learning_rate": 5.263521346315192e-07, |
| "loss": 1.0502, |
| "step": 4280 |
| }, |
| { |
| "epoch": 1.876832203019033, |
| "grad_norm": 1.3913581371307373, |
| "learning_rate": 4.907950547872686e-07, |
| "loss": 1.0273, |
| "step": 4290 |
| }, |
| { |
| "epoch": 1.8812076132137387, |
| "grad_norm": 1.5028200149536133, |
| "learning_rate": 4.564694557479238e-07, |
| "loss": 1.0131, |
| "step": 4300 |
| }, |
| { |
| "epoch": 1.8855830234084445, |
| "grad_norm": 1.5376793146133423, |
| "learning_rate": 4.2337706222533856e-07, |
| "loss": 1.0292, |
| "step": 4310 |
| }, |
| { |
| "epoch": 1.8899584336031503, |
| "grad_norm": 1.5637511014938354, |
| "learning_rate": 3.915195369681629e-07, |
| "loss": 1.0262, |
| "step": 4320 |
| }, |
| { |
| "epoch": 1.894333843797856, |
| "grad_norm": 1.5448585748672485, |
| "learning_rate": 3.608984806782928e-07, |
| "loss": 1.0492, |
| "step": 4330 |
| }, |
| { |
| "epoch": 1.8987092539925619, |
| "grad_norm": 1.4419684410095215, |
| "learning_rate": 3.315154319304431e-07, |
| "loss": 1.0724, |
| "step": 4340 |
| }, |
| { |
| "epoch": 1.9030846641872676, |
| "grad_norm": 1.4939939975738525, |
| "learning_rate": 3.033718670948482e-07, |
| "loss": 1.0234, |
| "step": 4350 |
| }, |
| { |
| "epoch": 1.9074600743819734, |
| "grad_norm": 1.4496331214904785, |
| "learning_rate": 2.764692002630631e-07, |
| "loss": 1.0371, |
| "step": 4360 |
| }, |
| { |
| "epoch": 1.9118354845766792, |
| "grad_norm": 1.5224913358688354, |
| "learning_rate": 2.5080878317693125e-07, |
| "loss": 1.067, |
| "step": 4370 |
| }, |
| { |
| "epoch": 1.916210894771385, |
| "grad_norm": 1.3078250885009766, |
| "learning_rate": 2.2639190516065277e-07, |
| "loss": 1.015, |
| "step": 4380 |
| }, |
| { |
| "epoch": 1.9205863049660907, |
| "grad_norm": 1.5351719856262207, |
| "learning_rate": 2.032197930560059e-07, |
| "loss": 1.0435, |
| "step": 4390 |
| }, |
| { |
| "epoch": 1.9249617151607963, |
| "grad_norm": 1.3579761981964111, |
| "learning_rate": 1.812936111607072e-07, |
| "loss": 0.9982, |
| "step": 4400 |
| }, |
| { |
| "epoch": 1.929337125355502, |
| "grad_norm": 1.4030203819274902, |
| "learning_rate": 1.6061446116990008e-07, |
| "loss": 1.0374, |
| "step": 4410 |
| }, |
| { |
| "epoch": 1.9337125355502078, |
| "grad_norm": 1.5246331691741943, |
| "learning_rate": 1.4118338212081028e-07, |
| "loss": 1.0612, |
| "step": 4420 |
| }, |
| { |
| "epoch": 1.9380879457449136, |
| "grad_norm": 1.4720394611358643, |
| "learning_rate": 1.2300135034052916e-07, |
| "loss": 1.084, |
| "step": 4430 |
| }, |
| { |
| "epoch": 1.9424633559396194, |
| "grad_norm": 1.529296875, |
| "learning_rate": 1.0606927939696976e-07, |
| "loss": 1.0421, |
| "step": 4440 |
| }, |
| { |
| "epoch": 1.946838766134325, |
| "grad_norm": 1.4124305248260498, |
| "learning_rate": 9.03880200529561e-08, |
| "loss": 1.0183, |
| "step": 4450 |
| }, |
| { |
| "epoch": 1.9512141763290307, |
| "grad_norm": 1.4209787845611572, |
| "learning_rate": 7.59583602234687e-08, |
| "loss": 1.0359, |
| "step": 4460 |
| }, |
| { |
| "epoch": 1.9555895865237365, |
| "grad_norm": 1.4830424785614014, |
| "learning_rate": 6.27810249360733e-08, |
| "loss": 1.0425, |
| "step": 4470 |
| }, |
| { |
| "epoch": 1.9599649967184423, |
| "grad_norm": 1.6016472578048706, |
| "learning_rate": 5.08566762944751e-08, |
| "loss": 1.0069, |
| "step": 4480 |
| }, |
| { |
| "epoch": 1.964340406913148, |
| "grad_norm": 1.4748444557189941, |
| "learning_rate": 4.018591344526479e-08, |
| "loss": 0.9978, |
| "step": 4490 |
| }, |
| { |
| "epoch": 1.9687158171078538, |
| "grad_norm": 1.462731122970581, |
| "learning_rate": 3.076927254779538e-08, |
| "loss": 1.0539, |
| "step": 4500 |
| }, |
| { |
| "epoch": 1.9730912273025596, |
| "grad_norm": 1.4741415977478027, |
| "learning_rate": 2.2607226747262122e-08, |
| "loss": 1.0609, |
| "step": 4510 |
| }, |
| { |
| "epoch": 1.9774666374972654, |
| "grad_norm": 1.4613292217254639, |
| "learning_rate": 1.5700186150921503e-08, |
| "loss": 1.0224, |
| "step": 4520 |
| }, |
| { |
| "epoch": 1.9818420476919711, |
| "grad_norm": 1.4673250913619995, |
| "learning_rate": 1.0048497807479963e-08, |
| "loss": 1.0265, |
| "step": 4530 |
| }, |
| { |
| "epoch": 1.986217457886677, |
| "grad_norm": 1.4706196784973145, |
| "learning_rate": 5.652445689660613e-09, |
| "loss": 1.0217, |
| "step": 4540 |
| }, |
| { |
| "epoch": 1.9905928680813827, |
| "grad_norm": 1.5136979818344116, |
| "learning_rate": 2.512250679939654e-09, |
| "loss": 1.0589, |
| "step": 4550 |
| }, |
| { |
| "epoch": 1.9949682782760885, |
| "grad_norm": 1.4204763174057007, |
| "learning_rate": 6.280705594385871e-10, |
| "loss": 1.0342, |
| "step": 4560 |
| }, |
| { |
| "epoch": 1.9993436884707942, |
| "grad_norm": 1.3786492347717285, |
| "learning_rate": 0.0, |
| "loss": 1.0299, |
| "step": 4570 |
| } |
| ], |
| "logging_steps": 10, |
| "max_steps": 4570, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 2, |
| "save_steps": 500.0, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1.8983281907294208e+18, |
| "train_batch_size": 4, |
| "trial_name": null, |
| "trial_params": null |
| } |