{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.87012987012987,
  "eval_steps": 500,
  "global_step": 6000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008116883116883116,
      "grad_norm": 9.98837947845459,
      "learning_rate": 3e-06,
      "loss": 0.7822,
      "step": 10
    },
    {
      "epoch": 0.016233766233766232,
      "grad_norm": 2.002171516418457,
      "learning_rate": 6.333333333333334e-06,
      "loss": 0.4512,
      "step": 20
    },
    {
      "epoch": 0.024350649350649352,
      "grad_norm": 2.014068365097046,
      "learning_rate": 9.666666666666667e-06,
      "loss": 0.2768,
      "step": 30
    },
    {
      "epoch": 0.032467532467532464,
      "grad_norm": 2.2264082431793213,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 0.2343,
      "step": 40
    },
    {
      "epoch": 0.040584415584415584,
      "grad_norm": 1.8026350736618042,
      "learning_rate": 1.6333333333333335e-05,
      "loss": 0.2159,
      "step": 50
    },
    {
      "epoch": 0.048701298701298704,
      "grad_norm": 0.9995092749595642,
      "learning_rate": 1.9666666666666666e-05,
      "loss": 0.1864,
      "step": 60
    },
    {
      "epoch": 0.056818181818181816,
      "grad_norm": 1.3731173276901245,
      "learning_rate": 2.3000000000000003e-05,
      "loss": 0.1687,
      "step": 70
    },
    {
      "epoch": 0.06493506493506493,
      "grad_norm": 1.4359029531478882,
      "learning_rate": 2.633333333333333e-05,
      "loss": 0.1764,
      "step": 80
    },
    {
      "epoch": 0.07305194805194805,
      "grad_norm": 0.9684350490570068,
      "learning_rate": 2.9666666666666672e-05,
      "loss": 0.1504,
      "step": 90
    },
    {
      "epoch": 0.08116883116883117,
      "grad_norm": 1.092146873474121,
      "learning_rate": 3.3e-05,
      "loss": 0.1547,
      "step": 100
    },
    {
      "epoch": 0.08928571428571429,
      "grad_norm": 1.1403262615203857,
      "learning_rate": 3.633333333333333e-05,
      "loss": 0.145,
      "step": 110
    },
    {
      "epoch": 0.09740259740259741,
      "grad_norm": 1.1365031003952026,
      "learning_rate": 3.966666666666667e-05,
      "loss": 0.1327,
      "step": 120
    },
    {
      "epoch": 0.10551948051948051,
      "grad_norm": 0.8338238596916199,
      "learning_rate": 4.3e-05,
      "loss": 0.1262,
      "step": 130
    },
    {
      "epoch": 0.11363636363636363,
      "grad_norm": 1.0691053867340088,
      "learning_rate": 4.633333333333333e-05,
      "loss": 0.1221,
      "step": 140
    },
    {
      "epoch": 0.12175324675324675,
      "grad_norm": 0.7309530377388,
      "learning_rate": 4.966666666666667e-05,
      "loss": 0.1124,
      "step": 150
    },
    {
      "epoch": 0.12987012987012986,
      "grad_norm": 0.7510144114494324,
      "learning_rate": 5.300000000000001e-05,
      "loss": 0.12,
      "step": 160
    },
    {
      "epoch": 0.137987012987013,
      "grad_norm": 1.4305161237716675,
      "learning_rate": 5.633333333333334e-05,
      "loss": 0.1185,
      "step": 170
    },
    {
      "epoch": 0.1461038961038961,
      "grad_norm": 1.7564723491668701,
      "learning_rate": 5.966666666666667e-05,
      "loss": 0.1292,
      "step": 180
    },
    {
      "epoch": 0.15422077922077923,
      "grad_norm": 0.8886933922767639,
      "learning_rate": 6.3e-05,
      "loss": 0.115,
      "step": 190
    },
    {
      "epoch": 0.16233766233766234,
      "grad_norm": 1.173780083656311,
      "learning_rate": 6.633333333333334e-05,
      "loss": 0.1137,
      "step": 200
    },
    {
      "epoch": 0.17045454545454544,
      "grad_norm": 1.3038924932479858,
      "learning_rate": 6.966666666666668e-05,
      "loss": 0.1245,
      "step": 210
    },
    {
      "epoch": 0.17857142857142858,
      "grad_norm": 0.6309260129928589,
      "learning_rate": 7.3e-05,
      "loss": 0.1013,
      "step": 220
    },
    {
      "epoch": 0.18668831168831168,
      "grad_norm": 1.058510184288025,
      "learning_rate": 7.633333333333334e-05,
      "loss": 0.0946,
      "step": 230
    },
    {
      "epoch": 0.19480519480519481,
      "grad_norm": 0.8704164624214172,
      "learning_rate": 7.966666666666666e-05,
      "loss": 0.1005,
      "step": 240
    },
    {
      "epoch": 0.20292207792207792,
      "grad_norm": 1.6588118076324463,
      "learning_rate": 8.3e-05,
      "loss": 0.0949,
      "step": 250
    },
    {
      "epoch": 0.21103896103896103,
      "grad_norm": 0.46824008226394653,
      "learning_rate": 8.633333333333334e-05,
      "loss": 0.0855,
      "step": 260
    },
    {
      "epoch": 0.21915584415584416,
      "grad_norm": 0.972342312335968,
      "learning_rate": 8.966666666666666e-05,
      "loss": 0.0972,
      "step": 270
    },
    {
      "epoch": 0.22727272727272727,
      "grad_norm": 0.5761755704879761,
      "learning_rate": 9.300000000000001e-05,
      "loss": 0.0875,
      "step": 280
    },
    {
      "epoch": 0.2353896103896104,
      "grad_norm": 1.3555821180343628,
      "learning_rate": 9.633333333333335e-05,
      "loss": 0.0966,
      "step": 290
    },
    {
      "epoch": 0.2435064935064935,
      "grad_norm": 0.8987582325935364,
      "learning_rate": 9.966666666666667e-05,
      "loss": 0.083,
      "step": 300
    },
    {
      "epoch": 0.25162337662337664,
      "grad_norm": 0.7900184988975525,
      "learning_rate": 9.999938485971279e-05,
      "loss": 0.0891,
      "step": 310
    },
    {
      "epoch": 0.2597402597402597,
      "grad_norm": 0.7040216326713562,
      "learning_rate": 9.999725846827562e-05,
      "loss": 0.078,
      "step": 320
    },
    {
      "epoch": 0.26785714285714285,
      "grad_norm": 0.6510125994682312,
      "learning_rate": 9.999361329594254e-05,
      "loss": 0.0782,
      "step": 330
    },
    {
      "epoch": 0.275974025974026,
      "grad_norm": 1.0718351602554321,
      "learning_rate": 9.998844945344405e-05,
      "loss": 0.075,
      "step": 340
    },
    {
      "epoch": 0.2840909090909091,
      "grad_norm": 0.9426969289779663,
      "learning_rate": 9.99817670976436e-05,
      "loss": 0.0811,
      "step": 350
    },
    {
      "epoch": 0.2922077922077922,
      "grad_norm": 0.6431786417961121,
      "learning_rate": 9.997356643153303e-05,
      "loss": 0.0749,
      "step": 360
    },
    {
      "epoch": 0.3003246753246753,
      "grad_norm": 0.6744059324264526,
      "learning_rate": 9.996384770422629e-05,
      "loss": 0.0766,
      "step": 370
    },
    {
      "epoch": 0.30844155844155846,
      "grad_norm": 0.7747765183448792,
      "learning_rate": 9.995261121095194e-05,
      "loss": 0.0729,
      "step": 380
    },
    {
      "epoch": 0.31655844155844154,
      "grad_norm": 0.6413760185241699,
      "learning_rate": 9.993985729304408e-05,
      "loss": 0.0722,
      "step": 390
    },
    {
      "epoch": 0.3246753246753247,
      "grad_norm": 0.8439412713050842,
      "learning_rate": 9.992558633793212e-05,
      "loss": 0.0727,
      "step": 400
    },
    {
      "epoch": 0.3327922077922078,
      "grad_norm": 0.6304122805595398,
      "learning_rate": 9.990979877912891e-05,
      "loss": 0.0745,
      "step": 410
    },
    {
      "epoch": 0.3409090909090909,
      "grad_norm": 0.47354942560195923,
      "learning_rate": 9.989249509621759e-05,
      "loss": 0.0637,
      "step": 420
    },
    {
      "epoch": 0.349025974025974,
      "grad_norm": 0.4226538836956024,
      "learning_rate": 9.987367581483705e-05,
      "loss": 0.0649,
      "step": 430
    },
    {
      "epoch": 0.35714285714285715,
      "grad_norm": 0.49945083260536194,
      "learning_rate": 9.985334150666592e-05,
      "loss": 0.0593,
      "step": 440
    },
    {
      "epoch": 0.3652597402597403,
      "grad_norm": 0.6287726163864136,
      "learning_rate": 9.983149278940526e-05,
      "loss": 0.0606,
      "step": 450
    },
    {
      "epoch": 0.37337662337662336,
      "grad_norm": 0.6153169274330139,
      "learning_rate": 9.980813032675974e-05,
      "loss": 0.0651,
      "step": 460
    },
    {
      "epoch": 0.3814935064935065,
      "grad_norm": 0.5000476241111755,
      "learning_rate": 9.978325482841753e-05,
      "loss": 0.0653,
      "step": 470
    },
    {
      "epoch": 0.38961038961038963,
      "grad_norm": 0.5937640070915222,
      "learning_rate": 9.975686705002867e-05,
      "loss": 0.0647,
      "step": 480
    },
    {
      "epoch": 0.3977272727272727,
      "grad_norm": 0.6286649703979492,
      "learning_rate": 9.972896779318219e-05,
      "loss": 0.0634,
      "step": 490
    },
    {
      "epoch": 0.40584415584415584,
      "grad_norm": 0.6515055298805237,
      "learning_rate": 9.969955790538175e-05,
      "loss": 0.0616,
      "step": 500
    },
    {
      "epoch": 0.413961038961039,
      "grad_norm": 0.5278387069702148,
      "learning_rate": 9.966863828001982e-05,
      "loss": 0.0619,
      "step": 510
    },
    {
      "epoch": 0.42207792207792205,
      "grad_norm": 0.6017199754714966,
      "learning_rate": 9.963620985635065e-05,
      "loss": 0.0609,
      "step": 520
    },
    {
      "epoch": 0.4301948051948052,
      "grad_norm": 0.5290632843971252,
      "learning_rate": 9.960227361946164e-05,
      "loss": 0.0622,
      "step": 530
    },
    {
      "epoch": 0.4383116883116883,
      "grad_norm": 0.9601761102676392,
      "learning_rate": 9.95668306002435e-05,
      "loss": 0.0607,
      "step": 540
    },
    {
      "epoch": 0.44642857142857145,
      "grad_norm": 0.4279126822948456,
      "learning_rate": 9.952988187535886e-05,
      "loss": 0.0551,
      "step": 550
    },
    {
      "epoch": 0.45454545454545453,
      "grad_norm": 0.747555136680603,
      "learning_rate": 9.949142856720961e-05,
      "loss": 0.0561,
      "step": 560
    },
    {
      "epoch": 0.46266233766233766,
      "grad_norm": 0.5137647986412048,
      "learning_rate": 9.945147184390278e-05,
      "loss": 0.05,
      "step": 570
    },
    {
      "epoch": 0.4707792207792208,
      "grad_norm": 0.4771589934825897,
      "learning_rate": 9.941001291921512e-05,
      "loss": 0.0565,
      "step": 580
    },
    {
      "epoch": 0.4788961038961039,
      "grad_norm": 0.5673993229866028,
      "learning_rate": 9.936705305255612e-05,
      "loss": 0.0562,
      "step": 590
    },
    {
      "epoch": 0.487012987012987,
      "grad_norm": 0.4510548710823059,
      "learning_rate": 9.932259354892984e-05,
      "loss": 0.0604,
      "step": 600
    },
    {
      "epoch": 0.49512987012987014,
      "grad_norm": 0.5102289915084839,
      "learning_rate": 9.927663575889521e-05,
      "loss": 0.0625,
      "step": 610
    },
    {
      "epoch": 0.5032467532467533,
      "grad_norm": 0.5905114412307739,
      "learning_rate": 9.922918107852504e-05,
      "loss": 0.0585,
      "step": 620
    },
    {
      "epoch": 0.5113636363636364,
      "grad_norm": 0.4644724130630493,
      "learning_rate": 9.918023094936363e-05,
      "loss": 0.0549,
      "step": 630
    },
    {
      "epoch": 0.5194805194805194,
      "grad_norm": 0.43552494049072266,
      "learning_rate": 9.912978685838294e-05,
      "loss": 0.0508,
      "step": 640
    },
    {
      "epoch": 0.5275974025974026,
      "grad_norm": 0.5756233334541321,
      "learning_rate": 9.90778503379374e-05,
      "loss": 0.0548,
      "step": 650
    },
    {
      "epoch": 0.5357142857142857,
      "grad_norm": 0.6892876029014587,
      "learning_rate": 9.902442296571743e-05,
      "loss": 0.0576,
      "step": 660
    },
    {
      "epoch": 0.5438311688311688,
      "grad_norm": 0.5225304961204529,
      "learning_rate": 9.896950636470147e-05,
      "loss": 0.0595,
      "step": 670
    },
    {
      "epoch": 0.551948051948052,
      "grad_norm": 0.6475655436515808,
      "learning_rate": 9.891310220310666e-05,
      "loss": 0.0577,
      "step": 680
    },
    {
      "epoch": 0.560064935064935,
      "grad_norm": 0.5821855068206787,
      "learning_rate": 9.885521219433823e-05,
      "loss": 0.0526,
      "step": 690
    },
    {
      "epoch": 0.5681818181818182,
      "grad_norm": 0.40680307149887085,
      "learning_rate": 9.879583809693738e-05,
      "loss": 0.0513,
      "step": 700
    },
    {
      "epoch": 0.5762987012987013,
      "grad_norm": 0.6106568574905396,
      "learning_rate": 9.873498171452789e-05,
      "loss": 0.0656,
      "step": 710
    },
    {
      "epoch": 0.5844155844155844,
      "grad_norm": 0.37523335218429565,
      "learning_rate": 9.867264489576135e-05,
      "loss": 0.053,
      "step": 720
    },
    {
      "epoch": 0.5925324675324676,
      "grad_norm": 0.42965176701545715,
      "learning_rate": 9.860882953426099e-05,
      "loss": 0.0556,
      "step": 730
    },
    {
      "epoch": 0.6006493506493507,
      "grad_norm": 0.461414635181427,
      "learning_rate": 9.854353756856412e-05,
      "loss": 0.0493,
      "step": 740
    },
    {
      "epoch": 0.6087662337662337,
      "grad_norm": 0.49525216221809387,
      "learning_rate": 9.847677098206332e-05,
      "loss": 0.0554,
      "step": 750
    },
    {
      "epoch": 0.6168831168831169,
      "grad_norm": 0.6846104860305786,
      "learning_rate": 9.840853180294608e-05,
      "loss": 0.0514,
      "step": 760
    },
    {
      "epoch": 0.625,
      "grad_norm": 0.4863649606704712,
      "learning_rate": 9.833882210413332e-05,
      "loss": 0.0523,
      "step": 770
    },
    {
      "epoch": 0.6331168831168831,
      "grad_norm": 0.5287837386131287,
      "learning_rate": 9.826764400321633e-05,
      "loss": 0.0519,
      "step": 780
    },
    {
      "epoch": 0.6412337662337663,
      "grad_norm": 0.4653201401233673,
      "learning_rate": 9.819499966239243e-05,
      "loss": 0.0482,
      "step": 790
    },
    {
      "epoch": 0.6493506493506493,
      "grad_norm": 0.5273900032043457,
      "learning_rate": 9.812089128839938e-05,
      "loss": 0.0539,
      "step": 800
    },
    {
      "epoch": 0.6574675324675324,
      "grad_norm": 0.4672171473503113,
      "learning_rate": 9.804532113244828e-05,
      "loss": 0.0536,
      "step": 810
    },
    {
      "epoch": 0.6655844155844156,
      "grad_norm": 0.39550450444221497,
      "learning_rate": 9.796829149015517e-05,
      "loss": 0.0512,
      "step": 820
    },
    {
      "epoch": 0.6737012987012987,
      "grad_norm": 0.6363895535469055,
      "learning_rate": 9.788980470147132e-05,
      "loss": 0.0464,
      "step": 830
    },
    {
      "epoch": 0.6818181818181818,
      "grad_norm": 0.6644861698150635,
      "learning_rate": 9.780986315061218e-05,
      "loss": 0.0469,
      "step": 840
    },
    {
      "epoch": 0.689935064935065,
      "grad_norm": 0.32548630237579346,
      "learning_rate": 9.772846926598491e-05,
      "loss": 0.0525,
      "step": 850
    },
    {
      "epoch": 0.698051948051948,
      "grad_norm": 0.5565819144248962,
      "learning_rate": 9.76456255201146e-05,
      "loss": 0.0483,
      "step": 860
    },
    {
      "epoch": 0.7061688311688312,
      "grad_norm": 0.3733399212360382,
      "learning_rate": 9.756133442956923e-05,
      "loss": 0.0496,
      "step": 870
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 0.5695706605911255,
      "learning_rate": 9.747559855488313e-05,
      "loss": 0.0414,
      "step": 880
    },
    {
      "epoch": 0.7224025974025974,
      "grad_norm": 0.47618597745895386,
      "learning_rate": 9.73884205004793e-05,
      "loss": 0.0439,
      "step": 890
    },
    {
      "epoch": 0.7305194805194806,
      "grad_norm": 0.4174794852733612,
      "learning_rate": 9.729980291459019e-05,
      "loss": 0.0495,
      "step": 900
    },
    {
      "epoch": 0.7386363636363636,
      "grad_norm": 0.52494215965271,
      "learning_rate": 9.720974848917735e-05,
      "loss": 0.0429,
      "step": 910
    },
    {
      "epoch": 0.7467532467532467,
      "grad_norm": 0.6096119284629822,
      "learning_rate": 9.711825995984957e-05,
      "loss": 0.0464,
      "step": 920
    },
    {
      "epoch": 0.7548701298701299,
      "grad_norm": 0.5608004927635193,
      "learning_rate": 9.702534010577991e-05,
      "loss": 0.044,
      "step": 930
    },
    {
      "epoch": 0.762987012987013,
      "grad_norm": 0.550032377243042,
      "learning_rate": 9.693099174962103e-05,
      "loss": 0.041,
      "step": 940
    },
    {
      "epoch": 0.7711038961038961,
      "grad_norm": 0.5039306282997131,
      "learning_rate": 9.683521775741977e-05,
      "loss": 0.052,
      "step": 950
    },
    {
      "epoch": 0.7792207792207793,
      "grad_norm": 0.6055617332458496,
      "learning_rate": 9.673802103852979e-05,
      "loss": 0.0462,
      "step": 960
    },
    {
      "epoch": 0.7873376623376623,
      "grad_norm": 0.37617576122283936,
      "learning_rate": 9.663940454552342e-05,
      "loss": 0.0481,
      "step": 970
    },
    {
      "epoch": 0.7954545454545454,
      "grad_norm": 0.5203965902328491,
      "learning_rate": 9.65393712741018e-05,
      "loss": 0.0533,
      "step": 980
    },
    {
      "epoch": 0.8035714285714286,
      "grad_norm": 0.6327062845230103,
      "learning_rate": 9.6437924263004e-05,
      "loss": 0.0505,
      "step": 990
    },
    {
      "epoch": 0.8116883116883117,
      "grad_norm": 0.5592052340507507,
      "learning_rate": 9.63350665939146e-05,
      "loss": 0.0488,
      "step": 1000
    },
    {
      "epoch": 0.8198051948051948,
      "grad_norm": 0.5474748611450195,
      "learning_rate": 9.623080139137023e-05,
      "loss": 0.0425,
      "step": 1010
    },
    {
      "epoch": 0.827922077922078,
      "grad_norm": 0.7658595442771912,
      "learning_rate": 9.612513182266447e-05,
      "loss": 0.043,
      "step": 1020
    },
    {
      "epoch": 0.836038961038961,
      "grad_norm": 0.5269732475280762,
      "learning_rate": 9.601806109775179e-05,
      "loss": 0.0449,
      "step": 1030
    },
    {
      "epoch": 0.8441558441558441,
      "grad_norm": 0.4510018229484558,
      "learning_rate": 9.590959246914995e-05,
      "loss": 0.0495,
      "step": 1040
    },
    {
      "epoch": 0.8522727272727273,
      "grad_norm": 0.40236034989356995,
      "learning_rate": 9.579972923184122e-05,
      "loss": 0.0411,
      "step": 1050
    },
    {
      "epoch": 0.8603896103896104,
      "grad_norm": 0.5794676542282104,
      "learning_rate": 9.568847472317232e-05,
      "loss": 0.0521,
      "step": 1060
    },
    {
      "epoch": 0.8685064935064936,
      "grad_norm": 0.27767524123191833,
      "learning_rate": 9.557583232275303e-05,
      "loss": 0.0486,
      "step": 1070
    },
    {
      "epoch": 0.8766233766233766,
      "grad_norm": 0.4276062846183777,
      "learning_rate": 9.546180545235344e-05,
      "loss": 0.0445,
      "step": 1080
    },
    {
      "epoch": 0.8847402597402597,
      "grad_norm": 0.5806272625923157,
      "learning_rate": 9.534639757580013e-05,
      "loss": 0.0369,
      "step": 1090
    },
    {
      "epoch": 0.8928571428571429,
      "grad_norm": 0.30585572123527527,
      "learning_rate": 9.522961219887092e-05,
      "loss": 0.0406,
      "step": 1100
    },
    {
      "epoch": 0.900974025974026,
      "grad_norm": 0.3119078576564789,
      "learning_rate": 9.511145286918828e-05,
      "loss": 0.0398,
      "step": 1110
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 0.381734699010849,
      "learning_rate": 9.499192317611167e-05,
      "loss": 0.0425,
      "step": 1120
    },
    {
      "epoch": 0.9172077922077922,
      "grad_norm": 0.23833595216274261,
      "learning_rate": 9.487102675062851e-05,
      "loss": 0.0484,
      "step": 1130
    },
    {
      "epoch": 0.9253246753246753,
      "grad_norm": 0.5636361837387085,
      "learning_rate": 9.474876726524374e-05,
      "loss": 0.0445,
      "step": 1140
    },
    {
      "epoch": 0.9334415584415584,
      "grad_norm": 0.4658976197242737,
      "learning_rate": 9.462514843386845e-05,
      "loss": 0.0443,
      "step": 1150
    },
    {
      "epoch": 0.9415584415584416,
      "grad_norm": 0.4087630808353424,
      "learning_rate": 9.450017401170689e-05,
      "loss": 0.039,
      "step": 1160
    },
    {
      "epoch": 0.9496753246753247,
      "grad_norm": 0.414836585521698,
      "learning_rate": 9.437384779514256e-05,
      "loss": 0.0421,
      "step": 1170
    },
    {
      "epoch": 0.9577922077922078,
      "grad_norm": 0.4009227156639099,
      "learning_rate": 9.424617362162271e-05,
      "loss": 0.044,
      "step": 1180
    },
    {
      "epoch": 0.9659090909090909,
      "grad_norm": 0.5628305077552795,
      "learning_rate": 9.411715536954196e-05,
      "loss": 0.0421,
      "step": 1190
    },
    {
      "epoch": 0.974025974025974,
      "grad_norm": 0.5532993674278259,
      "learning_rate": 9.39867969581243e-05,
      "loss": 0.0425,
      "step": 1200
    },
    {
      "epoch": 0.9821428571428571,
      "grad_norm": 0.35711079835891724,
      "learning_rate": 9.385510234730415e-05,
      "loss": 0.044,
      "step": 1210
    },
    {
      "epoch": 0.9902597402597403,
      "grad_norm": 0.5343226790428162,
      "learning_rate": 9.372207553760603e-05,
      "loss": 0.0365,
      "step": 1220
    },
    {
      "epoch": 0.9983766233766234,
      "grad_norm": 0.5358731150627136,
      "learning_rate": 9.358772057002312e-05,
      "loss": 0.0468,
      "step": 1230
    },
    {
      "epoch": 1.0064935064935066,
      "grad_norm": 0.6387543082237244,
      "learning_rate": 9.345204152589428e-05,
      "loss": 0.0396,
      "step": 1240
    },
    {
      "epoch": 1.0146103896103895,
      "grad_norm": 0.4421396851539612,
      "learning_rate": 9.331504252678037e-05,
      "loss": 0.0458,
      "step": 1250
    },
    {
      "epoch": 1.0227272727272727,
      "grad_norm": 0.3979133069515228,
      "learning_rate": 9.317672773433876e-05,
      "loss": 0.0433,
      "step": 1260
    },
    {
      "epoch": 1.030844155844156,
      "grad_norm": 0.40404215455055237,
      "learning_rate": 9.30371013501972e-05,
      "loss": 0.0443,
      "step": 1270
    },
    {
      "epoch": 1.0389610389610389,
      "grad_norm": 0.40078988671302795,
      "learning_rate": 9.289616761582587e-05,
      "loss": 0.0388,
      "step": 1280
    },
    {
      "epoch": 1.047077922077922,
      "grad_norm": 0.6261420845985413,
      "learning_rate": 9.275393081240882e-05,
      "loss": 0.053,
      "step": 1290
    },
    {
      "epoch": 1.0551948051948052,
      "grad_norm": 0.4728165864944458,
      "learning_rate": 9.261039526071374e-05,
      "loss": 0.0465,
      "step": 1300
    },
    {
      "epoch": 1.0633116883116882,
      "grad_norm": 0.5890015363693237,
      "learning_rate": 9.246556532096078e-05,
      "loss": 0.0463,
      "step": 1310
    },
    {
      "epoch": 1.0714285714285714,
      "grad_norm": 0.5251089930534363,
      "learning_rate": 9.231944539269009e-05,
      "loss": 0.0465,
      "step": 1320
    },
    {
      "epoch": 1.0795454545454546,
      "grad_norm": 0.4777784049510956,
      "learning_rate": 9.217203991462815e-05,
      "loss": 0.0378,
      "step": 1330
    },
    {
      "epoch": 1.0876623376623376,
      "grad_norm": 0.5128867626190186,
      "learning_rate": 9.202335336455296e-05,
      "loss": 0.0386,
      "step": 1340
    },
    {
      "epoch": 1.0957792207792207,
      "grad_norm": 0.2599307894706726,
      "learning_rate": 9.187339025915802e-05,
      "loss": 0.0414,
      "step": 1350
    },
    {
      "epoch": 1.103896103896104,
      "grad_norm": 0.41156598925590515,
      "learning_rate": 9.17221551539151e-05,
      "loss": 0.0395,
      "step": 1360
    },
    {
      "epoch": 1.112012987012987,
      "grad_norm": 0.5701897144317627,
      "learning_rate": 9.156965264293586e-05,
      "loss": 0.0314,
      "step": 1370
    },
    {
      "epoch": 1.12012987012987,
      "grad_norm": 0.3325595259666443,
      "learning_rate": 9.141588735883232e-05,
      "loss": 0.0396,
      "step": 1380
    },
    {
      "epoch": 1.1282467532467533,
      "grad_norm": 0.31674617528915405,
      "learning_rate": 9.126086397257612e-05,
      "loss": 0.037,
      "step": 1390
    },
    {
      "epoch": 1.1363636363636362,
      "grad_norm": 0.36610284447669983,
      "learning_rate": 9.110458719335659e-05,
      "loss": 0.0417,
      "step": 1400
    },
    {
      "epoch": 1.1444805194805194,
      "grad_norm": 0.47777095437049866,
      "learning_rate": 9.094706176843777e-05,
      "loss": 0.0365,
      "step": 1410
    },
    {
      "epoch": 1.1525974025974026,
      "grad_norm": 0.4963802099227905,
      "learning_rate": 9.078829248301417e-05,
      "loss": 0.0419,
      "step": 1420
    },
    {
      "epoch": 1.1607142857142858,
      "grad_norm": 0.30004164576530457,
      "learning_rate": 9.062828416006539e-05,
      "loss": 0.038,
      "step": 1430
    },
    {
      "epoch": 1.1688311688311688,
      "grad_norm": 0.3547914922237396,
      "learning_rate": 9.046704166020961e-05,
      "loss": 0.0409,
      "step": 1440
    },
    {
      "epoch": 1.176948051948052,
      "grad_norm": 0.3625108003616333,
      "learning_rate": 9.030456988155596e-05,
      "loss": 0.0402,
      "step": 1450
    },
    {
      "epoch": 1.1850649350649352,
      "grad_norm": 0.3149315118789673,
      "learning_rate": 9.014087375955573e-05,
      "loss": 0.0377,
      "step": 1460
    },
    {
      "epoch": 1.1931818181818181,
      "grad_norm": 0.3886120319366455,
      "learning_rate": 8.997595826685243e-05,
      "loss": 0.0386,
      "step": 1470
    },
    {
      "epoch": 1.2012987012987013,
      "grad_norm": 0.48191025853157043,
      "learning_rate": 8.980982841313074e-05,
      "loss": 0.048,
      "step": 1480
    },
    {
      "epoch": 1.2094155844155845,
      "grad_norm": 0.454088032245636,
      "learning_rate": 8.964248924496435e-05,
      "loss": 0.0342,
      "step": 1490
    },
    {
      "epoch": 1.2175324675324675,
      "grad_norm": 0.3253687620162964,
      "learning_rate": 8.947394584566258e-05,
      "loss": 0.0412,
      "step": 1500
    },
    {
      "epoch": 1.2256493506493507,
      "grad_norm": 0.3391754925251007,
      "learning_rate": 8.930420333511606e-05,
      "loss": 0.0465,
      "step": 1510
    },
    {
      "epoch": 1.2337662337662338,
      "grad_norm": 0.3184202313423157,
      "learning_rate": 8.913326686964117e-05,
      "loss": 0.0379,
      "step": 1520
    },
    {
      "epoch": 1.2418831168831168,
      "grad_norm": 0.3418414890766144,
      "learning_rate": 8.89611416418234e-05,
      "loss": 0.0466,
      "step": 1530
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.45861661434173584,
      "learning_rate": 8.878783288035957e-05,
      "loss": 0.0448,
      "step": 1540
    },
    {
      "epoch": 1.2581168831168832,
      "grad_norm": 0.45306596159935,
      "learning_rate": 8.86133458498991e-05,
      "loss": 0.0459,
      "step": 1550
    },
    {
      "epoch": 1.2662337662337662,
      "grad_norm": 0.4458024799823761,
      "learning_rate": 8.843768585088393e-05,
      "loss": 0.0385,
      "step": 1560
    },
    {
      "epoch": 1.2743506493506493,
      "grad_norm": 0.38890722393989563,
      "learning_rate": 8.82608582193877e-05,
      "loss": 0.0357,
      "step": 1570
    },
    {
      "epoch": 1.2824675324675325,
      "grad_norm": 0.46648555994033813,
      "learning_rate": 8.80828683269535e-05,
      "loss": 0.0396,
      "step": 1580
    },
    {
      "epoch": 1.2905844155844157,
      "grad_norm": 0.30567464232444763,
      "learning_rate": 8.790372158043074e-05,
      "loss": 0.0405,
      "step": 1590
    },
    {
      "epoch": 1.2987012987012987,
      "grad_norm": 0.36842402815818787,
      "learning_rate": 8.772342342181095e-05,
      "loss": 0.0392,
      "step": 1600
    },
    {
      "epoch": 1.3068181818181819,
      "grad_norm": 0.3658285439014435,
      "learning_rate": 8.75419793280624e-05,
      "loss": 0.0371,
      "step": 1610
    },
    {
      "epoch": 1.314935064935065,
      "grad_norm": 0.28925997018814087,
      "learning_rate": 8.735939481096378e-05,
      "loss": 0.0342,
      "step": 1620
    },
    {
      "epoch": 1.323051948051948,
      "grad_norm": 0.2606930732727051,
      "learning_rate": 8.717567541693673e-05,
      "loss": 0.0359,
      "step": 1630
    },
    {
      "epoch": 1.3311688311688312,
      "grad_norm": 0.521195113658905,
      "learning_rate": 8.699082672687734e-05,
      "loss": 0.0387,
      "step": 1640
    },
    {
      "epoch": 1.3392857142857144,
      "grad_norm": 0.41980624198913574,
      "learning_rate": 8.680485435598673e-05,
      "loss": 0.0441,
      "step": 1650
    },
    {
      "epoch": 1.3474025974025974,
      "grad_norm": 0.22121192514896393,
      "learning_rate": 8.661776395360029e-05,
      "loss": 0.0297,
      "step": 1660
    },
    {
      "epoch": 1.3555194805194806,
      "grad_norm": 0.3273646831512451,
      "learning_rate": 8.642956120301626e-05,
      "loss": 0.0355,
      "step": 1670
    },
    {
      "epoch": 1.3636363636363638,
      "grad_norm": 0.3984847068786621,
      "learning_rate": 8.624025182132292e-05,
      "loss": 0.0409,
      "step": 1680
    },
    {
      "epoch": 1.3717532467532467,
      "grad_norm": 0.34613439440727234,
      "learning_rate": 8.604984155922506e-05,
      "loss": 0.0372,
      "step": 1690
    },
    {
      "epoch": 1.37987012987013,
      "grad_norm": 0.26878851652145386,
      "learning_rate": 8.585833620086918e-05,
      "loss": 0.0337,
      "step": 1700
    },
    {
      "epoch": 1.387987012987013,
      "grad_norm": 0.5729002952575684,
      "learning_rate": 8.566574156366784e-05,
      "loss": 0.0373,
      "step": 1710
    },
    {
      "epoch": 1.396103896103896,
      "grad_norm": 0.44826480746269226,
      "learning_rate": 8.547206349812298e-05,
      "loss": 0.0396,
      "step": 1720
    },
    {
      "epoch": 1.4042207792207793,
      "grad_norm": 0.3320808708667755,
      "learning_rate": 8.527730788764805e-05,
      "loss": 0.0335,
      "step": 1730
    },
    {
      "epoch": 1.4123376623376624,
      "grad_norm": 0.512421190738678,
      "learning_rate": 8.508148064838948e-05,
      "loss": 0.0368,
      "step": 1740
    },
    {
      "epoch": 1.4204545454545454,
      "grad_norm": 0.33640575408935547,
      "learning_rate": 8.488458772904684e-05,
      "loss": 0.0394,
      "step": 1750
    },
    {
      "epoch": 1.4285714285714286,
      "grad_norm": 0.4379742741584778,
      "learning_rate": 8.468663511069217e-05,
      "loss": 0.0439,
      "step": 1760
    },
    {
      "epoch": 1.4366883116883118,
      "grad_norm": 0.4011531472206116,
      "learning_rate": 8.448762880658825e-05,
      "loss": 0.037,
      "step": 1770
    },
    {
      "epoch": 1.4448051948051948,
      "grad_norm": 0.35906273126602173,
      "learning_rate": 8.428757486200603e-05,
      "loss": 0.0394,
      "step": 1780
    },
    {
      "epoch": 1.452922077922078,
      "grad_norm": 0.28073570132255554,
      "learning_rate": 8.40864793540409e-05,
      "loss": 0.0385,
      "step": 1790
    },
    {
      "epoch": 1.4610389610389611,
      "grad_norm": 0.24478106200695038,
      "learning_rate": 8.388434839142813e-05,
      "loss": 0.0387,
      "step": 1800
    },
    {
      "epoch": 1.469155844155844,
      "grad_norm": 0.555260419845581,
      "learning_rate": 8.368118811435726e-05,
      "loss": 0.0387,
      "step": 1810
    },
    {
      "epoch": 1.4772727272727273,
      "grad_norm": 0.5704160332679749,
      "learning_rate": 8.347700469428564e-05,
      "loss": 0.0345,
      "step": 1820
    },
    {
      "epoch": 1.4853896103896105,
      "grad_norm": 0.3263181746006012,
      "learning_rate": 8.327180433375091e-05,
      "loss": 0.0362,
      "step": 1830
    },
    {
      "epoch": 1.4935064935064934,
      "grad_norm": 0.33647775650024414,
      "learning_rate": 8.306559326618259e-05,
      "loss": 0.037,
      "step": 1840
    },
    {
      "epoch": 1.5016233766233766,
      "grad_norm": 0.34997621178627014,
      "learning_rate": 8.285837775571276e-05,
      "loss": 0.0359,
      "step": 1850
    },
    {
      "epoch": 1.5097402597402598,
      "grad_norm": 0.26414382457733154,
      "learning_rate": 8.265016409698573e-05,
      "loss": 0.0418,
      "step": 1860
    },
    {
      "epoch": 1.5178571428571428,
      "grad_norm": 0.2641528248786926,
      "learning_rate": 8.244095861496686e-05,
      "loss": 0.0371,
      "step": 1870
    },
    {
      "epoch": 1.525974025974026,
      "grad_norm": 0.35056617856025696,
      "learning_rate": 8.223076766475035e-05,
      "loss": 0.0347,
      "step": 1880
    },
    {
      "epoch": 1.5340909090909092,
      "grad_norm": 0.33150261640548706,
      "learning_rate": 8.201959763136633e-05,
      "loss": 0.0286,
      "step": 1890
    },
    {
      "epoch": 1.5422077922077921,
      "grad_norm": 0.21580272912979126,
      "learning_rate": 8.180745492958674e-05,
      "loss": 0.0391,
      "step": 1900
    },
    {
      "epoch": 1.5503246753246753,
      "grad_norm": 0.6196150183677673,
      "learning_rate": 8.159434600373061e-05,
      "loss": 0.0405,
      "step": 1910
    },
    {
      "epoch": 1.5584415584415585,
      "grad_norm": 0.3643500506877899,
      "learning_rate": 8.138027732746818e-05,
      "loss": 0.0402,
      "step": 1920
    },
    {
      "epoch": 1.5665584415584415,
      "grad_norm": 0.3535213768482208,
      "learning_rate": 8.116525540362434e-05,
      "loss": 0.0348,
      "step": 1930
    },
    {
      "epoch": 1.5746753246753247,
      "grad_norm": 0.37009313702583313,
      "learning_rate": 8.094928676398101e-05,
      "loss": 0.0383,
      "step": 1940
    },
    {
      "epoch": 1.5827922077922079,
      "grad_norm": 0.31659385561943054,
      "learning_rate": 8.073237796907882e-05,
      "loss": 0.0361,
      "step": 1950
    },
    {
      "epoch": 1.5909090909090908,
      "grad_norm": 0.2771911323070526,
      "learning_rate": 8.051453560801772e-05,
      "loss": 0.0331,
      "step": 1960
    },
    {
      "epoch": 1.599025974025974,
      "grad_norm": 0.2283037304878235,
      "learning_rate": 8.029576629825687e-05,
      "loss": 0.0396,
      "step": 1970
    },
    {
      "epoch": 1.6071428571428572,
      "grad_norm": 0.3288361728191376,
      "learning_rate": 8.007607668541362e-05,
      "loss": 0.0387,
      "step": 1980
    },
    {
      "epoch": 1.6152597402597402,
      "grad_norm": 0.4881565570831299,
      "learning_rate": 7.985547344306161e-05,
      "loss": 0.0352,
      "step": 1990
    },
    {
      "epoch": 1.6233766233766234,
      "grad_norm": 0.2584371864795685,
      "learning_rate": 7.963396327252812e-05,
      "loss": 0.0338,
      "step": 2000
    },
    {
      "epoch": 1.6314935064935066,
      "grad_norm": 0.5432425737380981,
      "learning_rate": 7.941155290269038e-05,
      "loss": 0.0308,
      "step": 2010
    },
    {
      "epoch": 1.6396103896103895,
      "grad_norm": 0.38847246766090393,
      "learning_rate": 7.918824908977123e-05,
      "loss": 0.0358,
      "step": 2020
    },
    {
      "epoch": 1.6477272727272727,
      "grad_norm": 0.49234262108802795,
      "learning_rate": 7.896405861713394e-05,
      "loss": 0.0402,
      "step": 2030
    },
    {
      "epoch": 1.655844155844156,
      "grad_norm": 0.36641925573349,
      "learning_rate": 7.873898829507606e-05,
      "loss": 0.0405,
      "step": 2040
    },
    {
      "epoch": 1.6639610389610389,
      "grad_norm": 0.35777345299720764,
      "learning_rate": 7.851304496062254e-05,
      "loss": 0.0341,
      "step": 2050
    },
    {
      "epoch": 1.672077922077922,
      "grad_norm": 0.4085061252117157,
      "learning_rate": 7.828623547731818e-05,
      "loss": 0.0315,
      "step": 2060
    },
    {
      "epoch": 1.6801948051948052,
      "grad_norm": 0.4399782717227936,
      "learning_rate": 7.80585667350189e-05,
      "loss": 0.0291,
      "step": 2070
    },
    {
      "epoch": 1.6883116883116882,
      "grad_norm": 0.43032369017601013,
      "learning_rate": 7.783004564968263e-05,
      "loss": 0.0331,
      "step": 2080
    },
    {
      "epoch": 1.6964285714285714,
      "grad_norm": 0.3430509865283966,
      "learning_rate": 7.760067916315921e-05,
      "loss": 0.0322,
      "step": 2090
    },
    {
      "epoch": 1.7045454545454546,
      "grad_norm": 0.25578099489212036,
      "learning_rate": 7.737047424297941e-05,
      "loss": 0.0281,
      "step": 2100
    },
    {
      "epoch": 1.7126623376623376,
      "grad_norm": 0.4189400374889374,
      "learning_rate": 7.713943788214337e-05,
      "loss": 0.0347,
      "step": 2110
    },
    {
      "epoch": 1.7207792207792207,
      "grad_norm": 0.31031355261802673,
      "learning_rate": 7.690757709890812e-05,
      "loss": 0.0286,
      "step": 2120
    },
    {
      "epoch": 1.728896103896104,
      "grad_norm": 0.3406350016593933,
      "learning_rate": 7.66748989365744e-05,
      "loss": 0.0327,
      "step": 2130
    },
    {
      "epoch": 1.737012987012987,
      "grad_norm": 0.2371300756931305,
      "learning_rate": 7.644141046327271e-05,
      "loss": 0.0322,
      "step": 2140
    },
    {
      "epoch": 1.74512987012987,
      "grad_norm": 0.37169113755226135,
      "learning_rate": 7.620711877174866e-05,
      "loss": 0.0368,
      "step": 2150
    },
    {
      "epoch": 1.7532467532467533,
      "grad_norm": 0.4142199158668518,
      "learning_rate": 7.597203097914732e-05,
      "loss": 0.0303,
      "step": 2160
    },
    {
      "epoch": 1.7613636363636362,
      "grad_norm": 0.4226316213607788,
      "learning_rate": 7.573615422679726e-05,
      "loss": 0.0304,
      "step": 2170
    },
    {
      "epoch": 1.7694805194805194,
      "grad_norm": 0.2813127636909485,
      "learning_rate": 7.549949567999345e-05,
      "loss": 0.0287,
      "step": 2180
    },
    {
      "epoch": 1.7775974025974026,
      "grad_norm": 0.4985370635986328,
      "learning_rate": 7.526206252777968e-05,
      "loss": 0.0312,
      "step": 2190
    },
    {
      "epoch": 1.7857142857142856,
      "grad_norm": 0.4418068528175354,
      "learning_rate": 7.50238619827301e-05,
      "loss": 0.0331,
      "step": 2200
    },
    {
      "epoch": 1.7938311688311688,
      "grad_norm": 0.4157378077507019,
      "learning_rate": 7.478490128073022e-05,
      "loss": 0.0309,
      "step": 2210
    },
    {
      "epoch": 1.801948051948052,
      "grad_norm": 0.43208909034729004,
      "learning_rate": 7.454518768075704e-05,
      "loss": 0.0305,
      "step": 2220
    },
    {
      "epoch": 1.810064935064935,
      "grad_norm": 0.3125418722629547,
      "learning_rate": 7.430472846465856e-05,
      "loss": 0.0362,
      "step": 2230
    },
    {
      "epoch": 1.8181818181818183,
      "grad_norm": 0.43105587363243103,
      "learning_rate": 7.406353093693253e-05,
      "loss": 0.0315,
      "step": 2240
    },
    {
      "epoch": 1.8262987012987013,
      "grad_norm": 0.29525113105773926,
      "learning_rate": 7.382160242450469e-05,
      "loss": 0.0356,
      "step": 2250
    },
    {
      "epoch": 1.8344155844155843,
      "grad_norm": 0.43105417490005493,
      "learning_rate": 7.357895027650598e-05,
      "loss": 0.0341,
      "step": 2260
    },
    {
      "epoch": 1.8425324675324677,
      "grad_norm": 0.2936663031578064,
      "learning_rate": 7.333558186404958e-05,
      "loss": 0.0309,
      "step": 2270
    },
    {
      "epoch": 1.8506493506493507,
      "grad_norm": 0.37772467732429504,
      "learning_rate": 7.309150458000668e-05,
      "loss": 0.0366,
      "step": 2280
    },
    {
      "epoch": 1.8587662337662336,
      "grad_norm": 0.38669803738594055,
      "learning_rate": 7.284672583878219e-05,
      "loss": 0.0348,
      "step": 2290
    },
    {
      "epoch": 1.866883116883117,
      "grad_norm": 0.3333178162574768,
      "learning_rate": 7.260125307608929e-05,
      "loss": 0.0281,
      "step": 2300
    },
    {
      "epoch": 1.875,
      "grad_norm": 0.3831936717033386,
      "learning_rate": 7.235509374872373e-05,
      "loss": 0.0362,
      "step": 2310
    },
    {
      "epoch": 1.883116883116883,
      "grad_norm": 0.29595747590065,
      "learning_rate": 7.210825533433719e-05,
      "loss": 0.0296,
      "step": 2320
    },
    {
      "epoch": 1.8912337662337664,
      "grad_norm": 0.32862672209739685,
      "learning_rate": 7.186074533121013e-05,
      "loss": 0.0375,
      "step": 2330
    },
    {
      "epoch": 1.8993506493506493,
      "grad_norm": 0.36036041378974915,
      "learning_rate": 7.161257125802413e-05,
      "loss": 0.0301,
      "step": 2340
    },
    {
      "epoch": 1.9074675324675323,
      "grad_norm": 0.29135704040527344,
      "learning_rate": 7.136374065363334e-05,
      "loss": 0.0331,
      "step": 2350
    },
    {
      "epoch": 1.9155844155844157,
      "grad_norm": 0.35510045289993286,
      "learning_rate": 7.11142610768356e-05,
      "loss": 0.0326,
      "step": 2360
    },
    {
      "epoch": 1.9237012987012987,
      "grad_norm": 0.5242955088615417,
      "learning_rate": 7.086414010614276e-05,
      "loss": 0.0314,
      "step": 2370
    },
    {
      "epoch": 1.9318181818181817,
      "grad_norm": 0.3819064795970917,
      "learning_rate": 7.061338533955043e-05,
      "loss": 0.0263,
      "step": 2380
    },
    {
      "epoch": 1.939935064935065,
      "grad_norm": 0.3712959587574005,
      "learning_rate": 7.036200439430725e-05,
      "loss": 0.0328,
      "step": 2390
    },
    {
      "epoch": 1.948051948051948,
      "grad_norm": 0.4319584369659424,
      "learning_rate": 7.01100049066835e-05,
      "loss": 0.0344,
      "step": 2400
    },
    {
      "epoch": 1.9561688311688312,
      "grad_norm": 0.4357791543006897,
      "learning_rate": 6.985739453173903e-05,
      "loss": 0.0317,
      "step": 2410
    },
    {
      "epoch": 1.9642857142857144,
      "grad_norm": 0.2147785723209381,
      "learning_rate": 6.960418094309085e-05,
      "loss": 0.034,
      "step": 2420
    },
    {
      "epoch": 1.9724025974025974,
      "grad_norm": 0.3929905295372009,
      "learning_rate": 6.93503718326799e-05,
      "loss": 0.0297,
      "step": 2430
    },
    {
      "epoch": 1.9805194805194806,
      "grad_norm": 0.2676817774772644,
      "learning_rate": 6.909597491053751e-05,
      "loss": 0.0316,
      "step": 2440
    },
    {
      "epoch": 1.9886363636363638,
      "grad_norm": 0.404502272605896,
      "learning_rate": 6.884099790455113e-05,
      "loss": 0.0279,
      "step": 2450
    },
    {
      "epoch": 1.9967532467532467,
      "grad_norm": 0.48282140493392944,
      "learning_rate": 6.858544856022952e-05,
      "loss": 0.0348,
      "step": 2460
    },
    {
      "epoch": 2.0048701298701297,
      "grad_norm": 0.4860660433769226,
      "learning_rate": 6.83293346404676e-05,
      "loss": 0.0349,
      "step": 2470
    },
    {
      "epoch": 2.012987012987013,
      "grad_norm": 0.27844271063804626,
      "learning_rate": 6.80726639253105e-05,
      "loss": 0.0374,
      "step": 2480
    },
    {
      "epoch": 2.021103896103896,
      "grad_norm": 0.37690722942352295,
      "learning_rate": 6.781544421171732e-05,
      "loss": 0.0298,
      "step": 2490
    },
    {
      "epoch": 2.029220779220779,
      "grad_norm": 0.29487431049346924,
      "learning_rate": 6.755768331332424e-05,
      "loss": 0.033,
      "step": 2500
    },
    {
      "epoch": 2.0373376623376624,
      "grad_norm": 0.39269179105758667,
      "learning_rate": 6.729938906020713e-05,
      "loss": 0.0338,
      "step": 2510
    },
    {
      "epoch": 2.0454545454545454,
      "grad_norm": 0.2467389702796936,
      "learning_rate": 6.704056929864376e-05,
      "loss": 0.0268,
      "step": 2520
    },
    {
      "epoch": 2.0535714285714284,
      "grad_norm": 0.30904653668403625,
      "learning_rate": 6.67812318908754e-05,
      "loss": 0.0279,
      "step": 2530
    },
    {
      "epoch": 2.061688311688312,
      "grad_norm": 0.3185916841030121,
      "learning_rate": 6.6521384714868e-05,
      "loss": 0.0318,
      "step": 2540
    },
    {
      "epoch": 2.0698051948051948,
      "grad_norm": 0.28571951389312744,
      "learning_rate": 6.626103566407295e-05,
      "loss": 0.0338,
      "step": 2550
    },
    {
      "epoch": 2.0779220779220777,
      "grad_norm": 0.4210315942764282,
      "learning_rate": 6.600019264718713e-05,
      "loss": 0.0325,
      "step": 2560
    },
    {
      "epoch": 2.086038961038961,
      "grad_norm": 0.32041001319885254,
      "learning_rate": 6.573886358791285e-05,
      "loss": 0.0307,
      "step": 2570
    },
    {
      "epoch": 2.094155844155844,
      "grad_norm": 0.29989737272262573,
      "learning_rate": 6.547705642471703e-05,
      "loss": 0.0295,
      "step": 2580
    },
    {
      "epoch": 2.102272727272727,
      "grad_norm": 0.27079686522483826,
      "learning_rate": 6.521477911059008e-05,
      "loss": 0.0289,
      "step": 2590
    },
    {
      "epoch": 2.1103896103896105,
      "grad_norm": 0.3335155248641968,
      "learning_rate": 6.495203961280434e-05,
      "loss": 0.0268,
      "step": 2600
    },
    {
      "epoch": 2.1185064935064934,
      "grad_norm": 0.36267414689064026,
      "learning_rate": 6.468884591267204e-05,
      "loss": 0.0291,
      "step": 2610
    },
    {
      "epoch": 2.1266233766233764,
      "grad_norm": 0.38647592067718506,
      "learning_rate": 6.44252060053028e-05,
      "loss": 0.0273,
      "step": 2620
    },
    {
      "epoch": 2.13474025974026,
      "grad_norm": 0.23865100741386414,
      "learning_rate": 6.416112789936086e-05,
      "loss": 0.0268,
      "step": 2630
    },
    {
      "epoch": 2.142857142857143,
      "grad_norm": 0.35867780447006226,
      "learning_rate": 6.389661961682173e-05,
      "loss": 0.0246,
      "step": 2640
    },
    {
      "epoch": 2.150974025974026,
      "grad_norm": 0.3096346855163574,
      "learning_rate": 6.363168919272846e-05,
      "loss": 0.0258,
      "step": 2650
    },
    {
      "epoch": 2.159090909090909,
      "grad_norm": 0.2882651686668396,
      "learning_rate": 6.336634467494768e-05,
      "loss": 0.0357,
      "step": 2660
    },
    {
      "epoch": 2.167207792207792,
      "grad_norm": 0.3401713967323303,
      "learning_rate": 6.310059412392505e-05,
      "loss": 0.0295,
      "step": 2670
    },
    {
      "epoch": 2.175324675324675,
      "grad_norm": 0.20208750665187836,
      "learning_rate": 6.283444561244042e-05,
      "loss": 0.0309,
      "step": 2680
    },
    {
      "epoch": 2.1834415584415585,
      "grad_norm": 0.23864485323429108,
      "learning_rate": 6.256790722536251e-05,
      "loss": 0.0236,
      "step": 2690
    },
    {
      "epoch": 2.1915584415584415,
      "grad_norm": 0.4252474308013916,
      "learning_rate": 6.230098705940354e-05,
      "loss": 0.0305,
      "step": 2700
    },
    {
      "epoch": 2.199675324675325,
      "grad_norm": 0.251686692237854,
      "learning_rate": 6.203369322287306e-05,
      "loss": 0.0268,
      "step": 2710
    },
    {
      "epoch": 2.207792207792208,
      "grad_norm": 0.45067349076271057,
      "learning_rate": 6.17660338354317e-05,
      "loss": 0.0286,
      "step": 2720
    },
    {
      "epoch": 2.215909090909091,
      "grad_norm": 0.2426416426897049,
      "learning_rate": 6.149801702784456e-05,
      "loss": 0.0257,
      "step": 2730
    },
    {
      "epoch": 2.224025974025974,
      "grad_norm": 0.2226698249578476,
      "learning_rate": 6.122965094173424e-05,
      "loss": 0.0276,
      "step": 2740
    },
    {
      "epoch": 2.232142857142857,
      "grad_norm": 0.24318771064281464,
      "learning_rate": 6.0960943729333374e-05,
      "loss": 0.0227,
      "step": 2750
    },
    {
      "epoch": 2.24025974025974,
      "grad_norm": 0.2451125830411911,
      "learning_rate": 6.069190355323717e-05,
      "loss": 0.026,
      "step": 2760
    },
    {
      "epoch": 2.2483766233766236,
      "grad_norm": 0.26024386286735535,
      "learning_rate": 6.042253858615532e-05,
      "loss": 0.0231,
      "step": 2770
    },
    {
      "epoch": 2.2564935064935066,
      "grad_norm": 0.2502027750015259,
      "learning_rate": 6.015285701066382e-05,
      "loss": 0.0234,
      "step": 2780
    },
    {
      "epoch": 2.2646103896103895,
      "grad_norm": 0.3536471724510193,
      "learning_rate": 5.988286701895631e-05,
      "loss": 0.0245,
      "step": 2790
    },
    {
      "epoch": 2.2727272727272725,
      "grad_norm": 0.5378932952880859,
      "learning_rate": 5.961257681259535e-05,
      "loss": 0.0276,
      "step": 2800
    },
    {
      "epoch": 2.280844155844156,
      "grad_norm": 0.4171668589115143,
      "learning_rate": 5.934199460226317e-05,
      "loss": 0.0302,
      "step": 2810
    },
    {
      "epoch": 2.288961038961039,
      "grad_norm": 0.3195844292640686,
      "learning_rate": 5.9071128607512285e-05,
      "loss": 0.0222,
      "step": 2820
    },
    {
      "epoch": 2.2970779220779223,
      "grad_norm": 0.3038657307624817,
      "learning_rate": 5.8799987056515804e-05,
      "loss": 0.0278,
      "step": 2830
    },
    {
      "epoch": 2.3051948051948052,
      "grad_norm": 0.31502866744995117,
      "learning_rate": 5.8528578185817514e-05,
      "loss": 0.0255,
      "step": 2840
    },
    {
      "epoch": 2.313311688311688,
      "grad_norm": 0.2499353289604187,
      "learning_rate": 5.825691024008162e-05,
      "loss": 0.023,
      "step": 2850
    },
    {
      "epoch": 2.3214285714285716,
      "grad_norm": 0.47782427072525024,
      "learning_rate": 5.798499147184233e-05,
      "loss": 0.0296,
      "step": 2860
    },
    {
      "epoch": 2.3295454545454546,
      "grad_norm": 0.311261922121048,
      "learning_rate": 5.771283014125317e-05,
      "loss": 0.0234,
      "step": 2870
    },
    {
      "epoch": 2.3376623376623376,
      "grad_norm": 0.24453973770141602,
      "learning_rate": 5.7440434515836064e-05,
      "loss": 0.025,
      "step": 2880
    },
    {
      "epoch": 2.345779220779221,
      "grad_norm": 0.25350216031074524,
      "learning_rate": 5.7167812870230094e-05,
      "loss": 0.024,
      "step": 2890
    },
    {
      "epoch": 2.353896103896104,
      "grad_norm": 0.2107991725206375,
      "learning_rate": 5.689497348594035e-05,
      "loss": 0.0271,
      "step": 2900
    },
    {
      "epoch": 2.362012987012987,
      "grad_norm": 0.3169057071208954,
      "learning_rate": 5.662192465108613e-05,
      "loss": 0.0343,
      "step": 2910
    },
    {
      "epoch": 2.3701298701298703,
      "grad_norm": 0.3305840492248535,
      "learning_rate": 5.634867466014932e-05,
      "loss": 0.0249,
      "step": 2920
    },
    {
      "epoch": 2.3782467532467533,
      "grad_norm": 0.22127178311347961,
      "learning_rate": 5.607523181372234e-05,
      "loss": 0.0247,
      "step": 2930
    },
    {
      "epoch": 2.3863636363636362,
      "grad_norm": 0.34313908219337463,
      "learning_rate": 5.5801604418256117e-05,
      "loss": 0.0247,
      "step": 2940
    },
    {
      "epoch": 2.3944805194805197,
      "grad_norm": 0.2642175555229187,
      "learning_rate": 5.552780078580756e-05,
      "loss": 0.0252,
      "step": 2950
    },
    {
      "epoch": 2.4025974025974026,
      "grad_norm": 0.19900047779083252,
      "learning_rate": 5.525382923378728e-05,
      "loss": 0.0302,
      "step": 2960
    },
    {
      "epoch": 2.4107142857142856,
      "grad_norm": 0.3788554072380066,
      "learning_rate": 5.49796980847068e-05,
      "loss": 0.025,
      "step": 2970
    },
    {
      "epoch": 2.418831168831169,
      "grad_norm": 0.31273797154426575,
      "learning_rate": 5.470541566592573e-05,
      "loss": 0.0276,
      "step": 2980
    },
    {
      "epoch": 2.426948051948052,
      "grad_norm": 0.34932348132133484,
      "learning_rate": 5.443099030939887e-05,
      "loss": 0.0254,
      "step": 2990
    },
    {
      "epoch": 2.435064935064935,
      "grad_norm": 0.2879962623119354,
      "learning_rate": 5.415643035142309e-05,
      "loss": 0.025,
      "step": 3000
    },
    {
      "epoch": 2.4431818181818183,
      "grad_norm": 0.28197798132896423,
      "learning_rate": 5.3881744132384104e-05,
      "loss": 0.0255,
      "step": 3010
    },
    {
      "epoch": 2.4512987012987013,
      "grad_norm": 0.31070998311042786,
      "learning_rate": 5.360693999650303e-05,
      "loss": 0.0299,
      "step": 3020
    },
    {
      "epoch": 2.4594155844155843,
      "grad_norm": 0.30440738797187805,
      "learning_rate": 5.3332026291583016e-05,
      "loss": 0.0228,
      "step": 3030
    },
    {
      "epoch": 2.4675324675324677,
      "grad_norm": 0.28742456436157227,
      "learning_rate": 5.305701136875566e-05,
      "loss": 0.0239,
      "step": 3040
    },
    {
      "epoch": 2.4756493506493507,
      "grad_norm": 0.31291142106056213,
      "learning_rate": 5.278190358222721e-05,
      "loss": 0.0228,
      "step": 3050
    },
    {
      "epoch": 2.4837662337662336,
      "grad_norm": 0.5043593645095825,
      "learning_rate": 5.25067112890249e-05,
      "loss": 0.0307,
      "step": 3060
    },
    {
      "epoch": 2.491883116883117,
      "grad_norm": 0.3310283124446869,
      "learning_rate": 5.2231442848743064e-05,
      "loss": 0.027,
      "step": 3070
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.2754096984863281,
      "learning_rate": 5.1956106623289145e-05,
      "loss": 0.026,
      "step": 3080
    },
    {
      "epoch": 2.508116883116883,
      "grad_norm": 0.313946396112442,
      "learning_rate": 5.168071097662972e-05,
      "loss": 0.0309,
      "step": 3090
    },
    {
      "epoch": 2.5162337662337664,
      "grad_norm": 0.29437389969825745,
      "learning_rate": 5.1405264274536445e-05,
      "loss": 0.0263,
      "step": 3100
    },
    {
      "epoch": 2.5243506493506493,
      "grad_norm": 0.3874703347682953,
      "learning_rate": 5.112977488433188e-05,
      "loss": 0.0312,
      "step": 3110
    },
    {
      "epoch": 2.5324675324675323,
      "grad_norm": 0.2032894790172577,
      "learning_rate": 5.085425117463533e-05,
      "loss": 0.026,
      "step": 3120
    },
    {
      "epoch": 2.5405844155844157,
      "grad_norm": 0.35001760721206665,
      "learning_rate": 5.057870151510864e-05,
      "loss": 0.0287,
      "step": 3130
    },
    {
      "epoch": 2.5487012987012987,
      "grad_norm": 0.3120177686214447,
      "learning_rate": 5.030313427620197e-05,
      "loss": 0.0264,
      "step": 3140
    },
    {
      "epoch": 2.5568181818181817,
      "grad_norm": 0.2786904573440552,
      "learning_rate": 5.0027557828899426e-05,
      "loss": 0.026,
      "step": 3150
    },
    {
      "epoch": 2.564935064935065,
      "grad_norm": 0.4152393043041229,
      "learning_rate": 4.975198054446492e-05,
      "loss": 0.0259,
      "step": 3160
    },
    {
      "epoch": 2.573051948051948,
      "grad_norm": 0.3477596640586853,
      "learning_rate": 4.947641079418773e-05,
      "loss": 0.0284,
      "step": 3170
    },
    {
      "epoch": 2.5811688311688314,
      "grad_norm": 0.31803539395332336,
      "learning_rate": 4.920085694912828e-05,
      "loss": 0.0265,
      "step": 3180
    },
    {
      "epoch": 2.5892857142857144,
      "grad_norm": 0.24946662783622742,
      "learning_rate": 4.892532737986387e-05,
      "loss": 0.0246,
      "step": 3190
    },
    {
      "epoch": 2.5974025974025974,
      "grad_norm": 0.2712534964084625,
      "learning_rate": 4.864983045623434e-05,
      "loss": 0.0284,
      "step": 3200
    },
    {
      "epoch": 2.6055194805194803,
      "grad_norm": 0.2120172381401062,
      "learning_rate": 4.837437454708784e-05,
      "loss": 0.0264,
      "step": 3210
    },
    {
      "epoch": 2.6136363636363638,
      "grad_norm": 0.29879817366600037,
      "learning_rate": 4.809896802002662e-05,
      "loss": 0.0229,
      "step": 3220
    },
    {
      "epoch": 2.6217532467532467,
      "grad_norm": 0.3061186969280243,
      "learning_rate": 4.7823619241152854e-05,
      "loss": 0.0245,
      "step": 3230
    },
    {
      "epoch": 2.62987012987013,
      "grad_norm": 0.2454555630683899,
      "learning_rate": 4.754833657481445e-05,
      "loss": 0.0203,
      "step": 3240
    },
    {
      "epoch": 2.637987012987013,
      "grad_norm": 0.3605726361274719,
      "learning_rate": 4.7273128383351015e-05,
      "loss": 0.0219,
      "step": 3250
    },
    {
      "epoch": 2.646103896103896,
      "grad_norm": 0.26292887330055237,
      "learning_rate": 4.699800302683981e-05,
      "loss": 0.0304,
      "step": 3260
    },
    {
      "epoch": 2.654220779220779,
      "grad_norm": 0.3809950351715088,
      "learning_rate": 4.6722968862841806e-05,
      "loss": 0.0245,
      "step": 3270
    },
    {
      "epoch": 2.6623376623376624,
      "grad_norm": 0.2573823630809784,
      "learning_rate": 4.6448034246147754e-05,
      "loss": 0.0244,
      "step": 3280
    },
    {
      "epoch": 2.6704545454545454,
      "grad_norm": 0.30012446641921997,
      "learning_rate": 4.6173207528524476e-05,
      "loss": 0.0274,
      "step": 3290
    },
    {
      "epoch": 2.678571428571429,
      "grad_norm": 0.2845306992530823,
      "learning_rate": 4.58984970584611e-05,
      "loss": 0.0195,
      "step": 3300
    },
    {
      "epoch": 2.686688311688312,
      "grad_norm": 0.2307920604944229,
      "learning_rate": 4.562391118091544e-05,
      "loss": 0.0241,
      "step": 3310
    },
    {
      "epoch": 2.6948051948051948,
      "grad_norm": 0.17364875972270966,
      "learning_rate": 4.534945823706056e-05,
      "loss": 0.0226,
      "step": 3320
    },
    {
      "epoch": 2.7029220779220777,
      "grad_norm": 0.29188865423202515,
      "learning_rate": 4.507514656403137e-05,
      "loss": 0.0257,
      "step": 3330
    },
    {
      "epoch": 2.711038961038961,
      "grad_norm": 0.2309047132730484,
      "learning_rate": 4.480098449467132e-05,
      "loss": 0.0221,
      "step": 3340
    },
    {
      "epoch": 2.719155844155844,
      "grad_norm": 0.19017618894577026,
      "learning_rate": 4.452698035727929e-05,
      "loss": 0.0221,
      "step": 3350
    },
    {
      "epoch": 2.7272727272727275,
      "grad_norm": 0.25761279463768005,
      "learning_rate": 4.425314247535668e-05,
      "loss": 0.0234,
      "step": 3360
    },
    {
      "epoch": 2.7353896103896105,
      "grad_norm": 0.22961056232452393,
      "learning_rate": 4.3979479167354477e-05,
      "loss": 0.0262,
      "step": 3370
    },
    {
      "epoch": 2.7435064935064934,
      "grad_norm": 0.21607573330402374,
      "learning_rate": 4.370599874642055e-05,
      "loss": 0.0224,
      "step": 3380
    },
    {
      "epoch": 2.7516233766233764,
      "grad_norm": 0.20206135511398315,
      "learning_rate": 4.3432709520147205e-05,
      "loss": 0.024,
      "step": 3390
    },
    {
      "epoch": 2.75974025974026,
      "grad_norm": 0.3328724503517151,
      "learning_rate": 4.315961979031875e-05,
      "loss": 0.0288,
      "step": 3400
    },
    {
      "epoch": 2.767857142857143,
      "grad_norm": 0.31236326694488525,
      "learning_rate": 4.2886737852659325e-05,
      "loss": 0.023,
      "step": 3410
    },
    {
      "epoch": 2.775974025974026,
      "grad_norm": 0.2892083525657654,
      "learning_rate": 4.261407199658093e-05,
      "loss": 0.0284,
      "step": 3420
    },
    {
      "epoch": 2.784090909090909,
      "grad_norm": 0.30132782459259033,
      "learning_rate": 4.234163050493158e-05,
      "loss": 0.0207,
      "step": 3430
    },
    {
      "epoch": 2.792207792207792,
      "grad_norm": 0.2886647880077362,
      "learning_rate": 4.2069421653743706e-05,
      "loss": 0.0205,
      "step": 3440
    },
    {
      "epoch": 2.800324675324675,
      "grad_norm": 0.2944081723690033,
      "learning_rate": 4.179745371198276e-05,
      "loss": 0.0263,
      "step": 3450
    },
    {
      "epoch": 2.8084415584415585,
      "grad_norm": 0.2653869390487671,
      "learning_rate": 4.1525734941296026e-05,
      "loss": 0.0258,
      "step": 3460
    },
    {
      "epoch": 2.8165584415584415,
      "grad_norm": 0.3410821259021759,
      "learning_rate": 4.125427359576162e-05,
      "loss": 0.0209,
      "step": 3470
    },
    {
      "epoch": 2.824675324675325,
      "grad_norm": 0.3292246460914612,
      "learning_rate": 4.0983077921637815e-05,
      "loss": 0.0239,
      "step": 3480
    },
    {
      "epoch": 2.832792207792208,
      "grad_norm": 0.21837031841278076,
      "learning_rate": 4.07121561571125e-05,
      "loss": 0.0273,
      "step": 3490
    },
    {
      "epoch": 2.840909090909091,
      "grad_norm": 0.33826249837875366,
      "learning_rate": 4.044151653205292e-05,
      "loss": 0.0262,
      "step": 3500
    },
    {
      "epoch": 2.849025974025974,
      "grad_norm": 0.1706782430410385,
      "learning_rate": 4.0171167267755696e-05,
      "loss": 0.0264,
      "step": 3510
    },
    {
      "epoch": 2.857142857142857,
      "grad_norm": 0.2329110950231552,
      "learning_rate": 3.9901116576697083e-05,
      "loss": 0.0223,
      "step": 3520
    },
    {
      "epoch": 2.86525974025974,
      "grad_norm": 0.230225071310997,
      "learning_rate": 3.963137266228349e-05,
      "loss": 0.0185,
      "step": 3530
    },
    {
      "epoch": 2.8733766233766236,
      "grad_norm": 0.2520594000816345,
      "learning_rate": 3.93619437186023e-05,
      "loss": 0.0225,
      "step": 3540
    },
    {
      "epoch": 2.8814935064935066,
      "grad_norm": 0.15949386358261108,
      "learning_rate": 3.9092837930172884e-05,
      "loss": 0.0266,
      "step": 3550
    },
    {
      "epoch": 2.8896103896103895,
      "grad_norm": 0.22446958720684052,
      "learning_rate": 3.8824063471698105e-05,
      "loss": 0.0219,
      "step": 3560
    },
    {
      "epoch": 2.8977272727272725,
      "grad_norm": 0.248432457447052,
      "learning_rate": 3.855562850781589e-05,
      "loss": 0.0275,
      "step": 3570
    },
    {
      "epoch": 2.905844155844156,
      "grad_norm": 0.388147234916687,
      "learning_rate": 3.828754119285123e-05,
      "loss": 0.0247,
      "step": 3580
    },
    {
      "epoch": 2.913961038961039,
      "grad_norm": 0.17168256640434265,
      "learning_rate": 3.801980967056851e-05,
      "loss": 0.0237,
      "step": 3590
    },
    {
      "epoch": 2.9220779220779223,
      "grad_norm": 0.275740385055542,
      "learning_rate": 3.77524420739241e-05,
      "loss": 0.0243,
      "step": 3600
    },
    {
      "epoch": 2.9301948051948052,
      "grad_norm": 0.22579465806484222,
      "learning_rate": 3.748544652481927e-05,
      "loss": 0.0227,
      "step": 3610
    },
    {
      "epoch": 2.938311688311688,
      "grad_norm": 0.29678720235824585,
      "learning_rate": 3.721883113385353e-05,
      "loss": 0.0231,
      "step": 3620
    },
    {
      "epoch": 2.946428571428571,
      "grad_norm": 0.21918945014476776,
      "learning_rate": 3.695260400007819e-05,
      "loss": 0.0239,
      "step": 3630
    },
    {
      "epoch": 2.9545454545454546,
      "grad_norm": 0.3155342638492584,
      "learning_rate": 3.6686773210750385e-05,
      "loss": 0.0232,
      "step": 3640
    },
    {
      "epoch": 2.9626623376623376,
      "grad_norm": 0.20900188386440277,
      "learning_rate": 3.642134684108737e-05,
      "loss": 0.0205,
      "step": 3650
    },
    {
      "epoch": 2.970779220779221,
      "grad_norm": 0.24781478941440582,
      "learning_rate": 3.615633295402123e-05,
      "loss": 0.0211,
      "step": 3660
    },
    {
      "epoch": 2.978896103896104,
      "grad_norm": 0.46745410561561584,
      "learning_rate": 3.5891739599953945e-05,
      "loss": 0.0232,
      "step": 3670
    },
    {
      "epoch": 2.987012987012987,
      "grad_norm": 0.24507856369018555,
      "learning_rate": 3.5627574816512846e-05,
      "loss": 0.0219,
      "step": 3680
    },
| { |
| "epoch": 2.99512987012987, |
| "grad_norm": 0.23768098652362823, |
| "learning_rate": 3.536384662830648e-05, |
| "loss": 0.0226, |
| "step": 3690 |
| }, |
| { |
| "epoch": 3.0032467532467533, |
| "grad_norm": 0.2294023633003235, |
| "learning_rate": 3.5100563046680764e-05, |
| "loss": 0.0288, |
| "step": 3700 |
| }, |
| { |
| "epoch": 3.0113636363636362, |
| "grad_norm": 0.19534379243850708, |
| "learning_rate": 3.483773206947572e-05, |
| "loss": 0.027, |
| "step": 3710 |
| }, |
| { |
| "epoch": 3.0194805194805197, |
| "grad_norm": 0.22551284730434418, |
| "learning_rate": 3.457536168078247e-05, |
| "loss": 0.0261, |
| "step": 3720 |
| }, |
| { |
| "epoch": 3.0275974025974026, |
| "grad_norm": 0.23405173420906067, |
| "learning_rate": 3.431345985070067e-05, |
| "loss": 0.0204, |
| "step": 3730 |
| }, |
| { |
| "epoch": 3.0357142857142856, |
| "grad_norm": 0.2273300141096115, |
| "learning_rate": 3.40520345350965e-05, |
| "loss": 0.0217, |
| "step": 3740 |
| }, |
| { |
| "epoch": 3.043831168831169, |
| "grad_norm": 0.2894749343395233, |
| "learning_rate": 3.379109367536089e-05, |
| "loss": 0.0222, |
| "step": 3750 |
| }, |
| { |
| "epoch": 3.051948051948052, |
| "grad_norm": 0.34926941990852356, |
| "learning_rate": 3.3530645198168295e-05, |
| "loss": 0.0233, |
| "step": 3760 |
| }, |
| { |
| "epoch": 3.060064935064935, |
| "grad_norm": 0.3377019762992859, |
| "learning_rate": 3.327069701523595e-05, |
| "loss": 0.0284, |
| "step": 3770 |
| }, |
| { |
| "epoch": 3.0681818181818183, |
| "grad_norm": 0.24470967054367065, |
| "learning_rate": 3.301125702308353e-05, |
| "loss": 0.0235, |
| "step": 3780 |
| }, |
| { |
| "epoch": 3.0762987012987013, |
| "grad_norm": 0.2026497721672058, |
| "learning_rate": 3.275233310279321e-05, |
| "loss": 0.0195, |
| "step": 3790 |
| }, |
| { |
| "epoch": 3.0844155844155843, |
| "grad_norm": 0.24781477451324463, |
| "learning_rate": 3.249393311977037e-05, |
| "loss": 0.0218, |
| "step": 3800 |
| }, |
| { |
| "epoch": 3.0925324675324677, |
| "grad_norm": 0.1729935109615326, |
| "learning_rate": 3.223606492350451e-05, |
| "loss": 0.0213, |
| "step": 3810 |
| }, |
| { |
| "epoch": 3.1006493506493507, |
| "grad_norm": 0.2468423992395401, |
| "learning_rate": 3.197873634733096e-05, |
| "loss": 0.0239, |
| "step": 3820 |
| }, |
| { |
| "epoch": 3.1087662337662336, |
| "grad_norm": 0.27226418256759644, |
| "learning_rate": 3.172195520819285e-05, |
| "loss": 0.023, |
| "step": 3830 |
| }, |
| { |
| "epoch": 3.116883116883117, |
| "grad_norm": 0.3348056972026825, |
| "learning_rate": 3.146572930640362e-05, |
| "loss": 0.0204, |
| "step": 3840 |
| }, |
| { |
| "epoch": 3.125, |
| "grad_norm": 0.2769404351711273, |
| "learning_rate": 3.121006642541014e-05, |
| "loss": 0.0184, |
| "step": 3850 |
| }, |
| { |
| "epoch": 3.133116883116883, |
| "grad_norm": 0.264667272567749, |
| "learning_rate": 3.095497433155626e-05, |
| "loss": 0.024, |
| "step": 3860 |
| }, |
| { |
| "epoch": 3.1412337662337664, |
| "grad_norm": 0.24052001535892487, |
| "learning_rate": 3.070046077384682e-05, |
| "loss": 0.021, |
| "step": 3870 |
| }, |
| { |
| "epoch": 3.1493506493506493, |
| "grad_norm": 0.25225111842155457, |
| "learning_rate": 3.0446533483712304e-05, |
| "loss": 0.0215, |
| "step": 3880 |
| }, |
| { |
| "epoch": 3.1574675324675323, |
| "grad_norm": 0.19073453545570374, |
| "learning_rate": 3.0193200174774038e-05, |
| "loss": 0.0184, |
| "step": 3890 |
| }, |
| { |
| "epoch": 3.1655844155844157, |
| "grad_norm": 0.18121835589408875, |
| "learning_rate": 2.994046854260974e-05, |
| "loss": 0.0221, |
| "step": 3900 |
| }, |
| { |
| "epoch": 3.1737012987012987, |
| "grad_norm": 0.27128997445106506, |
| "learning_rate": 2.9688346264519866e-05, |
| "loss": 0.0184, |
| "step": 3910 |
| }, |
| { |
| "epoch": 3.1818181818181817, |
| "grad_norm": 0.2848660945892334, |
| "learning_rate": 2.943684099929436e-05, |
| "loss": 0.0203, |
| "step": 3920 |
| }, |
| { |
| "epoch": 3.189935064935065, |
| "grad_norm": 0.2707204520702362, |
| "learning_rate": 2.918596038697995e-05, |
| "loss": 0.0186, |
| "step": 3930 |
| }, |
| { |
| "epoch": 3.198051948051948, |
| "grad_norm": 0.28140372037887573, |
| "learning_rate": 2.8935712048648112e-05, |
| "loss": 0.0203, |
| "step": 3940 |
| }, |
| { |
| "epoch": 3.206168831168831, |
| "grad_norm": 0.2458561360836029, |
| "learning_rate": 2.8686103586163626e-05, |
| "loss": 0.0214, |
| "step": 3950 |
| }, |
| { |
| "epoch": 3.2142857142857144, |
| "grad_norm": 0.20385901629924774, |
| "learning_rate": 2.843714258195346e-05, |
| "loss": 0.0211, |
| "step": 3960 |
| }, |
| { |
| "epoch": 3.2224025974025974, |
| "grad_norm": 0.2751452922821045, |
| "learning_rate": 2.8188836598776662e-05, |
| "loss": 0.0197, |
| "step": 3970 |
| }, |
| { |
| "epoch": 3.2305194805194803, |
| "grad_norm": 0.19618158042430878, |
| "learning_rate": 2.7941193179494484e-05, |
| "loss": 0.0196, |
| "step": 3980 |
| }, |
| { |
| "epoch": 3.2386363636363638, |
| "grad_norm": 0.22209911048412323, |
| "learning_rate": 2.7694219846841262e-05, |
| "loss": 0.0192, |
| "step": 3990 |
| }, |
| { |
| "epoch": 3.2467532467532467, |
| "grad_norm": 0.23248954117298126, |
| "learning_rate": 2.7447924103195976e-05, |
| "loss": 0.0211, |
| "step": 4000 |
| }, |
| { |
| "epoch": 3.2548701298701297, |
| "grad_norm": 0.30527588725090027, |
| "learning_rate": 2.7202313430354253e-05, |
| "loss": 0.0193, |
| "step": 4010 |
| }, |
| { |
| "epoch": 3.262987012987013, |
| "grad_norm": 0.21639414131641388, |
| "learning_rate": 2.695739528930111e-05, |
| "loss": 0.0169, |
| "step": 4020 |
| }, |
| { |
| "epoch": 3.271103896103896, |
| "grad_norm": 0.28023266792297363, |
| "learning_rate": 2.67131771199844e-05, |
| "loss": 0.0208, |
| "step": 4030 |
| }, |
| { |
| "epoch": 3.279220779220779, |
| "grad_norm": 0.20019178092479706, |
| "learning_rate": 2.6469666341088677e-05, |
| "loss": 0.0249, |
| "step": 4040 |
| }, |
| { |
| "epoch": 3.2873376623376624, |
| "grad_norm": 0.3043162524700165, |
| "learning_rate": 2.6226870349809885e-05, |
| "loss": 0.0204, |
| "step": 4050 |
| }, |
| { |
| "epoch": 3.2954545454545454, |
| "grad_norm": 0.35738375782966614, |
| "learning_rate": 2.5984796521630737e-05, |
| "loss": 0.0231, |
| "step": 4060 |
| }, |
| { |
| "epoch": 3.3035714285714284, |
| "grad_norm": 0.3056882619857788, |
| "learning_rate": 2.574345221009653e-05, |
| "loss": 0.0206, |
| "step": 4070 |
| }, |
| { |
| "epoch": 3.311688311688312, |
| "grad_norm": 0.2278686761856079, |
| "learning_rate": 2.5502844746591804e-05, |
| "loss": 0.0184, |
| "step": 4080 |
| }, |
| { |
| "epoch": 3.3198051948051948, |
| "grad_norm": 0.1812301129102707, |
| "learning_rate": 2.526298144011775e-05, |
| "loss": 0.0185, |
| "step": 4090 |
| }, |
| { |
| "epoch": 3.3279220779220777, |
| "grad_norm": 0.32972440123558044, |
| "learning_rate": 2.5023869577070013e-05, |
| "loss": 0.0236, |
| "step": 4100 |
| }, |
| { |
| "epoch": 3.336038961038961, |
| "grad_norm": 0.34270867705345154, |
| "learning_rate": 2.478551642101743e-05, |
| "loss": 0.0194, |
| "step": 4110 |
| }, |
| { |
| "epoch": 3.344155844155844, |
| "grad_norm": 0.2542712092399597, |
| "learning_rate": 2.4547929212481435e-05, |
| "loss": 0.0204, |
| "step": 4120 |
| }, |
| { |
| "epoch": 3.3522727272727275, |
| "grad_norm": 0.17830567061901093, |
| "learning_rate": 2.4311115168716013e-05, |
| "loss": 0.0213, |
| "step": 4130 |
| }, |
| { |
| "epoch": 3.3603896103896105, |
| "grad_norm": 0.19581981003284454, |
| "learning_rate": 2.4075081483488494e-05, |
| "loss": 0.02, |
| "step": 4140 |
| }, |
| { |
| "epoch": 3.3685064935064934, |
| "grad_norm": 0.17574402689933777, |
| "learning_rate": 2.3839835326861104e-05, |
| "loss": 0.0219, |
| "step": 4150 |
| }, |
| { |
| "epoch": 3.3766233766233764, |
| "grad_norm": 0.24614615738391876, |
| "learning_rate": 2.3605383844972966e-05, |
| "loss": 0.0137, |
| "step": 4160 |
| }, |
| { |
| "epoch": 3.38474025974026, |
| "grad_norm": 0.32522711157798767, |
| "learning_rate": 2.3371734159823284e-05, |
| "loss": 0.0229, |
| "step": 4170 |
| }, |
| { |
| "epoch": 3.392857142857143, |
| "grad_norm": 0.23649130761623383, |
| "learning_rate": 2.3138893369054766e-05, |
| "loss": 0.0178, |
| "step": 4180 |
| }, |
| { |
| "epoch": 3.400974025974026, |
| "grad_norm": 0.2631138861179352, |
| "learning_rate": 2.2906868545738102e-05, |
| "loss": 0.0236, |
| "step": 4190 |
| }, |
| { |
| "epoch": 3.409090909090909, |
| "grad_norm": 0.22087505459785461, |
| "learning_rate": 2.2675666738157186e-05, |
| "loss": 0.0174, |
| "step": 4200 |
| }, |
| { |
| "epoch": 3.417207792207792, |
| "grad_norm": 0.27582108974456787, |
| "learning_rate": 2.2445294969594844e-05, |
| "loss": 0.0163, |
| "step": 4210 |
| }, |
| { |
| "epoch": 3.425324675324675, |
| "grad_norm": 0.22414959967136383, |
| "learning_rate": 2.22157602381196e-05, |
| "loss": 0.0167, |
| "step": 4220 |
| }, |
| { |
| "epoch": 3.4334415584415585, |
| "grad_norm": 0.34231072664260864, |
| "learning_rate": 2.1987069516373098e-05, |
| "loss": 0.0146, |
| "step": 4230 |
| }, |
| { |
| "epoch": 3.4415584415584415, |
| "grad_norm": 0.18641334772109985, |
| "learning_rate": 2.1759229751358217e-05, |
| "loss": 0.0178, |
| "step": 4240 |
| }, |
| { |
| "epoch": 3.449675324675325, |
| "grad_norm": 0.32411453127861023, |
| "learning_rate": 2.1532247864228084e-05, |
| "loss": 0.0198, |
| "step": 4250 |
| }, |
| { |
| "epoch": 3.457792207792208, |
| "grad_norm": 0.20409472286701202, |
| "learning_rate": 2.1306130750075865e-05, |
| "loss": 0.0252, |
| "step": 4260 |
| }, |
| { |
| "epoch": 3.465909090909091, |
| "grad_norm": 0.20589956641197205, |
| "learning_rate": 2.1080885277725236e-05, |
| "loss": 0.0172, |
| "step": 4270 |
| }, |
| { |
| "epoch": 3.474025974025974, |
| "grad_norm": 0.35658302903175354, |
| "learning_rate": 2.085651828952175e-05, |
| "loss": 0.0188, |
| "step": 4280 |
| }, |
| { |
| "epoch": 3.482142857142857, |
| "grad_norm": 0.29051825404167175, |
| "learning_rate": 2.063303660112506e-05, |
| "loss": 0.0169, |
| "step": 4290 |
| }, |
| { |
| "epoch": 3.49025974025974, |
| "grad_norm": 0.16607648134231567, |
| "learning_rate": 2.0410447001301753e-05, |
| "loss": 0.0175, |
| "step": 4300 |
| }, |
| { |
| "epoch": 3.4983766233766236, |
| "grad_norm": 0.23021842539310455, |
| "learning_rate": 2.0188756251719203e-05, |
| "loss": 0.0162, |
| "step": 4310 |
| }, |
| { |
| "epoch": 3.5064935064935066, |
| "grad_norm": 0.18860861659049988, |
| "learning_rate": 1.9967971086740195e-05, |
| "loss": 0.0186, |
| "step": 4320 |
| }, |
| { |
| "epoch": 3.5146103896103895, |
| "grad_norm": 0.25722193717956543, |
| "learning_rate": 1.974809821321827e-05, |
| "loss": 0.0156, |
| "step": 4330 |
| }, |
| { |
| "epoch": 3.5227272727272725, |
| "grad_norm": 0.2840486466884613, |
| "learning_rate": 1.9529144310294023e-05, |
| "loss": 0.0152, |
| "step": 4340 |
| }, |
| { |
| "epoch": 3.530844155844156, |
| "grad_norm": 0.24226586520671844, |
| "learning_rate": 1.9311116029192278e-05, |
| "loss": 0.0177, |
| "step": 4350 |
| }, |
| { |
| "epoch": 3.538961038961039, |
| "grad_norm": 0.32124438881874084, |
| "learning_rate": 1.909401999301993e-05, |
| "loss": 0.0163, |
| "step": 4360 |
| }, |
| { |
| "epoch": 3.5470779220779223, |
| "grad_norm": 0.21062976121902466, |
| "learning_rate": 1.887786279656482e-05, |
| "loss": 0.0166, |
| "step": 4370 |
| }, |
| { |
| "epoch": 3.5551948051948052, |
| "grad_norm": 0.37951183319091797, |
| "learning_rate": 1.8662651006095387e-05, |
| "loss": 0.0156, |
| "step": 4380 |
| }, |
| { |
| "epoch": 3.563311688311688, |
| "grad_norm": 0.26338446140289307, |
| "learning_rate": 1.8448391159161204e-05, |
| "loss": 0.018, |
| "step": 4390 |
| }, |
| { |
| "epoch": 3.571428571428571, |
| "grad_norm": 0.2080383002758026, |
| "learning_rate": 1.8235089764394408e-05, |
| "loss": 0.0225, |
| "step": 4400 |
| }, |
| { |
| "epoch": 3.5795454545454546, |
| "grad_norm": 0.20108529925346375, |
| "learning_rate": 1.8022753301311935e-05, |
| "loss": 0.0202, |
| "step": 4410 |
| }, |
| { |
| "epoch": 3.5876623376623376, |
| "grad_norm": 0.21950218081474304, |
| "learning_rate": 1.7811388220118707e-05, |
| "loss": 0.0184, |
| "step": 4420 |
| }, |
| { |
| "epoch": 3.595779220779221, |
| "grad_norm": 0.15438999235630035, |
| "learning_rate": 1.7601000941511757e-05, |
| "loss": 0.0175, |
| "step": 4430 |
| }, |
| { |
| "epoch": 3.603896103896104, |
| "grad_norm": 0.2578168213367462, |
| "learning_rate": 1.7391597856485083e-05, |
| "loss": 0.0227, |
| "step": 4440 |
| }, |
| { |
| "epoch": 3.612012987012987, |
| "grad_norm": 0.15993569791316986, |
| "learning_rate": 1.7183185326135543e-05, |
| "loss": 0.0173, |
| "step": 4450 |
| }, |
| { |
| "epoch": 3.62012987012987, |
| "grad_norm": 0.28656288981437683, |
| "learning_rate": 1.6975769681469705e-05, |
| "loss": 0.0178, |
| "step": 4460 |
| }, |
| { |
| "epoch": 3.6282467532467533, |
| "grad_norm": 0.3594793975353241, |
| "learning_rate": 1.676935722321139e-05, |
| "loss": 0.0163, |
| "step": 4470 |
| }, |
| { |
| "epoch": 3.6363636363636362, |
| "grad_norm": 0.1838555634021759, |
| "learning_rate": 1.6563954221610355e-05, |
| "loss": 0.0172, |
| "step": 4480 |
| }, |
| { |
| "epoch": 3.6444805194805197, |
| "grad_norm": 0.23116141557693481, |
| "learning_rate": 1.6359566916251845e-05, |
| "loss": 0.0184, |
| "step": 4490 |
| }, |
| { |
| "epoch": 3.6525974025974026, |
| "grad_norm": 0.17763112485408783, |
| "learning_rate": 1.615620151586697e-05, |
| "loss": 0.0168, |
| "step": 4500 |
| }, |
| { |
| "epoch": 3.6607142857142856, |
| "grad_norm": 0.2717979848384857, |
| "learning_rate": 1.5953864198144135e-05, |
| "loss": 0.02, |
| "step": 4510 |
| }, |
| { |
| "epoch": 3.6688311688311686, |
| "grad_norm": 0.20756039023399353, |
| "learning_rate": 1.5752561109541447e-05, |
| "loss": 0.0191, |
| "step": 4520 |
| }, |
| { |
| "epoch": 3.676948051948052, |
| "grad_norm": 0.18792922794818878, |
| "learning_rate": 1.5552298365099882e-05, |
| "loss": 0.0161, |
| "step": 4530 |
| }, |
| { |
| "epoch": 3.685064935064935, |
| "grad_norm": 0.26093602180480957, |
| "learning_rate": 1.5353082048257596e-05, |
| "loss": 0.0141, |
| "step": 4540 |
| }, |
| { |
| "epoch": 3.6931818181818183, |
| "grad_norm": 0.24755381047725677, |
| "learning_rate": 1.5154918210665148e-05, |
| "loss": 0.0168, |
| "step": 4550 |
| }, |
| { |
| "epoch": 3.7012987012987013, |
| "grad_norm": 0.15973913669586182, |
| "learning_rate": 1.4957812872001614e-05, |
| "loss": 0.0137, |
| "step": 4560 |
| }, |
| { |
| "epoch": 3.7094155844155843, |
| "grad_norm": 0.1759711503982544, |
| "learning_rate": 1.4761772019791748e-05, |
| "loss": 0.0179, |
| "step": 4570 |
| }, |
| { |
| "epoch": 3.7175324675324677, |
| "grad_norm": 0.22808237373828888, |
| "learning_rate": 1.4566801609224096e-05, |
| "loss": 0.0176, |
| "step": 4580 |
| }, |
| { |
| "epoch": 3.7256493506493507, |
| "grad_norm": 0.16413456201553345, |
| "learning_rate": 1.4372907562970079e-05, |
| "loss": 0.0161, |
| "step": 4590 |
| }, |
| { |
| "epoch": 3.7337662337662336, |
| "grad_norm": 0.2310037910938263, |
| "learning_rate": 1.4180095771004154e-05, |
| "loss": 0.0144, |
| "step": 4600 |
| }, |
| { |
| "epoch": 3.741883116883117, |
| "grad_norm": 0.1860443502664566, |
| "learning_rate": 1.3988372090424773e-05, |
| "loss": 0.019, |
| "step": 4610 |
| }, |
| { |
| "epoch": 3.75, |
| "grad_norm": 0.2372734099626541, |
| "learning_rate": 1.3797742345276521e-05, |
| "loss": 0.0163, |
| "step": 4620 |
| }, |
| { |
| "epoch": 3.758116883116883, |
| "grad_norm": 0.21986857056617737, |
| "learning_rate": 1.3608212326373249e-05, |
| "loss": 0.0177, |
| "step": 4630 |
| }, |
| { |
| "epoch": 3.7662337662337664, |
| "grad_norm": 0.26009854674339294, |
| "learning_rate": 1.3419787791122062e-05, |
| "loss": 0.015, |
| "step": 4640 |
| }, |
| { |
| "epoch": 3.7743506493506493, |
| "grad_norm": 0.19511309266090393, |
| "learning_rate": 1.323247446334847e-05, |
| "loss": 0.0185, |
| "step": 4650 |
| }, |
| { |
| "epoch": 3.7824675324675323, |
| "grad_norm": 0.2254074364900589, |
| "learning_rate": 1.3046278033122577e-05, |
| "loss": 0.023, |
| "step": 4660 |
| }, |
| { |
| "epoch": 3.7905844155844157, |
| "grad_norm": 0.23219813406467438, |
| "learning_rate": 1.286120415658611e-05, |
| "loss": 0.0197, |
| "step": 4670 |
| }, |
| { |
| "epoch": 3.7987012987012987, |
| "grad_norm": 0.21777114272117615, |
| "learning_rate": 1.2677258455780683e-05, |
| "loss": 0.0149, |
| "step": 4680 |
| }, |
| { |
| "epoch": 3.8068181818181817, |
| "grad_norm": 0.1834493726491928, |
| "learning_rate": 1.2494446518477022e-05, |
| "loss": 0.0164, |
| "step": 4690 |
| }, |
| { |
| "epoch": 3.814935064935065, |
| "grad_norm": 0.20459257066249847, |
| "learning_rate": 1.2312773898005175e-05, |
| "loss": 0.0177, |
| "step": 4700 |
| }, |
| { |
| "epoch": 3.823051948051948, |
| "grad_norm": 0.24848534166812897, |
| "learning_rate": 1.2132246113085822e-05, |
| "loss": 0.0164, |
| "step": 4710 |
| }, |
| { |
| "epoch": 3.8311688311688314, |
| "grad_norm": 0.3230781853199005, |
| "learning_rate": 1.1952868647662696e-05, |
| "loss": 0.0163, |
| "step": 4720 |
| }, |
| { |
| "epoch": 3.8392857142857144, |
| "grad_norm": 0.2101476788520813, |
| "learning_rate": 1.1774646950735913e-05, |
| "loss": 0.0164, |
| "step": 4730 |
| }, |
| { |
| "epoch": 3.8474025974025974, |
| "grad_norm": 0.2525160014629364, |
| "learning_rate": 1.1597586436196473e-05, |
| "loss": 0.0136, |
| "step": 4740 |
| }, |
| { |
| "epoch": 3.8555194805194803, |
| "grad_norm": 0.15569104254245758, |
| "learning_rate": 1.1421692482661856e-05, |
| "loss": 0.0144, |
| "step": 4750 |
| }, |
| { |
| "epoch": 3.8636363636363638, |
| "grad_norm": 0.15702477097511292, |
| "learning_rate": 1.124697043331256e-05, |
| "loss": 0.0157, |
| "step": 4760 |
| }, |
| { |
| "epoch": 3.8717532467532467, |
| "grad_norm": 0.1934150606393814, |
| "learning_rate": 1.107342559572977e-05, |
| "loss": 0.0191, |
| "step": 4770 |
| }, |
| { |
| "epoch": 3.87987012987013, |
| "grad_norm": 0.18723918497562408, |
| "learning_rate": 1.090106324173426e-05, |
| "loss": 0.0197, |
| "step": 4780 |
| }, |
| { |
| "epoch": 3.887987012987013, |
| "grad_norm": 0.16724443435668945, |
| "learning_rate": 1.0729888607226113e-05, |
| "loss": 0.0151, |
| "step": 4790 |
| }, |
| { |
| "epoch": 3.896103896103896, |
| "grad_norm": 0.25319328904151917, |
| "learning_rate": 1.0559906892025745e-05, |
| "loss": 0.0175, |
| "step": 4800 |
| }, |
| { |
| "epoch": 3.904220779220779, |
| "grad_norm": 0.2188456505537033, |
| "learning_rate": 1.0391123259715906e-05, |
| "loss": 0.0142, |
| "step": 4810 |
| }, |
| { |
| "epoch": 3.9123376623376624, |
| "grad_norm": 0.1482021063566208, |
| "learning_rate": 1.0223542837484839e-05, |
| "loss": 0.0122, |
| "step": 4820 |
| }, |
| { |
| "epoch": 3.9204545454545454, |
| "grad_norm": 0.22208091616630554, |
| "learning_rate": 1.0057170715970559e-05, |
| "loss": 0.017, |
| "step": 4830 |
| }, |
| { |
| "epoch": 3.928571428571429, |
| "grad_norm": 0.1939132660627365, |
| "learning_rate": 9.892011949106172e-06, |
| "loss": 0.0298, |
| "step": 4840 |
| }, |
| { |
| "epoch": 3.936688311688312, |
| "grad_norm": 0.35118457674980164, |
| "learning_rate": 9.728071553966339e-06, |
| "loss": 0.0176, |
| "step": 4850 |
| }, |
| { |
| "epoch": 3.9448051948051948, |
| "grad_norm": 0.1808943897485733, |
| "learning_rate": 9.56535451061496e-06, |
| "loss": 0.014, |
| "step": 4860 |
| }, |
| { |
| "epoch": 3.9529220779220777, |
| "grad_norm": 0.19436822831630707, |
| "learning_rate": 9.403865761953779e-06, |
| "loss": 0.0205, |
| "step": 4870 |
| }, |
| { |
| "epoch": 3.961038961038961, |
| "grad_norm": 0.24877876043319702, |
| "learning_rate": 9.243610213572285e-06, |
| "loss": 0.0187, |
| "step": 4880 |
| }, |
| { |
| "epoch": 3.969155844155844, |
| "grad_norm": 0.20981882512569427, |
| "learning_rate": 9.084592733598735e-06, |
| "loss": 0.0127, |
| "step": 4890 |
| }, |
| { |
| "epoch": 3.9772727272727275, |
| "grad_norm": 0.33825045824050903, |
| "learning_rate": 8.92681815255219e-06, |
| "loss": 0.017, |
| "step": 4900 |
| }, |
| { |
| "epoch": 3.9853896103896105, |
| "grad_norm": 0.14291802048683167, |
| "learning_rate": 8.770291263195819e-06, |
| "loss": 0.0184, |
| "step": 4910 |
| }, |
| { |
| "epoch": 3.9935064935064934, |
| "grad_norm": 0.14044474065303802, |
| "learning_rate": 8.615016820391342e-06, |
| "loss": 0.0125, |
| "step": 4920 |
| }, |
| { |
| "epoch": 4.001623376623376, |
| "grad_norm": 0.3731639087200165, |
| "learning_rate": 8.460999540954517e-06, |
| "loss": 0.0155, |
| "step": 4930 |
| }, |
| { |
| "epoch": 4.009740259740259, |
| "grad_norm": 0.18039852380752563, |
| "learning_rate": 8.308244103511909e-06, |
| "loss": 0.0141, |
| "step": 4940 |
| }, |
| { |
| "epoch": 4.017857142857143, |
| "grad_norm": 0.18165889382362366, |
| "learning_rate": 8.156755148358764e-06, |
| "loss": 0.0152, |
| "step": 4950 |
| }, |
| { |
| "epoch": 4.025974025974026, |
| "grad_norm": 0.16628675162792206, |
| "learning_rate": 8.00653727731801e-06, |
| "loss": 0.0146, |
| "step": 4960 |
| }, |
| { |
| "epoch": 4.034090909090909, |
| "grad_norm": 0.2061866968870163, |
| "learning_rate": 7.857595053600513e-06, |
| "loss": 0.0179, |
| "step": 4970 |
| }, |
| { |
| "epoch": 4.042207792207792, |
| "grad_norm": 0.2782154679298401, |
| "learning_rate": 7.709933001666431e-06, |
| "loss": 0.0198, |
| "step": 4980 |
| }, |
| { |
| "epoch": 4.050324675324675, |
| "grad_norm": 0.15913420915603638, |
| "learning_rate": 7.56355560708778e-06, |
| "loss": 0.013, |
| "step": 4990 |
| }, |
| { |
| "epoch": 4.058441558441558, |
| "grad_norm": 0.3607611358165741, |
| "learning_rate": 7.418467316412158e-06, |
| "loss": 0.0168, |
| "step": 5000 |
| }, |
| { |
| "epoch": 4.066558441558442, |
| "grad_norm": 0.2313947230577469, |
| "learning_rate": 7.2746725370277435e-06, |
| "loss": 0.0151, |
| "step": 5010 |
| }, |
| { |
| "epoch": 4.074675324675325, |
| "grad_norm": 0.15162557363510132, |
| "learning_rate": 7.132175637029293e-06, |
| "loss": 0.0161, |
| "step": 5020 |
| }, |
| { |
| "epoch": 4.082792207792208, |
| "grad_norm": 0.2052834928035736, |
| "learning_rate": 6.9909809450855345e-06, |
| "loss": 0.0166, |
| "step": 5030 |
| }, |
| { |
| "epoch": 4.090909090909091, |
| "grad_norm": 0.2189348191022873, |
| "learning_rate": 6.851092750307686e-06, |
| "loss": 0.0154, |
| "step": 5040 |
| }, |
| { |
| "epoch": 4.099025974025974, |
| "grad_norm": 0.176688089966774, |
| "learning_rate": 6.712515302119077e-06, |
| "loss": 0.015, |
| "step": 5050 |
| }, |
| { |
| "epoch": 4.107142857142857, |
| "grad_norm": 0.1926172971725464, |
| "learning_rate": 6.575252810126143e-06, |
| "loss": 0.0126, |
| "step": 5060 |
| }, |
| { |
| "epoch": 4.115259740259741, |
| "grad_norm": 0.2208670973777771, |
| "learning_rate": 6.439309443990532e-06, |
| "loss": 0.0147, |
| "step": 5070 |
| }, |
| { |
| "epoch": 4.123376623376624, |
| "grad_norm": 0.20093731582164764, |
| "learning_rate": 6.304689333302416e-06, |
| "loss": 0.0157, |
| "step": 5080 |
| }, |
| { |
| "epoch": 4.1314935064935066, |
| "grad_norm": 0.19225037097930908, |
| "learning_rate": 6.171396567455051e-06, |
| "loss": 0.0151, |
| "step": 5090 |
| }, |
| { |
| "epoch": 4.1396103896103895, |
| "grad_norm": 0.16921591758728027, |
| "learning_rate": 6.039435195520604e-06, |
| "loss": 0.0184, |
| "step": 5100 |
| }, |
| { |
| "epoch": 4.1477272727272725, |
| "grad_norm": 0.13120335340499878, |
| "learning_rate": 5.908809226127054e-06, |
| "loss": 0.0134, |
| "step": 5110 |
| }, |
| { |
| "epoch": 4.1558441558441555, |
| "grad_norm": 0.5254223346710205, |
| "learning_rate": 5.779522627336537e-06, |
| "loss": 0.0193, |
| "step": 5120 |
| }, |
| { |
| "epoch": 4.163961038961039, |
| "grad_norm": 0.20656515657901764, |
| "learning_rate": 5.651579326524709e-06, |
| "loss": 0.0137, |
| "step": 5130 |
| }, |
| { |
| "epoch": 4.172077922077922, |
| "grad_norm": 0.16024062037467957, |
| "learning_rate": 5.524983210261481e-06, |
| "loss": 0.0148, |
| "step": 5140 |
| }, |
| { |
| "epoch": 4.180194805194805, |
| "grad_norm": 0.29712915420532227, |
| "learning_rate": 5.399738124192988e-06, |
| "loss": 0.014, |
| "step": 5150 |
| }, |
| { |
| "epoch": 4.188311688311688, |
| "grad_norm": 0.18745556473731995, |
| "learning_rate": 5.2758478729247164e-06, |
| "loss": 0.0144, |
| "step": 5160 |
| }, |
| { |
| "epoch": 4.196428571428571, |
| "grad_norm": 0.16319838166236877, |
| "learning_rate": 5.153316219905946e-06, |
| "loss": 0.0118, |
| "step": 5170 |
| }, |
| { |
| "epoch": 4.204545454545454, |
| "grad_norm": 0.22492633759975433, |
| "learning_rate": 5.032146887315448e-06, |
| "loss": 0.0128, |
| "step": 5180 |
| }, |
| { |
| "epoch": 4.212662337662338, |
| "grad_norm": 0.24058054387569427, |
| "learning_rate": 4.91234355594839e-06, |
| "loss": 0.0161, |
| "step": 5190 |
| }, |
| { |
| "epoch": 4.220779220779221, |
| "grad_norm": 0.1942887008190155, |
| "learning_rate": 4.7939098651045235e-06, |
| "loss": 0.0177, |
| "step": 5200 |
| }, |
| { |
| "epoch": 4.228896103896104, |
| "grad_norm": 0.19465738534927368, |
| "learning_rate": 4.67684941247768e-06, |
| "loss": 0.0146, |
| "step": 5210 |
| }, |
| { |
| "epoch": 4.237012987012987, |
| "grad_norm": 0.18518835306167603, |
| "learning_rate": 4.5611657540464036e-06, |
| "loss": 0.0157, |
| "step": 5220 |
| }, |
| { |
| "epoch": 4.24512987012987, |
| "grad_norm": 0.19439534842967987, |
| "learning_rate": 4.446862403965984e-06, |
| "loss": 0.0158, |
| "step": 5230 |
| }, |
| { |
| "epoch": 4.253246753246753, |
| "grad_norm": 0.2327636331319809, |
| "learning_rate": 4.333942834461702e-06, |
| "loss": 0.0137, |
| "step": 5240 |
| }, |
| { |
| "epoch": 4.261363636363637, |
| "grad_norm": 0.30493679642677307, |
| "learning_rate": 4.222410475723326e-06, |
| "loss": 0.0131, |
| "step": 5250 |
| }, |
| { |
| "epoch": 4.26948051948052, |
| "grad_norm": 0.19938842952251434, |
| "learning_rate": 4.112268715800943e-06, |
| "loss": 0.0143, |
| "step": 5260 |
| }, |
| { |
| "epoch": 4.277597402597403, |
| "grad_norm": 0.21862724423408508, |
| "learning_rate": 4.003520900502028e-06, |
| "loss": 0.0185, |
| "step": 5270 |
| }, |
| { |
| "epoch": 4.285714285714286, |
| "grad_norm": 0.17468155920505524, |
| "learning_rate": 3.8961703332898e-06, |
| "loss": 0.0145, |
| "step": 5280 |
| }, |
| { |
| "epoch": 4.2938311688311686, |
| "grad_norm": 0.330278217792511, |
| "learning_rate": 3.790220275182854e-06, |
| "loss": 0.0137, |
| "step": 5290 |
| }, |
| { |
| "epoch": 4.301948051948052, |
| "grad_norm": 0.2295311689376831, |
| "learning_rate": 3.685673944656176e-06, |
| "loss": 0.0132, |
| "step": 5300 |
| }, |
| { |
| "epoch": 4.310064935064935, |
| "grad_norm": 0.18138545751571655, |
| "learning_rate": 3.582534517543268e-06, |
| "loss": 0.0145, |
| "step": 5310 |
| }, |
| { |
| "epoch": 4.318181818181818, |
| "grad_norm": 0.523574709892273, |
| "learning_rate": 3.4808051269397512e-06, |
| "loss": 0.0145, |
| "step": 5320 |
| }, |
| { |
| "epoch": 4.326298701298701, |
| "grad_norm": 0.13321630656719208, |
| "learning_rate": 3.380488863108183e-06, |
| "loss": 0.0144, |
| "step": 5330 |
| }, |
| { |
| "epoch": 4.334415584415584, |
| "grad_norm": 0.2136547565460205, |
| "learning_rate": 3.2815887733841365e-06, |
| "loss": 0.0145, |
| "step": 5340 |
| }, |
| { |
| "epoch": 4.342532467532467, |
| "grad_norm": 0.1495615690946579, |
| "learning_rate": 3.1841078620836683e-06, |
| "loss": 0.0136, |
| "step": 5350 |
| }, |
| { |
| "epoch": 4.35064935064935, |
| "grad_norm": 0.2267351597547531, |
| "learning_rate": 3.0880490904120874e-06, |
| "loss": 0.0152, |
| "step": 5360 |
| }, |
| { |
| "epoch": 4.358766233766234, |
| "grad_norm": 0.2415911704301834, |
| "learning_rate": 2.9934153763739205e-06, |
| "loss": 0.0173, |
| "step": 5370 |
| }, |
| { |
| "epoch": 4.366883116883117, |
| "grad_norm": 0.16218288242816925, |
| "learning_rate": 2.9002095946843277e-06, |
| "loss": 0.0138, |
| "step": 5380 |
| }, |
| { |
| "epoch": 4.375, |
| "grad_norm": 0.12508966028690338, |
| "learning_rate": 2.8084345766817676e-06, |
| "loss": 0.0122, |
| "step": 5390 |
| }, |
| { |
| "epoch": 4.383116883116883, |
| "grad_norm": 0.24942633509635925, |
| "learning_rate": 2.718093110241976e-06, |
| "loss": 0.0127, |
| "step": 5400 |
| }, |
| { |
| "epoch": 4.391233766233766, |
| "grad_norm": 0.18330267071723938, |
| "learning_rate": 2.6291879396933004e-06, |
| "loss": 0.0153, |
| "step": 5410 |
| }, |
| { |
| "epoch": 4.39935064935065, |
| "grad_norm": 0.13802915811538696, |
| "learning_rate": 2.541721765733318e-06, |
| "loss": 0.0114, |
| "step": 5420 |
| }, |
| { |
| "epoch": 4.407467532467533, |
| "grad_norm": 0.13246889412403107, |
| "learning_rate": 2.455697245346783e-06, |
| "loss": 0.013, |
| "step": 5430 |
| }, |
| { |
| "epoch": 4.415584415584416, |
| "grad_norm": 0.1441030502319336, |
| "learning_rate": 2.371116991724953e-06, |
| "loss": 0.0155, |
| "step": 5440 |
| }, |
| { |
| "epoch": 4.423701298701299, |
| "grad_norm": 0.1408785879611969, |
| "learning_rate": 2.2879835741861586e-06, |
| "loss": 0.02, |
| "step": 5450 |
| }, |
| { |
| "epoch": 4.431818181818182, |
| "grad_norm": 0.2826613187789917, |
| "learning_rate": 2.206299518097804e-06, |
| "loss": 0.0115, |
| "step": 5460 |
| }, |
| { |
| "epoch": 4.439935064935065, |
| "grad_norm": 0.2430581897497177, |
| "learning_rate": 2.1260673047996227e-06, |
| "loss": 0.0162, |
| "step": 5470 |
| }, |
| { |
| "epoch": 4.448051948051948, |
| "grad_norm": 0.17169269919395447, |
| "learning_rate": 2.047289371528299e-06, |
| "loss": 0.0103, |
| "step": 5480 |
| }, |
| { |
| "epoch": 4.4561688311688314, |
| "grad_norm": 0.13680242002010345, |
| "learning_rate": 1.96996811134344e-06, |
| "loss": 0.0136, |
| "step": 5490 |
| }, |
| { |
| "epoch": 4.464285714285714, |
| "grad_norm": 0.12849834561347961, |
| "learning_rate": 1.8941058730549132e-06, |
| "loss": 0.0118, |
| "step": 5500 |
| }, |
| { |
| "epoch": 4.472402597402597, |
| "grad_norm": 0.18202272057533264, |
| "learning_rate": 1.8197049611514194e-06, |
| "loss": 0.0134, |
| "step": 5510 |
| }, |
| { |
| "epoch": 4.48051948051948, |
| "grad_norm": 0.2274516373872757, |
| "learning_rate": 1.7467676357305561e-06, |
| "loss": 0.0129, |
| "step": 5520 |
| }, |
| { |
| "epoch": 4.488636363636363, |
| "grad_norm": 0.17817318439483643, |
| "learning_rate": 1.6752961124301415e-06, |
| "loss": 0.0153, |
| "step": 5530 |
| }, |
| { |
| "epoch": 4.496753246753247, |
| "grad_norm": 0.23610791563987732, |
| "learning_rate": 1.6052925623609049e-06, |
| "loss": 0.0186, |
| "step": 5540 |
| }, |
| { |
| "epoch": 4.50487012987013, |
| "grad_norm": 0.19274769723415375, |
| "learning_rate": 1.5367591120405256e-06, |
| "loss": 0.0109, |
| "step": 5550 |
| }, |
| { |
| "epoch": 4.512987012987013, |
| "grad_norm": 0.17529398202896118, |
| "learning_rate": 1.4696978433290653e-06, |
| "loss": 0.0131, |
| "step": 5560 |
| }, |
| { |
| "epoch": 4.521103896103896, |
| "grad_norm": 0.214157834649086, |
| "learning_rate": 1.4041107933656928e-06, |
| "loss": 0.0147, |
| "step": 5570 |
| }, |
| { |
| "epoch": 4.529220779220779, |
| "grad_norm": 0.24206124246120453, |
| "learning_rate": 1.339999954506821e-06, |
| "loss": 0.0126, |
| "step": 5580 |
| }, |
| { |
| "epoch": 4.537337662337662, |
| "grad_norm": 0.24474920332431793, |
| "learning_rate": 1.2773672742655784e-06, |
| "loss": 0.0146, |
| "step": 5590 |
| }, |
| { |
| "epoch": 4.545454545454545, |
| "grad_norm": 0.19131408631801605, |
| "learning_rate": 1.2162146552526399e-06, |
| "loss": 0.0142, |
| "step": 5600 |
| }, |
| { |
| "epoch": 4.553571428571429, |
| "grad_norm": 0.1901671588420868, |
| "learning_rate": 1.1565439551184664e-06, |
| "loss": 0.0183, |
| "step": 5610 |
| }, |
| { |
| "epoch": 4.561688311688312, |
| "grad_norm": 0.13584747910499573, |
| "learning_rate": 1.0983569864968346e-06, |
| "loss": 0.0123, |
| "step": 5620 |
| }, |
| { |
| "epoch": 4.569805194805195, |
| "grad_norm": 0.11625763773918152, |
| "learning_rate": 1.0416555169497688e-06, |
| "loss": 0.0129, |
| "step": 5630 |
| }, |
| { |
| "epoch": 4.577922077922078, |
| "grad_norm": 0.2195693403482437, |
| "learning_rate": 9.864412689139123e-07, |
| "loss": 0.0148, |
| "step": 5640 |
| }, |
| { |
| "epoch": 4.586038961038961, |
| "grad_norm": 0.1349664032459259, |
| "learning_rate": 9.327159196481138e-07, |
| "loss": 0.0142, |
| "step": 5650 |
| }, |
| { |
| "epoch": 4.5941558441558445, |
| "grad_norm": 0.147340327501297, |
| "learning_rate": 8.804811011825398e-07, |
| "loss": 0.01, |
| "step": 5660 |
| }, |
| { |
| "epoch": 4.6022727272727275, |
| "grad_norm": 0.18606358766555786, |
| "learning_rate": 8.297384002690866e-07, |
| "loss": 0.015, |
| "step": 5670 |
| }, |
| { |
| "epoch": 4.6103896103896105, |
| "grad_norm": 0.22204607725143433, |
| "learning_rate": 7.804893583331696e-07, |
| "loss": 0.0138, |
| "step": 5680 |
| }, |
| { |
| "epoch": 4.6185064935064934, |
| "grad_norm": 0.18901236355304718, |
| "learning_rate": 7.32735471426893e-07, |
| "loss": 0.0161, |
| "step": 5690 |
| }, |
| { |
| "epoch": 4.626623376623376, |
| "grad_norm": 0.11991266161203384, |
| "learning_rate": 6.864781901836259e-07, |
| "loss": 0.0147, |
| "step": 5700 |
| }, |
| { |
| "epoch": 4.634740259740259, |
| "grad_norm": 0.1843988001346588, |
| "learning_rate": 6.417189197739093e-07, |
| "loss": 0.015, |
| "step": 5710 |
| }, |
| { |
| "epoch": 4.642857142857143, |
| "grad_norm": 0.18619252741336823, |
| "learning_rate": 5.984590198627849e-07, |
| "loss": 0.0177, |
| "step": 5720 |
| }, |
| { |
| "epoch": 4.650974025974026, |
| "grad_norm": 0.19511115550994873, |
| "learning_rate": 5.566998045685112e-07, |
| "loss": 0.0164, |
| "step": 5730 |
| }, |
| { |
| "epoch": 4.659090909090909, |
| "grad_norm": 0.204880952835083, |
| "learning_rate": 5.164425424226016e-07, |
| "loss": 0.0123, |
| "step": 5740 |
| }, |
| { |
| "epoch": 4.667207792207792, |
| "grad_norm": 0.1436614841222763, |
| "learning_rate": 4.776884563313266e-07, |
| "loss": 0.0128, |
| "step": 5750 |
| }, |
| { |
| "epoch": 4.675324675324675, |
| "grad_norm": 0.15262284874916077, |
| "learning_rate": 4.404387235385443e-07, |
| "loss": 0.013, |
| "step": 5760 |
| }, |
| { |
| "epoch": 4.683441558441558, |
| "grad_norm": 0.16841436922550201, |
| "learning_rate": 4.0469447558995065e-07, |
| "loss": 0.0161, |
| "step": 5770 |
| }, |
| { |
| "epoch": 4.691558441558442, |
| "grad_norm": 0.268843412399292, |
| "learning_rate": 3.7045679829870175e-07, |
| "loss": 0.0148, |
| "step": 5780 |
| }, |
| { |
| "epoch": 4.699675324675325, |
| "grad_norm": 0.19948160648345947, |
| "learning_rate": 3.377267317124233e-07, |
| "loss": 0.0161, |
| "step": 5790 |
| }, |
| { |
| "epoch": 4.707792207792208, |
| "grad_norm": 0.35599833726882935, |
| "learning_rate": 3.0650527008162513e-07, |
| "loss": 0.0139, |
| "step": 5800 |
| }, |
| { |
| "epoch": 4.715909090909091, |
| "grad_norm": 0.09707993268966675, |
| "learning_rate": 2.767933618295082e-07, |
| "loss": 0.0119, |
| "step": 5810 |
| }, |
| { |
| "epoch": 4.724025974025974, |
| "grad_norm": 0.18284735083580017, |
| "learning_rate": 2.485919095231326e-07, |
| "loss": 0.0135, |
| "step": 5820 |
| }, |
| { |
| "epoch": 4.732142857142857, |
| "grad_norm": 0.2739686965942383, |
| "learning_rate": 2.219017698460002e-07, |
| "loss": 0.0167, |
| "step": 5830 |
| }, |
| { |
| "epoch": 4.740259740259741, |
| "grad_norm": 0.2390127032995224, |
| "learning_rate": 1.9672375357206452e-07, |
| "loss": 0.0134, |
| "step": 5840 |
| }, |
| { |
| "epoch": 4.748376623376624, |
| "grad_norm": 0.1402144432067871, |
| "learning_rate": 1.73058625541056e-07, |
| "loss": 0.0137, |
| "step": 5850 |
| }, |
| { |
| "epoch": 4.7564935064935066, |
| "grad_norm": 0.14280611276626587, |
| "learning_rate": 1.5090710463527836e-07, |
| "loss": 0.0169, |
| "step": 5860 |
| }, |
| { |
| "epoch": 4.7646103896103895, |
| "grad_norm": 0.22650258243083954, |
| "learning_rate": 1.3026986375776485e-07, |
| "loss": 0.0145, |
| "step": 5870 |
| }, |
| { |
| "epoch": 4.7727272727272725, |
| "grad_norm": 0.31712767481803894, |
| "learning_rate": 1.1114752981183917e-07, |
| "loss": 0.0161, |
| "step": 5880 |
| }, |
| { |
| "epoch": 4.7808441558441555, |
| "grad_norm": 0.1640625298023224, |
| "learning_rate": 9.354068368204739e-08, |
| "loss": 0.0162, |
| "step": 5890 |
| }, |
| { |
| "epoch": 4.788961038961039, |
| "grad_norm": 0.17054955661296844, |
| "learning_rate": 7.744986021656076e-08, |
| "loss": 0.0139, |
| "step": 5900 |
| }, |
| { |
| "epoch": 4.797077922077922, |
| "grad_norm": 0.18097509443759918, |
| "learning_rate": 6.287554821087783e-08, |
| "loss": 0.0144, |
| "step": 5910 |
| }, |
| { |
| "epoch": 4.805194805194805, |
| "grad_norm": 0.20280596613883972, |
| "learning_rate": 4.981819039300284e-08, |
| "loss": 0.015, |
| "step": 5920 |
| }, |
| { |
| "epoch": 4.813311688311688, |
| "grad_norm": 0.3539312183856964, |
| "learning_rate": 3.827818341000655e-08, |
| "loss": 0.0161, |
| "step": 5930 |
| }, |
| { |
| "epoch": 4.821428571428571, |
| "grad_norm": 0.22297385334968567, |
| "learning_rate": 2.8255877815946963e-08, |
| "loss": 0.0139, |
| "step": 5940 |
| }, |
| { |
| "epoch": 4.829545454545455, |
| "grad_norm": 0.22780290246009827, |
| "learning_rate": 1.9751578061244504e-08, |
| "loss": 0.0118, |
| "step": 5950 |
| }, |
| { |
| "epoch": 4.837662337662338, |
| "grad_norm": 0.15877029299736023, |
| "learning_rate": 1.2765542483417214e-08, |
| "loss": 0.0108, |
| "step": 5960 |
| }, |
| { |
| "epoch": 4.845779220779221, |
| "grad_norm": 0.2675601541996002, |
| "learning_rate": 7.2979832992592365e-09, |
| "loss": 0.0135, |
| "step": 5970 |
| }, |
| { |
| "epoch": 4.853896103896104, |
| "grad_norm": 0.11890222132205963, |
| "learning_rate": 3.349066598362649e-09, |
| "loss": 0.0129, |
| "step": 5980 |
| }, |
| { |
| "epoch": 4.862012987012987, |
| "grad_norm": 0.17431388795375824, |
| "learning_rate": 9.189123380826114e-10, |
| "loss": 0.0129, |
| "step": 5990 |
| }, |
| { |
| "epoch": 4.87012987012987, |
| "grad_norm": 0.4079163074493408, |
| "learning_rate": 7.594339912486703e-12, |
| "loss": 0.0156, |
| "step": 6000 |
| } |
| ], |
| "logging_steps": 10, |
| "max_steps": 6000, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 5, |
| "save_steps": 2000, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 0.0, |
| "train_batch_size": 32, |
| "trial_name": null, |
| "trial_params": null |
| } |
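
The record above is the `trainer_state.json` that Hugging Face `Trainer` writes alongside each checkpoint: `log_history` holds one entry per logging step (here every 10 steps, per `logging_steps`), and the trailing fields summarize the run (6000 of 6000 steps, ~4.87 of 5 epochs, checkpoints every 2000 steps). Below is a minimal sketch of how one might load and summarize such a file; the file name `trainer_state.json` is an assumption about where the log was saved, and only the keys that actually appear above are read.

```python
# Minimal sketch: load a Trainer state file and summarize the training run.
# Assumption: the JSON above is saved as "trainer_state.json" (Trainer writes
# this file inside each checkpoint-* directory; the exact path may differ).
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only logging entries that carry a training loss (eval entries may not).
history = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in history]
losses = [e["loss"] for e in history]
lrs = [e["learning_rate"] for e in history]

print(f"epochs trained:  {state['epoch']:.2f} of {state['num_train_epochs']}")
print(f"steps:           {state['global_step']} of {state['max_steps']}")
print(f"loss:            {losses[0]:.4f} -> {losses[-1]:.4f}")
print(f"final lr:        {lrs[-1]:.3e}")

# Optional: plot the loss curve (requires matplotlib).
# import matplotlib.pyplot as plt
# plt.plot(steps, losses); plt.xlabel("step"); plt.ylabel("loss"); plt.show()
```

Run against this log, the summary would show the loss falling from roughly 0.78 at step 10 to the ~0.013-0.016 range near step 6000, with the learning rate annealed almost to zero (7.6e-12 at the final step), consistent with a schedule that decays to zero at `max_steps`.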