diff --git "a/trainer_state.json" "b/trainer_state.json"
new file mode 100644
--- /dev/null
+++ "b/trainer_state.json"
@@ -0,0 +1,34167 @@
+{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 2.999769248519345, + "eval_steps": 500, + "global_step": 4875, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0006153372817475579, + "grad_norm": 12.19604778289795, + "learning_rate": 1.360544217687075e-07, + "loss": 2.3099, + "step": 1 + }, + { + "epoch": 0.0012306745634951158, + "grad_norm": 15.239002227783203, + "learning_rate": 2.72108843537415e-07, + "loss": 3.0532, + "step": 2 + }, + { + "epoch": 0.0018460118452426736, + "grad_norm": 12.550257682800293, + "learning_rate": 4.0816326530612243e-07, + "loss": 2.6444, + "step": 3 + }, + { + "epoch": 0.0024613491269902315, + "grad_norm": 12.652851104736328, + "learning_rate": 5.4421768707483e-07, + "loss": 2.6468, + "step": 4 + }, + { + "epoch": 0.003076686408737789, + "grad_norm": 12.416555404663086, + "learning_rate": 6.802721088435376e-07, + "loss": 2.532, + "step": 5 + }, + { + "epoch": 0.0036920236904853473, + "grad_norm": 12.310219764709473, + "learning_rate": 8.163265306122449e-07, + "loss": 2.6111, + "step": 6 + }, + { + "epoch": 0.004307360972232905, + "grad_norm": 10.38923454284668, + "learning_rate": 9.523809523809525e-07, + "loss": 2.7098, + "step": 7 + }, + { + "epoch": 0.004922698253980463, + "grad_norm": 12.7611722946167, + "learning_rate": 1.08843537414966e-06, + "loss": 2.7756, + "step": 8 + }, + { + "epoch": 0.005538035535728021, + "grad_norm": 9.331400871276855, + "learning_rate": 1.2244897959183673e-06, + "loss": 2.6625, + "step": 9 + }, + { + "epoch": 0.006153372817475578, + "grad_norm": 10.27541446685791, + "learning_rate": 1.3605442176870751e-06, + "loss": 2.4718, + "step": 10 + }, + { + "epoch": 0.0067687100992231365, + "grad_norm": 10.909602165222168, + "learning_rate": 1.4965986394557825e-06, + "loss": 2.4732, + "step": 11 + }, + { + "epoch": 0.007384047380970695, + "grad_norm": 9.367798805236816, + "learning_rate": 1.6326530612244897e-06, + "loss": 2.495, + "step": 12 + }, + { + "epoch": 0.007999384662718252, + "grad_norm": 13.295595169067383, + "learning_rate": 1.7687074829931975e-06, + "loss": 2.4132, + "step": 13 + }, + { + "epoch": 0.00861472194446581, + "grad_norm": 14.637689590454102, + "learning_rate": 1.904761904761905e-06, + "loss": 2.4541, + "step": 14 + }, + { + "epoch": 0.009230059226213368, + "grad_norm": 9.251331329345703, + "learning_rate": 2.0408163265306125e-06, + "loss": 2.5067, + "step": 15 + }, + { + "epoch": 0.009845396507960926, + "grad_norm": 10.725550651550293, + "learning_rate": 2.17687074829932e-06, + "loss": 2.5198, + "step": 16 + }, + { + "epoch": 0.010460733789708484, + "grad_norm": 11.747109413146973, + "learning_rate": 2.3129251700680273e-06, + "loss": 2.8333, + "step": 17 + }, + { + "epoch": 0.011076071071456042, + "grad_norm": 11.613968849182129, + "learning_rate": 2.4489795918367347e-06, + "loss": 2.6118, + "step": 18 + }, + { + "epoch": 0.0116914083532036, + "grad_norm": 9.965760231018066, + "learning_rate": 2.5850340136054425e-06, + "loss": 2.196, + "step": 19 + }, + { + "epoch": 0.012306745634951157, + "grad_norm": 8.113799095153809, + "learning_rate": 2.7210884353741503e-06, + "loss": 2.4032, + "step": 20 + }, + { + "epoch": 0.012922082916698715, + "grad_norm": 7.668728351593018, + "learning_rate": 2.8571428571428573e-06, + "loss": 2.5803, + "step": 21 + }, + { + "epoch":
0.013537420198446273, + "grad_norm": 10.033961296081543, + "learning_rate": 2.993197278911565e-06, + "loss": 2.4535, + "step": 22 + }, + { + "epoch": 0.014152757480193831, + "grad_norm": 9.578744888305664, + "learning_rate": 3.1292517006802725e-06, + "loss": 2.5757, + "step": 23 + }, + { + "epoch": 0.01476809476194139, + "grad_norm": 9.450577735900879, + "learning_rate": 3.2653061224489794e-06, + "loss": 2.6411, + "step": 24 + }, + { + "epoch": 0.015383432043688947, + "grad_norm": 11.06508731842041, + "learning_rate": 3.4013605442176872e-06, + "loss": 2.4776, + "step": 25 + }, + { + "epoch": 0.015998769325436504, + "grad_norm": 8.413506507873535, + "learning_rate": 3.537414965986395e-06, + "loss": 2.4179, + "step": 26 + }, + { + "epoch": 0.016614106607184063, + "grad_norm": 8.397893905639648, + "learning_rate": 3.6734693877551024e-06, + "loss": 2.54, + "step": 27 + }, + { + "epoch": 0.01722944388893162, + "grad_norm": 7.0037841796875, + "learning_rate": 3.80952380952381e-06, + "loss": 2.3879, + "step": 28 + }, + { + "epoch": 0.01784478117067918, + "grad_norm": 6.6977105140686035, + "learning_rate": 3.945578231292517e-06, + "loss": 2.4887, + "step": 29 + }, + { + "epoch": 0.018460118452426736, + "grad_norm": 8.206796646118164, + "learning_rate": 4.081632653061225e-06, + "loss": 2.4905, + "step": 30 + }, + { + "epoch": 0.019075455734174296, + "grad_norm": 6.383983612060547, + "learning_rate": 4.217687074829933e-06, + "loss": 2.4294, + "step": 31 + }, + { + "epoch": 0.019690793015921852, + "grad_norm": 6.3399152755737305, + "learning_rate": 4.35374149659864e-06, + "loss": 2.4334, + "step": 32 + }, + { + "epoch": 0.02030613029766941, + "grad_norm": 7.152157306671143, + "learning_rate": 4.489795918367348e-06, + "loss": 2.3939, + "step": 33 + }, + { + "epoch": 0.02092146757941697, + "grad_norm": 6.566653251647949, + "learning_rate": 4.6258503401360546e-06, + "loss": 2.4182, + "step": 34 + }, + { + "epoch": 0.021536804861164525, + "grad_norm": 7.585001468658447, + "learning_rate": 4.761904761904762e-06, + "loss": 2.3461, + "step": 35 + }, + { + "epoch": 0.022152142142912085, + "grad_norm": 6.56134033203125, + "learning_rate": 4.897959183673469e-06, + "loss": 2.3956, + "step": 36 + }, + { + "epoch": 0.02276747942465964, + "grad_norm": 6.3935370445251465, + "learning_rate": 5.034013605442177e-06, + "loss": 2.2591, + "step": 37 + }, + { + "epoch": 0.0233828167064072, + "grad_norm": 6.876726150512695, + "learning_rate": 5.170068027210885e-06, + "loss": 2.4297, + "step": 38 + }, + { + "epoch": 0.023998153988154757, + "grad_norm": 7.576490879058838, + "learning_rate": 5.306122448979593e-06, + "loss": 2.3946, + "step": 39 + }, + { + "epoch": 0.024613491269902314, + "grad_norm": 8.067111015319824, + "learning_rate": 5.442176870748301e-06, + "loss": 2.2269, + "step": 40 + }, + { + "epoch": 0.025228828551649873, + "grad_norm": 6.732779026031494, + "learning_rate": 5.578231292517007e-06, + "loss": 2.3128, + "step": 41 + }, + { + "epoch": 0.02584416583339743, + "grad_norm": 7.763489723205566, + "learning_rate": 5.7142857142857145e-06, + "loss": 2.5258, + "step": 42 + }, + { + "epoch": 0.02645950311514499, + "grad_norm": 5.554826736450195, + "learning_rate": 5.850340136054422e-06, + "loss": 2.4433, + "step": 43 + }, + { + "epoch": 0.027074840396892546, + "grad_norm": 6.419793605804443, + "learning_rate": 5.98639455782313e-06, + "loss": 2.3108, + "step": 44 + }, + { + "epoch": 0.027690177678640106, + "grad_norm": 11.004531860351562, + "learning_rate": 6.122448979591837e-06, + "loss": 2.356, + "step": 45 + }, + 
{ + "epoch": 0.028305514960387662, + "grad_norm": 7.6814045906066895, + "learning_rate": 6.258503401360545e-06, + "loss": 2.4769, + "step": 46 + }, + { + "epoch": 0.028920852242135222, + "grad_norm": 7.453983306884766, + "learning_rate": 6.394557823129253e-06, + "loss": 2.454, + "step": 47 + }, + { + "epoch": 0.02953618952388278, + "grad_norm": 6.893926620483398, + "learning_rate": 6.530612244897959e-06, + "loss": 2.5225, + "step": 48 + }, + { + "epoch": 0.030151526805630335, + "grad_norm": 5.951063632965088, + "learning_rate": 6.666666666666667e-06, + "loss": 2.4653, + "step": 49 + }, + { + "epoch": 0.030766864087377895, + "grad_norm": 7.616079807281494, + "learning_rate": 6.8027210884353745e-06, + "loss": 2.4718, + "step": 50 + }, + { + "epoch": 0.03138220136912545, + "grad_norm": 5.9421706199646, + "learning_rate": 6.938775510204082e-06, + "loss": 2.429, + "step": 51 + }, + { + "epoch": 0.03199753865087301, + "grad_norm": 7.82779598236084, + "learning_rate": 7.07482993197279e-06, + "loss": 2.3768, + "step": 52 + }, + { + "epoch": 0.03261287593262057, + "grad_norm": 5.905935287475586, + "learning_rate": 7.210884353741497e-06, + "loss": 2.3646, + "step": 53 + }, + { + "epoch": 0.03322821321436813, + "grad_norm": 7.382134914398193, + "learning_rate": 7.346938775510205e-06, + "loss": 2.306, + "step": 54 + }, + { + "epoch": 0.03384355049611568, + "grad_norm": 8.672754287719727, + "learning_rate": 7.482993197278913e-06, + "loss": 2.2952, + "step": 55 + }, + { + "epoch": 0.03445888777786324, + "grad_norm": 8.294017791748047, + "learning_rate": 7.61904761904762e-06, + "loss": 2.2438, + "step": 56 + }, + { + "epoch": 0.035074225059610796, + "grad_norm": 7.072339057922363, + "learning_rate": 7.755102040816327e-06, + "loss": 2.2357, + "step": 57 + }, + { + "epoch": 0.03568956234135836, + "grad_norm": 6.455402851104736, + "learning_rate": 7.891156462585034e-06, + "loss": 2.1462, + "step": 58 + }, + { + "epoch": 0.036304899623105916, + "grad_norm": 6.317981243133545, + "learning_rate": 8.027210884353741e-06, + "loss": 2.3614, + "step": 59 + }, + { + "epoch": 0.03692023690485347, + "grad_norm": 8.003864288330078, + "learning_rate": 8.16326530612245e-06, + "loss": 2.2973, + "step": 60 + }, + { + "epoch": 0.03753557418660103, + "grad_norm": 6.5360260009765625, + "learning_rate": 8.299319727891157e-06, + "loss": 2.409, + "step": 61 + }, + { + "epoch": 0.03815091146834859, + "grad_norm": 7.277507781982422, + "learning_rate": 8.435374149659866e-06, + "loss": 2.1577, + "step": 62 + }, + { + "epoch": 0.03876624875009615, + "grad_norm": 7.617149829864502, + "learning_rate": 8.571428571428571e-06, + "loss": 2.419, + "step": 63 + }, + { + "epoch": 0.039381586031843704, + "grad_norm": 7.837764263153076, + "learning_rate": 8.70748299319728e-06, + "loss": 2.3789, + "step": 64 + }, + { + "epoch": 0.03999692331359126, + "grad_norm": 9.696128845214844, + "learning_rate": 8.843537414965987e-06, + "loss": 2.4108, + "step": 65 + }, + { + "epoch": 0.04061226059533882, + "grad_norm": 6.58843469619751, + "learning_rate": 8.979591836734695e-06, + "loss": 2.2956, + "step": 66 + }, + { + "epoch": 0.04122759787708638, + "grad_norm": 9.924915313720703, + "learning_rate": 9.115646258503402e-06, + "loss": 2.2518, + "step": 67 + }, + { + "epoch": 0.04184293515883394, + "grad_norm": 7.789942741394043, + "learning_rate": 9.251700680272109e-06, + "loss": 2.4292, + "step": 68 + }, + { + "epoch": 0.04245827244058149, + "grad_norm": 7.295944690704346, + "learning_rate": 9.387755102040818e-06, + "loss": 2.3679, + "step": 69 + }, + { + 
"epoch": 0.04307360972232905, + "grad_norm": 8.339579582214355, + "learning_rate": 9.523809523809525e-06, + "loss": 2.2142, + "step": 70 + }, + { + "epoch": 0.04368894700407661, + "grad_norm": 7.895396709442139, + "learning_rate": 9.659863945578232e-06, + "loss": 2.4184, + "step": 71 + }, + { + "epoch": 0.04430428428582417, + "grad_norm": 6.097289085388184, + "learning_rate": 9.795918367346939e-06, + "loss": 2.1587, + "step": 72 + }, + { + "epoch": 0.044919621567571726, + "grad_norm": 10.293917655944824, + "learning_rate": 9.931972789115647e-06, + "loss": 2.3748, + "step": 73 + }, + { + "epoch": 0.04553495884931928, + "grad_norm": 7.961585521697998, + "learning_rate": 1.0068027210884354e-05, + "loss": 2.4699, + "step": 74 + }, + { + "epoch": 0.04615029613106684, + "grad_norm": 6.856618881225586, + "learning_rate": 1.0204081632653063e-05, + "loss": 2.2603, + "step": 75 + }, + { + "epoch": 0.0467656334128144, + "grad_norm": 6.629584312438965, + "learning_rate": 1.034013605442177e-05, + "loss": 2.3074, + "step": 76 + }, + { + "epoch": 0.04738097069456196, + "grad_norm": 6.016289710998535, + "learning_rate": 1.0476190476190477e-05, + "loss": 2.1188, + "step": 77 + }, + { + "epoch": 0.047996307976309514, + "grad_norm": 8.910690307617188, + "learning_rate": 1.0612244897959186e-05, + "loss": 2.268, + "step": 78 + }, + { + "epoch": 0.04861164525805707, + "grad_norm": 6.646378993988037, + "learning_rate": 1.0748299319727893e-05, + "loss": 2.2229, + "step": 79 + }, + { + "epoch": 0.04922698253980463, + "grad_norm": 7.928714752197266, + "learning_rate": 1.0884353741496601e-05, + "loss": 2.3112, + "step": 80 + }, + { + "epoch": 0.04984231982155219, + "grad_norm": 6.825870513916016, + "learning_rate": 1.1020408163265306e-05, + "loss": 2.211, + "step": 81 + }, + { + "epoch": 0.05045765710329975, + "grad_norm": 6.413818836212158, + "learning_rate": 1.1156462585034013e-05, + "loss": 2.3144, + "step": 82 + }, + { + "epoch": 0.0510729943850473, + "grad_norm": 6.224338531494141, + "learning_rate": 1.1292517006802722e-05, + "loss": 2.1961, + "step": 83 + }, + { + "epoch": 0.05168833166679486, + "grad_norm": 7.603141784667969, + "learning_rate": 1.1428571428571429e-05, + "loss": 2.1188, + "step": 84 + }, + { + "epoch": 0.05230366894854242, + "grad_norm": 5.960072994232178, + "learning_rate": 1.1564625850340136e-05, + "loss": 2.2987, + "step": 85 + }, + { + "epoch": 0.05291900623028998, + "grad_norm": 8.072278022766113, + "learning_rate": 1.1700680272108845e-05, + "loss": 2.2372, + "step": 86 + }, + { + "epoch": 0.053534343512037535, + "grad_norm": 7.3826494216918945, + "learning_rate": 1.1836734693877552e-05, + "loss": 2.1342, + "step": 87 + }, + { + "epoch": 0.05414968079378509, + "grad_norm": 10.220178604125977, + "learning_rate": 1.197278911564626e-05, + "loss": 2.3101, + "step": 88 + }, + { + "epoch": 0.05476501807553265, + "grad_norm": 6.8845295906066895, + "learning_rate": 1.2108843537414967e-05, + "loss": 2.2763, + "step": 89 + }, + { + "epoch": 0.05538035535728021, + "grad_norm": 9.377163887023926, + "learning_rate": 1.2244897959183674e-05, + "loss": 2.3071, + "step": 90 + }, + { + "epoch": 0.05599569263902777, + "grad_norm": 6.3633270263671875, + "learning_rate": 1.2380952380952383e-05, + "loss": 2.1565, + "step": 91 + }, + { + "epoch": 0.056611029920775324, + "grad_norm": 7.064178466796875, + "learning_rate": 1.251700680272109e-05, + "loss": 2.1797, + "step": 92 + }, + { + "epoch": 0.05722636720252288, + "grad_norm": 9.560176849365234, + "learning_rate": 1.2653061224489798e-05, + "loss": 2.3988, + 
"step": 93 + }, + { + "epoch": 0.057841704484270444, + "grad_norm": 6.303845405578613, + "learning_rate": 1.2789115646258505e-05, + "loss": 2.3227, + "step": 94 + }, + { + "epoch": 0.058457041766018, + "grad_norm": 5.651893138885498, + "learning_rate": 1.2925170068027212e-05, + "loss": 2.3152, + "step": 95 + }, + { + "epoch": 0.05907237904776556, + "grad_norm": 9.477595329284668, + "learning_rate": 1.3061224489795918e-05, + "loss": 2.1448, + "step": 96 + }, + { + "epoch": 0.05968771632951311, + "grad_norm": 7.602406978607178, + "learning_rate": 1.3197278911564626e-05, + "loss": 2.4577, + "step": 97 + }, + { + "epoch": 0.06030305361126067, + "grad_norm": 7.166826248168945, + "learning_rate": 1.3333333333333333e-05, + "loss": 2.3114, + "step": 98 + }, + { + "epoch": 0.06091839089300823, + "grad_norm": 6.621010780334473, + "learning_rate": 1.3469387755102042e-05, + "loss": 2.2645, + "step": 99 + }, + { + "epoch": 0.06153372817475579, + "grad_norm": 6.79273796081543, + "learning_rate": 1.3605442176870749e-05, + "loss": 2.2804, + "step": 100 + }, + { + "epoch": 0.062149065456503345, + "grad_norm": 7.881176471710205, + "learning_rate": 1.3741496598639456e-05, + "loss": 2.2498, + "step": 101 + }, + { + "epoch": 0.0627644027382509, + "grad_norm": 8.357041358947754, + "learning_rate": 1.3877551020408165e-05, + "loss": 2.2624, + "step": 102 + }, + { + "epoch": 0.06337974001999847, + "grad_norm": 8.153701782226562, + "learning_rate": 1.4013605442176872e-05, + "loss": 2.1734, + "step": 103 + }, + { + "epoch": 0.06399507730174601, + "grad_norm": 7.32088041305542, + "learning_rate": 1.414965986394558e-05, + "loss": 2.3522, + "step": 104 + }, + { + "epoch": 0.06461041458349358, + "grad_norm": 7.949704170227051, + "learning_rate": 1.4285714285714287e-05, + "loss": 2.3535, + "step": 105 + }, + { + "epoch": 0.06522575186524114, + "grad_norm": 6.276240825653076, + "learning_rate": 1.4421768707482994e-05, + "loss": 2.1667, + "step": 106 + }, + { + "epoch": 0.06584108914698869, + "grad_norm": 6.365450859069824, + "learning_rate": 1.4557823129251703e-05, + "loss": 2.275, + "step": 107 + }, + { + "epoch": 0.06645642642873625, + "grad_norm": 7.83253812789917, + "learning_rate": 1.469387755102041e-05, + "loss": 2.6419, + "step": 108 + }, + { + "epoch": 0.0670717637104838, + "grad_norm": 6.616688251495361, + "learning_rate": 1.4829931972789118e-05, + "loss": 2.2927, + "step": 109 + }, + { + "epoch": 0.06768710099223137, + "grad_norm": 6.100420951843262, + "learning_rate": 1.4965986394557825e-05, + "loss": 2.2712, + "step": 110 + }, + { + "epoch": 0.06830243827397893, + "grad_norm": 6.779526233673096, + "learning_rate": 1.510204081632653e-05, + "loss": 2.322, + "step": 111 + }, + { + "epoch": 0.06891777555572648, + "grad_norm": 8.595475196838379, + "learning_rate": 1.523809523809524e-05, + "loss": 2.1629, + "step": 112 + }, + { + "epoch": 0.06953311283747404, + "grad_norm": 6.767415523529053, + "learning_rate": 1.5374149659863945e-05, + "loss": 2.3272, + "step": 113 + }, + { + "epoch": 0.07014845011922159, + "grad_norm": 7.218113422393799, + "learning_rate": 1.5510204081632655e-05, + "loss": 2.1886, + "step": 114 + }, + { + "epoch": 0.07076378740096916, + "grad_norm": 6.490159511566162, + "learning_rate": 1.5646258503401362e-05, + "loss": 2.2235, + "step": 115 + }, + { + "epoch": 0.07137912468271672, + "grad_norm": 7.232898235321045, + "learning_rate": 1.578231292517007e-05, + "loss": 2.2153, + "step": 116 + }, + { + "epoch": 0.07199446196446427, + "grad_norm": 6.288939952850342, + "learning_rate": 
1.5918367346938776e-05, + "loss": 2.1155, + "step": 117 + }, + { + "epoch": 0.07260979924621183, + "grad_norm": 8.061519622802734, + "learning_rate": 1.6054421768707483e-05, + "loss": 2.2276, + "step": 118 + }, + { + "epoch": 0.0732251365279594, + "grad_norm": 7.087181091308594, + "learning_rate": 1.6190476190476193e-05, + "loss": 2.2139, + "step": 119 + }, + { + "epoch": 0.07384047380970694, + "grad_norm": 7.084591388702393, + "learning_rate": 1.63265306122449e-05, + "loss": 2.4376, + "step": 120 + }, + { + "epoch": 0.07445581109145451, + "grad_norm": 7.487453460693359, + "learning_rate": 1.6462585034013607e-05, + "loss": 2.312, + "step": 121 + }, + { + "epoch": 0.07507114837320206, + "grad_norm": 7.014548301696777, + "learning_rate": 1.6598639455782314e-05, + "loss": 2.3467, + "step": 122 + }, + { + "epoch": 0.07568648565494962, + "grad_norm": 6.409976482391357, + "learning_rate": 1.673469387755102e-05, + "loss": 2.2535, + "step": 123 + }, + { + "epoch": 0.07630182293669718, + "grad_norm": 7.632189750671387, + "learning_rate": 1.687074829931973e-05, + "loss": 2.0728, + "step": 124 + }, + { + "epoch": 0.07691716021844473, + "grad_norm": 6.299214839935303, + "learning_rate": 1.7006802721088435e-05, + "loss": 2.2776, + "step": 125 + }, + { + "epoch": 0.0775324975001923, + "grad_norm": 6.972152233123779, + "learning_rate": 1.7142857142857142e-05, + "loss": 2.4254, + "step": 126 + }, + { + "epoch": 0.07814783478193985, + "grad_norm": 6.414390563964844, + "learning_rate": 1.7278911564625852e-05, + "loss": 2.2549, + "step": 127 + }, + { + "epoch": 0.07876317206368741, + "grad_norm": 6.761835098266602, + "learning_rate": 1.741496598639456e-05, + "loss": 2.1781, + "step": 128 + }, + { + "epoch": 0.07937850934543497, + "grad_norm": 7.697648048400879, + "learning_rate": 1.7551020408163266e-05, + "loss": 2.1143, + "step": 129 + }, + { + "epoch": 0.07999384662718252, + "grad_norm": 6.886279582977295, + "learning_rate": 1.7687074829931973e-05, + "loss": 2.1997, + "step": 130 + }, + { + "epoch": 0.08060918390893008, + "grad_norm": 7.696090221405029, + "learning_rate": 1.782312925170068e-05, + "loss": 2.2099, + "step": 131 + }, + { + "epoch": 0.08122452119067763, + "grad_norm": 7.289732933044434, + "learning_rate": 1.795918367346939e-05, + "loss": 2.1935, + "step": 132 + }, + { + "epoch": 0.0818398584724252, + "grad_norm": 7.200902938842773, + "learning_rate": 1.8095238095238097e-05, + "loss": 2.2497, + "step": 133 + }, + { + "epoch": 0.08245519575417276, + "grad_norm": 8.190312385559082, + "learning_rate": 1.8231292517006804e-05, + "loss": 2.2769, + "step": 134 + }, + { + "epoch": 0.08307053303592031, + "grad_norm": 8.264681816101074, + "learning_rate": 1.836734693877551e-05, + "loss": 2.2566, + "step": 135 + }, + { + "epoch": 0.08368587031766787, + "grad_norm": 10.042634010314941, + "learning_rate": 1.8503401360544218e-05, + "loss": 2.4196, + "step": 136 + }, + { + "epoch": 0.08430120759941542, + "grad_norm": 6.739928245544434, + "learning_rate": 1.863945578231293e-05, + "loss": 2.1782, + "step": 137 + }, + { + "epoch": 0.08491654488116299, + "grad_norm": 6.209414958953857, + "learning_rate": 1.8775510204081636e-05, + "loss": 2.1425, + "step": 138 + }, + { + "epoch": 0.08553188216291055, + "grad_norm": 7.777565956115723, + "learning_rate": 1.8911564625850343e-05, + "loss": 2.2254, + "step": 139 + }, + { + "epoch": 0.0861472194446581, + "grad_norm": 6.648738384246826, + "learning_rate": 1.904761904761905e-05, + "loss": 2.0742, + "step": 140 + }, + { + "epoch": 0.08676255672640566, + "grad_norm": 
6.379414081573486, + "learning_rate": 1.9183673469387756e-05, + "loss": 2.255, + "step": 141 + }, + { + "epoch": 0.08737789400815323, + "grad_norm": 9.732193946838379, + "learning_rate": 1.9319727891156463e-05, + "loss": 2.4084, + "step": 142 + }, + { + "epoch": 0.08799323128990078, + "grad_norm": 7.218408107757568, + "learning_rate": 1.945578231292517e-05, + "loss": 2.3535, + "step": 143 + }, + { + "epoch": 0.08860856857164834, + "grad_norm": 9.508156776428223, + "learning_rate": 1.9591836734693877e-05, + "loss": 2.0541, + "step": 144 + }, + { + "epoch": 0.08922390585339589, + "grad_norm": 6.861551284790039, + "learning_rate": 1.9727891156462588e-05, + "loss": 2.1808, + "step": 145 + }, + { + "epoch": 0.08983924313514345, + "grad_norm": 5.8775954246521, + "learning_rate": 1.9863945578231295e-05, + "loss": 2.1334, + "step": 146 + }, + { + "epoch": 0.09045458041689101, + "grad_norm": 7.791916370391846, + "learning_rate": 2e-05, + "loss": 2.1319, + "step": 147 + }, + { + "epoch": 0.09106991769863856, + "grad_norm": 6.224850654602051, + "learning_rate": 1.9999997792428403e-05, + "loss": 2.3734, + "step": 148 + }, + { + "epoch": 0.09168525498038613, + "grad_norm": 6.596772193908691, + "learning_rate": 1.9999991169714585e-05, + "loss": 2.163, + "step": 149 + }, + { + "epoch": 0.09230059226213368, + "grad_norm": 7.813839912414551, + "learning_rate": 1.999998013186146e-05, + "loss": 2.3454, + "step": 150 + }, + { + "epoch": 0.09291592954388124, + "grad_norm": 8.478799819946289, + "learning_rate": 1.999996467887392e-05, + "loss": 2.3438, + "step": 151 + }, + { + "epoch": 0.0935312668256288, + "grad_norm": 7.226709365844727, + "learning_rate": 1.9999944810758777e-05, + "loss": 2.2818, + "step": 152 + }, + { + "epoch": 0.09414660410737635, + "grad_norm": 6.941035270690918, + "learning_rate": 1.99999205275248e-05, + "loss": 2.4037, + "step": 153 + }, + { + "epoch": 0.09476194138912392, + "grad_norm": 6.867600440979004, + "learning_rate": 1.9999891829182717e-05, + "loss": 2.3074, + "step": 154 + }, + { + "epoch": 0.09537727867087147, + "grad_norm": 7.304531574249268, + "learning_rate": 1.9999858715745195e-05, + "loss": 2.2173, + "step": 155 + }, + { + "epoch": 0.09599261595261903, + "grad_norm": 6.696152687072754, + "learning_rate": 1.9999821187226853e-05, + "loss": 2.2215, + "step": 156 + }, + { + "epoch": 0.09660795323436659, + "grad_norm": 6.913352012634277, + "learning_rate": 1.9999779243644264e-05, + "loss": 2.2539, + "step": 157 + }, + { + "epoch": 0.09722329051611414, + "grad_norm": 6.659013271331787, + "learning_rate": 1.9999732885015946e-05, + "loss": 2.1661, + "step": 158 + }, + { + "epoch": 0.0978386277978617, + "grad_norm": 6.3328351974487305, + "learning_rate": 1.9999682111362368e-05, + "loss": 2.1587, + "step": 159 + }, + { + "epoch": 0.09845396507960925, + "grad_norm": 6.690054893493652, + "learning_rate": 1.9999626922705944e-05, + "loss": 2.0343, + "step": 160 + }, + { + "epoch": 0.09906930236135682, + "grad_norm": 6.7721452713012695, + "learning_rate": 1.999956731907104e-05, + "loss": 2.3304, + "step": 161 + }, + { + "epoch": 0.09968463964310438, + "grad_norm": 7.867499351501465, + "learning_rate": 1.9999503300483978e-05, + "loss": 2.3125, + "step": 162 + }, + { + "epoch": 0.10029997692485193, + "grad_norm": 7.189154148101807, + "learning_rate": 1.9999434866973018e-05, + "loss": 2.2407, + "step": 163 + }, + { + "epoch": 0.1009153142065995, + "grad_norm": 5.734553813934326, + "learning_rate": 1.9999362018568375e-05, + "loss": 2.0928, + "step": 164 + }, + { + "epoch": 
0.10153065148834706, + "grad_norm": 7.70632791519165, + "learning_rate": 1.9999284755302213e-05, + "loss": 2.2985, + "step": 165 + }, + { + "epoch": 0.1021459887700946, + "grad_norm": 6.493799686431885, + "learning_rate": 1.9999203077208644e-05, + "loss": 2.2469, + "step": 166 + }, + { + "epoch": 0.10276132605184217, + "grad_norm": 6.781482696533203, + "learning_rate": 1.999911698432373e-05, + "loss": 2.2995, + "step": 167 + }, + { + "epoch": 0.10337666333358972, + "grad_norm": 6.93203067779541, + "learning_rate": 1.9999026476685484e-05, + "loss": 2.1683, + "step": 168 + }, + { + "epoch": 0.10399200061533728, + "grad_norm": 5.854724407196045, + "learning_rate": 1.999893155433387e-05, + "loss": 2.1104, + "step": 169 + }, + { + "epoch": 0.10460733789708485, + "grad_norm": 5.918574810028076, + "learning_rate": 1.999883221731079e-05, + "loss": 2.1557, + "step": 170 + }, + { + "epoch": 0.1052226751788324, + "grad_norm": 8.446393966674805, + "learning_rate": 1.9998728465660105e-05, + "loss": 2.3397, + "step": 171 + }, + { + "epoch": 0.10583801246057996, + "grad_norm": 6.58097505569458, + "learning_rate": 1.9998620299427624e-05, + "loss": 2.0943, + "step": 172 + }, + { + "epoch": 0.10645334974232751, + "grad_norm": 8.08385944366455, + "learning_rate": 1.9998507718661104e-05, + "loss": 2.462, + "step": 173 + }, + { + "epoch": 0.10706868702407507, + "grad_norm": 6.295893669128418, + "learning_rate": 1.999839072341025e-05, + "loss": 2.1889, + "step": 174 + }, + { + "epoch": 0.10768402430582263, + "grad_norm": 6.3036065101623535, + "learning_rate": 1.9998269313726722e-05, + "loss": 2.1683, + "step": 175 + }, + { + "epoch": 0.10829936158757018, + "grad_norm": 7.015434265136719, + "learning_rate": 1.9998143489664114e-05, + "loss": 2.2009, + "step": 176 + }, + { + "epoch": 0.10891469886931775, + "grad_norm": 8.733070373535156, + "learning_rate": 1.9998013251277988e-05, + "loss": 1.9872, + "step": 177 + }, + { + "epoch": 0.1095300361510653, + "grad_norm": 6.213490962982178, + "learning_rate": 1.9997878598625845e-05, + "loss": 2.1302, + "step": 178 + }, + { + "epoch": 0.11014537343281286, + "grad_norm": 6.989812850952148, + "learning_rate": 1.9997739531767132e-05, + "loss": 2.3182, + "step": 179 + }, + { + "epoch": 0.11076071071456042, + "grad_norm": 8.277668952941895, + "learning_rate": 1.9997596050763253e-05, + "loss": 2.2011, + "step": 180 + }, + { + "epoch": 0.11137604799630797, + "grad_norm": 7.179686546325684, + "learning_rate": 1.999744815567755e-05, + "loss": 2.0579, + "step": 181 + }, + { + "epoch": 0.11199138527805554, + "grad_norm": 8.399940490722656, + "learning_rate": 1.999729584657533e-05, + "loss": 2.1875, + "step": 182 + }, + { + "epoch": 0.11260672255980309, + "grad_norm": 7.5999064445495605, + "learning_rate": 1.999713912352384e-05, + "loss": 2.2394, + "step": 183 + }, + { + "epoch": 0.11322205984155065, + "grad_norm": 6.889599800109863, + "learning_rate": 1.9996977986592263e-05, + "loss": 2.1876, + "step": 184 + }, + { + "epoch": 0.11383739712329821, + "grad_norm": 6.113670349121094, + "learning_rate": 1.9996812435851754e-05, + "loss": 2.2049, + "step": 185 + }, + { + "epoch": 0.11445273440504576, + "grad_norm": 6.120764255523682, + "learning_rate": 1.9996642471375406e-05, + "loss": 2.1365, + "step": 186 + }, + { + "epoch": 0.11506807168679332, + "grad_norm": 6.781260013580322, + "learning_rate": 1.9996468093238256e-05, + "loss": 2.2402, + "step": 187 + }, + { + "epoch": 0.11568340896854089, + "grad_norm": 7.765617847442627, + "learning_rate": 1.99962893015173e-05, + "loss": 1.9575, + 
"step": 188 + }, + { + "epoch": 0.11629874625028844, + "grad_norm": 9.168259620666504, + "learning_rate": 1.999610609629147e-05, + "loss": 2.3315, + "step": 189 + }, + { + "epoch": 0.116914083532036, + "grad_norm": 6.384528636932373, + "learning_rate": 1.9995918477641657e-05, + "loss": 2.2097, + "step": 190 + }, + { + "epoch": 0.11752942081378355, + "grad_norm": 5.753864288330078, + "learning_rate": 1.9995726445650698e-05, + "loss": 2.0606, + "step": 191 + }, + { + "epoch": 0.11814475809553111, + "grad_norm": 10.064996719360352, + "learning_rate": 1.9995530000403378e-05, + "loss": 2.2434, + "step": 192 + }, + { + "epoch": 0.11876009537727868, + "grad_norm": 7.190062046051025, + "learning_rate": 1.999532914198643e-05, + "loss": 2.3134, + "step": 193 + }, + { + "epoch": 0.11937543265902623, + "grad_norm": 7.418085098266602, + "learning_rate": 1.9995123870488537e-05, + "loss": 2.2731, + "step": 194 + }, + { + "epoch": 0.11999076994077379, + "grad_norm": 7.50383186340332, + "learning_rate": 1.999491418600033e-05, + "loss": 2.0997, + "step": 195 + }, + { + "epoch": 0.12060610722252134, + "grad_norm": 6.834039211273193, + "learning_rate": 1.999470008861438e-05, + "loss": 2.1782, + "step": 196 + }, + { + "epoch": 0.1212214445042689, + "grad_norm": 7.89847469329834, + "learning_rate": 1.9994481578425225e-05, + "loss": 2.2068, + "step": 197 + }, + { + "epoch": 0.12183678178601647, + "grad_norm": 6.3203301429748535, + "learning_rate": 1.9994258655529336e-05, + "loss": 1.9962, + "step": 198 + }, + { + "epoch": 0.12245211906776401, + "grad_norm": 6.775093078613281, + "learning_rate": 1.999403132002513e-05, + "loss": 2.1712, + "step": 199 + }, + { + "epoch": 0.12306745634951158, + "grad_norm": 7.706969261169434, + "learning_rate": 1.999379957201299e-05, + "loss": 2.2695, + "step": 200 + }, + { + "epoch": 0.12368279363125913, + "grad_norm": 6.38070011138916, + "learning_rate": 1.999356341159523e-05, + "loss": 2.2007, + "step": 201 + }, + { + "epoch": 0.12429813091300669, + "grad_norm": 7.725106716156006, + "learning_rate": 1.9993322838876116e-05, + "loss": 2.0353, + "step": 202 + }, + { + "epoch": 0.12491346819475425, + "grad_norm": 6.194108963012695, + "learning_rate": 1.9993077853961874e-05, + "loss": 2.305, + "step": 203 + }, + { + "epoch": 0.1255288054765018, + "grad_norm": 6.3423051834106445, + "learning_rate": 1.9992828456960654e-05, + "loss": 2.0069, + "step": 204 + }, + { + "epoch": 0.12614414275824937, + "grad_norm": 7.696938991546631, + "learning_rate": 1.999257464798258e-05, + "loss": 1.9936, + "step": 205 + }, + { + "epoch": 0.12675948003999693, + "grad_norm": 7.483890056610107, + "learning_rate": 1.999231642713971e-05, + "loss": 2.1589, + "step": 206 + }, + { + "epoch": 0.1273748173217445, + "grad_norm": 6.746100425720215, + "learning_rate": 1.999205379454605e-05, + "loss": 2.1784, + "step": 207 + }, + { + "epoch": 0.12799015460349203, + "grad_norm": 8.610551834106445, + "learning_rate": 1.9991786750317555e-05, + "loss": 2.3596, + "step": 208 + }, + { + "epoch": 0.1286054918852396, + "grad_norm": 7.048105716705322, + "learning_rate": 1.999151529457213e-05, + "loss": 2.0247, + "step": 209 + }, + { + "epoch": 0.12922082916698716, + "grad_norm": 5.952898979187012, + "learning_rate": 1.999123942742963e-05, + "loss": 2.2933, + "step": 210 + }, + { + "epoch": 0.12983616644873472, + "grad_norm": 7.782407283782959, + "learning_rate": 1.999095914901185e-05, + "loss": 2.2313, + "step": 211 + }, + { + "epoch": 0.13045150373048228, + "grad_norm": 7.508443355560303, + "learning_rate": 
1.9990674459442536e-05, + "loss": 2.3096, + "step": 212 + }, + { + "epoch": 0.13106684101222982, + "grad_norm": 8.852317810058594, + "learning_rate": 1.9990385358847385e-05, + "loss": 2.129, + "step": 213 + }, + { + "epoch": 0.13168217829397738, + "grad_norm": 8.939552307128906, + "learning_rate": 1.9990091847354043e-05, + "loss": 2.2285, + "step": 214 + }, + { + "epoch": 0.13229751557572494, + "grad_norm": 7.306134223937988, + "learning_rate": 1.9989793925092092e-05, + "loss": 2.1938, + "step": 215 + }, + { + "epoch": 0.1329128528574725, + "grad_norm": 6.431981563568115, + "learning_rate": 1.9989491592193074e-05, + "loss": 2.2276, + "step": 216 + }, + { + "epoch": 0.13352819013922007, + "grad_norm": 6.79127311706543, + "learning_rate": 1.998918484879047e-05, + "loss": 2.1765, + "step": 217 + }, + { + "epoch": 0.1341435274209676, + "grad_norm": 6.1376471519470215, + "learning_rate": 1.9988873695019716e-05, + "loss": 2.199, + "step": 218 + }, + { + "epoch": 0.13475886470271517, + "grad_norm": 7.265410900115967, + "learning_rate": 1.9988558131018188e-05, + "loss": 2.2423, + "step": 219 + }, + { + "epoch": 0.13537420198446273, + "grad_norm": 7.469689846038818, + "learning_rate": 1.998823815692521e-05, + "loss": 2.1785, + "step": 220 + }, + { + "epoch": 0.1359895392662103, + "grad_norm": 7.498953342437744, + "learning_rate": 1.9987913772882058e-05, + "loss": 2.3555, + "step": 221 + }, + { + "epoch": 0.13660487654795786, + "grad_norm": 7.313173294067383, + "learning_rate": 1.9987584979031954e-05, + "loss": 2.2644, + "step": 222 + }, + { + "epoch": 0.1372202138297054, + "grad_norm": 9.507674217224121, + "learning_rate": 1.9987251775520056e-05, + "loss": 1.9738, + "step": 223 + }, + { + "epoch": 0.13783555111145296, + "grad_norm": 7.811926364898682, + "learning_rate": 1.998691416249349e-05, + "loss": 2.0466, + "step": 224 + }, + { + "epoch": 0.13845088839320052, + "grad_norm": 9.086389541625977, + "learning_rate": 1.9986572140101312e-05, + "loss": 2.1886, + "step": 225 + }, + { + "epoch": 0.13906622567494809, + "grad_norm": 7.8804402351379395, + "learning_rate": 1.998622570849453e-05, + "loss": 2.2407, + "step": 226 + }, + { + "epoch": 0.13968156295669565, + "grad_norm": 5.217634201049805, + "learning_rate": 1.9985874867826095e-05, + "loss": 2.0141, + "step": 227 + }, + { + "epoch": 0.14029690023844318, + "grad_norm": 6.523334980010986, + "learning_rate": 1.9985519618250912e-05, + "loss": 2.2227, + "step": 228 + }, + { + "epoch": 0.14091223752019075, + "grad_norm": 6.376529693603516, + "learning_rate": 1.998515995992583e-05, + "loss": 2.4924, + "step": 229 + }, + { + "epoch": 0.1415275748019383, + "grad_norm": 6.972442150115967, + "learning_rate": 1.9984795893009643e-05, + "loss": 2.0996, + "step": 230 + }, + { + "epoch": 0.14214291208368587, + "grad_norm": 7.551859378814697, + "learning_rate": 1.9984427417663085e-05, + "loss": 2.2245, + "step": 231 + }, + { + "epoch": 0.14275824936543344, + "grad_norm": 6.202492713928223, + "learning_rate": 1.9984054534048854e-05, + "loss": 2.2962, + "step": 232 + }, + { + "epoch": 0.14337358664718097, + "grad_norm": 6.948767185211182, + "learning_rate": 1.9983677242331573e-05, + "loss": 2.2262, + "step": 233 + }, + { + "epoch": 0.14398892392892854, + "grad_norm": 7.514490127563477, + "learning_rate": 1.998329554267783e-05, + "loss": 1.9778, + "step": 234 + }, + { + "epoch": 0.1446042612106761, + "grad_norm": 7.180710315704346, + "learning_rate": 1.9982909435256143e-05, + "loss": 2.153, + "step": 235 + }, + { + "epoch": 0.14521959849242366, + "grad_norm": 
6.4899749755859375, + "learning_rate": 1.998251892023699e-05, + "loss": 2.3546, + "step": 236 + }, + { + "epoch": 0.14583493577417123, + "grad_norm": 9.064291954040527, + "learning_rate": 1.998212399779279e-05, + "loss": 2.2512, + "step": 237 + }, + { + "epoch": 0.1464502730559188, + "grad_norm": 8.431541442871094, + "learning_rate": 1.99817246680979e-05, + "loss": 2.1062, + "step": 238 + }, + { + "epoch": 0.14706561033766632, + "grad_norm": 7.465375900268555, + "learning_rate": 1.998132093132864e-05, + "loss": 2.2107, + "step": 239 + }, + { + "epoch": 0.1476809476194139, + "grad_norm": 6.768311500549316, + "learning_rate": 1.9980912787663253e-05, + "loss": 2.2643, + "step": 240 + }, + { + "epoch": 0.14829628490116145, + "grad_norm": 6.039884090423584, + "learning_rate": 1.9980500237281948e-05, + "loss": 2.2226, + "step": 241 + }, + { + "epoch": 0.14891162218290901, + "grad_norm": 8.214302062988281, + "learning_rate": 1.9980083280366874e-05, + "loss": 2.1622, + "step": 242 + }, + { + "epoch": 0.14952695946465658, + "grad_norm": 7.437131881713867, + "learning_rate": 1.9979661917102116e-05, + "loss": 2.1727, + "step": 243 + }, + { + "epoch": 0.1501422967464041, + "grad_norm": 6.665640830993652, + "learning_rate": 1.9979236147673718e-05, + "loss": 2.225, + "step": 244 + }, + { + "epoch": 0.15075763402815168, + "grad_norm": 7.592454433441162, + "learning_rate": 1.997880597226966e-05, + "loss": 2.1225, + "step": 245 + }, + { + "epoch": 0.15137297130989924, + "grad_norm": 7.649346828460693, + "learning_rate": 1.9978371391079874e-05, + "loss": 2.206, + "step": 246 + }, + { + "epoch": 0.1519883085916468, + "grad_norm": 8.0563325881958, + "learning_rate": 1.997793240429623e-05, + "loss": 2.0705, + "step": 247 + }, + { + "epoch": 0.15260364587339437, + "grad_norm": 7.286764621734619, + "learning_rate": 1.997748901211255e-05, + "loss": 2.3531, + "step": 248 + }, + { + "epoch": 0.1532189831551419, + "grad_norm": 6.957502365112305, + "learning_rate": 1.9977041214724594e-05, + "loss": 2.2328, + "step": 249 + }, + { + "epoch": 0.15383432043688947, + "grad_norm": 7.015744209289551, + "learning_rate": 1.997658901233008e-05, + "loss": 2.0994, + "step": 250 + }, + { + "epoch": 0.15444965771863703, + "grad_norm": 6.151637554168701, + "learning_rate": 1.9976132405128646e-05, + "loss": 2.1639, + "step": 251 + }, + { + "epoch": 0.1550649950003846, + "grad_norm": 6.553138256072998, + "learning_rate": 1.997567139332191e-05, + "loss": 2.1711, + "step": 252 + }, + { + "epoch": 0.15568033228213216, + "grad_norm": 6.514746189117432, + "learning_rate": 1.9975205977113398e-05, + "loss": 1.9452, + "step": 253 + }, + { + "epoch": 0.1562956695638797, + "grad_norm": 6.238303184509277, + "learning_rate": 1.997473615670861e-05, + "loss": 2.2592, + "step": 254 + }, + { + "epoch": 0.15691100684562725, + "grad_norm": 6.985532760620117, + "learning_rate": 1.997426193231497e-05, + "loss": 2.108, + "step": 255 + }, + { + "epoch": 0.15752634412737482, + "grad_norm": 6.6124114990234375, + "learning_rate": 1.997378330414186e-05, + "loss": 2.2004, + "step": 256 + }, + { + "epoch": 0.15814168140912238, + "grad_norm": 6.370915412902832, + "learning_rate": 1.99733002724006e-05, + "loss": 2.2795, + "step": 257 + }, + { + "epoch": 0.15875701869086994, + "grad_norm": 6.579582691192627, + "learning_rate": 1.9972812837304454e-05, + "loss": 2.0762, + "step": 258 + }, + { + "epoch": 0.15937235597261748, + "grad_norm": 6.175236225128174, + "learning_rate": 1.9972320999068636e-05, + "loss": 2.0872, + "step": 259 + }, + { + "epoch": 
0.15998769325436504, + "grad_norm": 6.37175989151001, + "learning_rate": 1.9971824757910295e-05, + "loss": 2.3272, + "step": 260 + }, + { + "epoch": 0.1606030305361126, + "grad_norm": 5.7008562088012695, + "learning_rate": 1.9971324114048533e-05, + "loss": 2.1787, + "step": 261 + }, + { + "epoch": 0.16121836781786017, + "grad_norm": 6.113302230834961, + "learning_rate": 1.997081906770438e-05, + "loss": 2.2305, + "step": 262 + }, + { + "epoch": 0.16183370509960773, + "grad_norm": 6.189788818359375, + "learning_rate": 1.997030961910084e-05, + "loss": 2.0858, + "step": 263 + }, + { + "epoch": 0.16244904238135527, + "grad_norm": 7.134002685546875, + "learning_rate": 1.9969795768462824e-05, + "loss": 2.1002, + "step": 264 + }, + { + "epoch": 0.16306437966310283, + "grad_norm": 8.21679401397705, + "learning_rate": 1.9969277516017214e-05, + "loss": 2.1967, + "step": 265 + }, + { + "epoch": 0.1636797169448504, + "grad_norm": 6.420109748840332, + "learning_rate": 1.996875486199282e-05, + "loss": 2.082, + "step": 266 + }, + { + "epoch": 0.16429505422659796, + "grad_norm": 7.433874607086182, + "learning_rate": 1.9968227806620413e-05, + "loss": 2.1289, + "step": 267 + }, + { + "epoch": 0.16491039150834552, + "grad_norm": 5.533511161804199, + "learning_rate": 1.9967696350132684e-05, + "loss": 2.0283, + "step": 268 + }, + { + "epoch": 0.16552572879009306, + "grad_norm": 5.577115058898926, + "learning_rate": 1.996716049276428e-05, + "loss": 2.3862, + "step": 269 + }, + { + "epoch": 0.16614106607184062, + "grad_norm": 8.04842472076416, + "learning_rate": 1.9966620234751794e-05, + "loss": 2.0882, + "step": 270 + }, + { + "epoch": 0.16675640335358818, + "grad_norm": 6.011610984802246, + "learning_rate": 1.9966075576333757e-05, + "loss": 2.3203, + "step": 271 + }, + { + "epoch": 0.16737174063533575, + "grad_norm": 6.053454875946045, + "learning_rate": 1.996552651775064e-05, + "loss": 2.2417, + "step": 272 + }, + { + "epoch": 0.1679870779170833, + "grad_norm": 5.905984878540039, + "learning_rate": 1.996497305924486e-05, + "loss": 2.1342, + "step": 273 + }, + { + "epoch": 0.16860241519883085, + "grad_norm": 6.817095756530762, + "learning_rate": 1.9964415201060782e-05, + "loss": 2.3274, + "step": 274 + }, + { + "epoch": 0.1692177524805784, + "grad_norm": 6.636928081512451, + "learning_rate": 1.9963852943444703e-05, + "loss": 2.0719, + "step": 275 + }, + { + "epoch": 0.16983308976232597, + "grad_norm": 7.396712303161621, + "learning_rate": 1.9963286286644875e-05, + "loss": 2.1194, + "step": 276 + }, + { + "epoch": 0.17044842704407354, + "grad_norm": 9.994913101196289, + "learning_rate": 1.9962715230911477e-05, + "loss": 2.2105, + "step": 277 + }, + { + "epoch": 0.1710637643258211, + "grad_norm": 7.166652202606201, + "learning_rate": 1.996213977649664e-05, + "loss": 2.1306, + "step": 278 + }, + { + "epoch": 0.17167910160756863, + "grad_norm": 5.8511576652526855, + "learning_rate": 1.996155992365444e-05, + "loss": 2.1633, + "step": 279 + }, + { + "epoch": 0.1722944388893162, + "grad_norm": 7.486011981964111, + "learning_rate": 1.9960975672640887e-05, + "loss": 2.2971, + "step": 280 + }, + { + "epoch": 0.17290977617106376, + "grad_norm": 6.147366046905518, + "learning_rate": 1.9960387023713935e-05, + "loss": 2.2664, + "step": 281 + }, + { + "epoch": 0.17352511345281132, + "grad_norm": 7.009576320648193, + "learning_rate": 1.9959793977133483e-05, + "loss": 2.2283, + "step": 282 + }, + { + "epoch": 0.1741404507345589, + "grad_norm": 6.399231433868408, + "learning_rate": 1.995919653316137e-05, + "loss": 2.4884, + 
"step": 283 + }, + { + "epoch": 0.17475578801630645, + "grad_norm": 8.704874992370605, + "learning_rate": 1.9958594692061374e-05, + "loss": 2.1609, + "step": 284 + }, + { + "epoch": 0.175371125298054, + "grad_norm": 6.015669822692871, + "learning_rate": 1.995798845409922e-05, + "loss": 2.0872, + "step": 285 + }, + { + "epoch": 0.17598646257980155, + "grad_norm": 7.871126174926758, + "learning_rate": 1.9957377819542568e-05, + "loss": 2.2546, + "step": 286 + }, + { + "epoch": 0.1766017998615491, + "grad_norm": 6.176607131958008, + "learning_rate": 1.9956762788661018e-05, + "loss": 2.3003, + "step": 287 + }, + { + "epoch": 0.17721713714329668, + "grad_norm": 7.166645050048828, + "learning_rate": 1.9956143361726123e-05, + "loss": 2.2227, + "step": 288 + }, + { + "epoch": 0.17783247442504424, + "grad_norm": 7.536888599395752, + "learning_rate": 1.9955519539011362e-05, + "loss": 2.1036, + "step": 289 + }, + { + "epoch": 0.17844781170679178, + "grad_norm": 5.248376369476318, + "learning_rate": 1.9954891320792166e-05, + "loss": 1.9512, + "step": 290 + }, + { + "epoch": 0.17906314898853934, + "grad_norm": 7.230815410614014, + "learning_rate": 1.9954258707345903e-05, + "loss": 2.2006, + "step": 291 + }, + { + "epoch": 0.1796784862702869, + "grad_norm": 6.731558799743652, + "learning_rate": 1.9953621698951875e-05, + "loss": 1.9177, + "step": 292 + }, + { + "epoch": 0.18029382355203447, + "grad_norm": 5.553421974182129, + "learning_rate": 1.9952980295891335e-05, + "loss": 2.0724, + "step": 293 + }, + { + "epoch": 0.18090916083378203, + "grad_norm": 5.962311744689941, + "learning_rate": 1.9952334498447473e-05, + "loss": 2.1421, + "step": 294 + }, + { + "epoch": 0.18152449811552956, + "grad_norm": 8.042583465576172, + "learning_rate": 1.9951684306905418e-05, + "loss": 1.9441, + "step": 295 + }, + { + "epoch": 0.18213983539727713, + "grad_norm": 7.810717582702637, + "learning_rate": 1.995102972155223e-05, + "loss": 2.4081, + "step": 296 + }, + { + "epoch": 0.1827551726790247, + "grad_norm": 6.1297197341918945, + "learning_rate": 1.995037074267693e-05, + "loss": 2.1563, + "step": 297 + }, + { + "epoch": 0.18337050996077225, + "grad_norm": 6.062806129455566, + "learning_rate": 1.9949707370570456e-05, + "loss": 2.0673, + "step": 298 + }, + { + "epoch": 0.18398584724251982, + "grad_norm": 6.49329948425293, + "learning_rate": 1.9949039605525705e-05, + "loss": 2.2752, + "step": 299 + }, + { + "epoch": 0.18460118452426735, + "grad_norm": 5.824356555938721, + "learning_rate": 1.9948367447837503e-05, + "loss": 2.1171, + "step": 300 + }, + { + "epoch": 0.18521652180601492, + "grad_norm": 6.269557952880859, + "learning_rate": 1.994769089780261e-05, + "loss": 2.0773, + "step": 301 + }, + { + "epoch": 0.18583185908776248, + "grad_norm": 7.171839714050293, + "learning_rate": 1.994700995571974e-05, + "loss": 2.0385, + "step": 302 + }, + { + "epoch": 0.18644719636951004, + "grad_norm": 6.399089813232422, + "learning_rate": 1.9946324621889538e-05, + "loss": 2.3624, + "step": 303 + }, + { + "epoch": 0.1870625336512576, + "grad_norm": 6.480278015136719, + "learning_rate": 1.9945634896614584e-05, + "loss": 2.0589, + "step": 304 + }, + { + "epoch": 0.18767787093300514, + "grad_norm": 7.1144490242004395, + "learning_rate": 1.9944940780199407e-05, + "loss": 2.1389, + "step": 305 + }, + { + "epoch": 0.1882932082147527, + "grad_norm": 6.8761420249938965, + "learning_rate": 1.9944242272950468e-05, + "loss": 2.0464, + "step": 306 + }, + { + "epoch": 0.18890854549650027, + "grad_norm": 8.053979873657227, + "learning_rate": 
1.9943539375176164e-05, + "loss": 2.2689, + "step": 307 + }, + { + "epoch": 0.18952388277824783, + "grad_norm": 8.066965103149414, + "learning_rate": 1.9942832087186842e-05, + "loss": 2.012, + "step": 308 + }, + { + "epoch": 0.1901392200599954, + "grad_norm": 6.701384544372559, + "learning_rate": 1.9942120409294768e-05, + "loss": 2.3258, + "step": 309 + }, + { + "epoch": 0.19075455734174293, + "grad_norm": 7.0195631980896, + "learning_rate": 1.9941404341814175e-05, + "loss": 2.3442, + "step": 310 + }, + { + "epoch": 0.1913698946234905, + "grad_norm": 6.345066070556641, + "learning_rate": 1.99406838850612e-05, + "loss": 2.2286, + "step": 311 + }, + { + "epoch": 0.19198523190523806, + "grad_norm": 6.843148708343506, + "learning_rate": 1.9939959039353947e-05, + "loss": 2.1668, + "step": 312 + }, + { + "epoch": 0.19260056918698562, + "grad_norm": 7.692275047302246, + "learning_rate": 1.9939229805012435e-05, + "loss": 2.266, + "step": 313 + }, + { + "epoch": 0.19321590646873318, + "grad_norm": 5.76910924911499, + "learning_rate": 1.9938496182358643e-05, + "loss": 2.028, + "step": 314 + }, + { + "epoch": 0.19383124375048072, + "grad_norm": 6.08083963394165, + "learning_rate": 1.9937758171716468e-05, + "loss": 2.0602, + "step": 315 + }, + { + "epoch": 0.19444658103222828, + "grad_norm": 7.122509002685547, + "learning_rate": 1.9937015773411757e-05, + "loss": 2.0934, + "step": 316 + }, + { + "epoch": 0.19506191831397585, + "grad_norm": 7.054267883300781, + "learning_rate": 1.9936268987772286e-05, + "loss": 2.0998, + "step": 317 + }, + { + "epoch": 0.1956772555957234, + "grad_norm": 7.110306262969971, + "learning_rate": 1.993551781512777e-05, + "loss": 2.2287, + "step": 318 + }, + { + "epoch": 0.19629259287747097, + "grad_norm": 7.289877891540527, + "learning_rate": 1.9934762255809866e-05, + "loss": 2.2176, + "step": 319 + }, + { + "epoch": 0.1969079301592185, + "grad_norm": 5.4655890464782715, + "learning_rate": 1.9934002310152167e-05, + "loss": 2.0582, + "step": 320 + }, + { + "epoch": 0.19752326744096607, + "grad_norm": 7.0723443031311035, + "learning_rate": 1.9933237978490193e-05, + "loss": 2.1347, + "step": 321 + }, + { + "epoch": 0.19813860472271363, + "grad_norm": 6.986056327819824, + "learning_rate": 1.9932469261161414e-05, + "loss": 2.2139, + "step": 322 + }, + { + "epoch": 0.1987539420044612, + "grad_norm": 6.892303943634033, + "learning_rate": 1.993169615850522e-05, + "loss": 2.2661, + "step": 323 + }, + { + "epoch": 0.19936927928620876, + "grad_norm": 5.967850208282471, + "learning_rate": 1.993091867086296e-05, + "loss": 2.2654, + "step": 324 + }, + { + "epoch": 0.19998461656795632, + "grad_norm": 5.933230400085449, + "learning_rate": 1.9930136798577898e-05, + "loss": 2.1275, + "step": 325 + }, + { + "epoch": 0.20059995384970386, + "grad_norm": 6.223001956939697, + "learning_rate": 1.9929350541995237e-05, + "loss": 2.0281, + "step": 326 + }, + { + "epoch": 0.20121529113145142, + "grad_norm": 5.567825794219971, + "learning_rate": 1.992855990146213e-05, + "loss": 2.3354, + "step": 327 + }, + { + "epoch": 0.201830628413199, + "grad_norm": 7.758937358856201, + "learning_rate": 1.9927764877327654e-05, + "loss": 2.2166, + "step": 328 + }, + { + "epoch": 0.20244596569494655, + "grad_norm": 7.360799789428711, + "learning_rate": 1.9926965469942826e-05, + "loss": 2.0766, + "step": 329 + }, + { + "epoch": 0.2030613029766941, + "grad_norm": 6.316734313964844, + "learning_rate": 1.9926161679660588e-05, + "loss": 2.0725, + "step": 330 + }, + { + "epoch": 0.20367664025844165, + "grad_norm": 
7.628759384155273, + "learning_rate": 1.9925353506835827e-05, + "loss": 1.9973, + "step": 331 + }, + { + "epoch": 0.2042919775401892, + "grad_norm": 6.809196949005127, + "learning_rate": 1.9924540951825368e-05, + "loss": 2.2608, + "step": 332 + }, + { + "epoch": 0.20490731482193678, + "grad_norm": 6.040438652038574, + "learning_rate": 1.9923724014987963e-05, + "loss": 2.0318, + "step": 333 + }, + { + "epoch": 0.20552265210368434, + "grad_norm": 6.304820537567139, + "learning_rate": 1.9922902696684295e-05, + "loss": 2.229, + "step": 334 + }, + { + "epoch": 0.2061379893854319, + "grad_norm": 6.82254695892334, + "learning_rate": 1.9922076997277e-05, + "loss": 2.4772, + "step": 335 + }, + { + "epoch": 0.20675332666717944, + "grad_norm": 6.760186195373535, + "learning_rate": 1.9921246917130627e-05, + "loss": 2.1211, + "step": 336 + }, + { + "epoch": 0.207368663948927, + "grad_norm": 9.429368019104004, + "learning_rate": 1.992041245661167e-05, + "loss": 2.0931, + "step": 337 + }, + { + "epoch": 0.20798400123067456, + "grad_norm": 6.755245685577393, + "learning_rate": 1.9919573616088558e-05, + "loss": 2.1959, + "step": 338 + }, + { + "epoch": 0.20859933851242213, + "grad_norm": 6.21528959274292, + "learning_rate": 1.9918730395931648e-05, + "loss": 1.9591, + "step": 339 + }, + { + "epoch": 0.2092146757941697, + "grad_norm": 7.1138739585876465, + "learning_rate": 1.9917882796513238e-05, + "loss": 2.2058, + "step": 340 + }, + { + "epoch": 0.20983001307591723, + "grad_norm": 6.409053802490234, + "learning_rate": 1.991703081820755e-05, + "loss": 2.3067, + "step": 341 + }, + { + "epoch": 0.2104453503576648, + "grad_norm": 6.518154144287109, + "learning_rate": 1.991617446139075e-05, + "loss": 2.273, + "step": 342 + }, + { + "epoch": 0.21106068763941235, + "grad_norm": 8.184784889221191, + "learning_rate": 1.9915313726440922e-05, + "loss": 2.1472, + "step": 343 + }, + { + "epoch": 0.21167602492115992, + "grad_norm": 6.450830936431885, + "learning_rate": 1.9914448613738107e-05, + "loss": 2.1798, + "step": 344 + }, + { + "epoch": 0.21229136220290748, + "grad_norm": 5.723616600036621, + "learning_rate": 1.9913579123664253e-05, + "loss": 2.1553, + "step": 345 + }, + { + "epoch": 0.21290669948465502, + "grad_norm": 5.89141321182251, + "learning_rate": 1.9912705256603258e-05, + "loss": 2.2372, + "step": 346 + }, + { + "epoch": 0.21352203676640258, + "grad_norm": 7.175257205963135, + "learning_rate": 1.9911827012940944e-05, + "loss": 2.1302, + "step": 347 + }, + { + "epoch": 0.21413737404815014, + "grad_norm": 8.261730194091797, + "learning_rate": 1.9910944393065073e-05, + "loss": 1.9793, + "step": 348 + }, + { + "epoch": 0.2147527113298977, + "grad_norm": 7.452632427215576, + "learning_rate": 1.9910057397365328e-05, + "loss": 2.2237, + "step": 349 + }, + { + "epoch": 0.21536804861164527, + "grad_norm": 8.374231338500977, + "learning_rate": 1.990916602623333e-05, + "loss": 2.2178, + "step": 350 + }, + { + "epoch": 0.2159833858933928, + "grad_norm": 5.048969268798828, + "learning_rate": 1.9908270280062643e-05, + "loss": 2.2253, + "step": 351 + }, + { + "epoch": 0.21659872317514037, + "grad_norm": 6.6445746421813965, + "learning_rate": 1.990737015924874e-05, + "loss": 2.0489, + "step": 352 + }, + { + "epoch": 0.21721406045688793, + "grad_norm": 6.215426921844482, + "learning_rate": 1.990646566418904e-05, + "loss": 2.3782, + "step": 353 + }, + { + "epoch": 0.2178293977386355, + "grad_norm": 6.37086820602417, + "learning_rate": 1.9905556795282894e-05, + "loss": 2.2275, + "step": 354 + }, + { + "epoch": 
0.21844473502038306, + "grad_norm": 8.854802131652832, + "learning_rate": 1.9904643552931578e-05, + "loss": 2.1179, + "step": 355 + }, + { + "epoch": 0.2190600723021306, + "grad_norm": 6.347991943359375, + "learning_rate": 1.99037259375383e-05, + "loss": 2.1986, + "step": 356 + }, + { + "epoch": 0.21967540958387816, + "grad_norm": 7.154611110687256, + "learning_rate": 1.9902803949508206e-05, + "loss": 2.1958, + "step": 357 + }, + { + "epoch": 0.22029074686562572, + "grad_norm": 5.69920015335083, + "learning_rate": 1.990187758924836e-05, + "loss": 2.1313, + "step": 358 + }, + { + "epoch": 0.22090608414737328, + "grad_norm": 5.44937801361084, + "learning_rate": 1.9900946857167768e-05, + "loss": 2.1712, + "step": 359 + }, + { + "epoch": 0.22152142142912085, + "grad_norm": 6.054533958435059, + "learning_rate": 1.9900011753677363e-05, + "loss": 2.1489, + "step": 360 + }, + { + "epoch": 0.22213675871086838, + "grad_norm": 8.734567642211914, + "learning_rate": 1.989907227919e-05, + "loss": 2.3008, + "step": 361 + }, + { + "epoch": 0.22275209599261594, + "grad_norm": 7.4444193840026855, + "learning_rate": 1.9898128434120477e-05, + "loss": 2.1316, + "step": 362 + }, + { + "epoch": 0.2233674332743635, + "grad_norm": 7.923347473144531, + "learning_rate": 1.989718021888551e-05, + "loss": 2.1887, + "step": 363 + }, + { + "epoch": 0.22398277055611107, + "grad_norm": 8.016619682312012, + "learning_rate": 1.9896227633903752e-05, + "loss": 2.2835, + "step": 364 + }, + { + "epoch": 0.22459810783785863, + "grad_norm": 6.930282115936279, + "learning_rate": 1.989527067959578e-05, + "loss": 2.3075, + "step": 365 + }, + { + "epoch": 0.22521344511960617, + "grad_norm": 6.000427722930908, + "learning_rate": 1.989430935638411e-05, + "loss": 1.9957, + "step": 366 + }, + { + "epoch": 0.22582878240135373, + "grad_norm": 8.509255409240723, + "learning_rate": 1.9893343664693177e-05, + "loss": 2.2192, + "step": 367 + }, + { + "epoch": 0.2264441196831013, + "grad_norm": 5.632628917694092, + "learning_rate": 1.989237360494934e-05, + "loss": 1.9795, + "step": 368 + }, + { + "epoch": 0.22705945696484886, + "grad_norm": 6.647215366363525, + "learning_rate": 1.989139917758091e-05, + "loss": 2.1924, + "step": 369 + }, + { + "epoch": 0.22767479424659642, + "grad_norm": 7.156121730804443, + "learning_rate": 1.9890420383018094e-05, + "loss": 2.0981, + "step": 370 + }, + { + "epoch": 0.228290131528344, + "grad_norm": 7.252394199371338, + "learning_rate": 1.9889437221693053e-05, + "loss": 2.2059, + "step": 371 + }, + { + "epoch": 0.22890546881009152, + "grad_norm": 6.42435884475708, + "learning_rate": 1.9888449694039866e-05, + "loss": 2.166, + "step": 372 + }, + { + "epoch": 0.22952080609183909, + "grad_norm": 9.277846336364746, + "learning_rate": 1.988745780049454e-05, + "loss": 2.214, + "step": 373 + }, + { + "epoch": 0.23013614337358665, + "grad_norm": 7.816690444946289, + "learning_rate": 1.9886461541495007e-05, + "loss": 2.2179, + "step": 374 + }, + { + "epoch": 0.2307514806553342, + "grad_norm": 6.463733673095703, + "learning_rate": 1.9885460917481137e-05, + "loss": 2.3004, + "step": 375 + }, + { + "epoch": 0.23136681793708178, + "grad_norm": 7.256739139556885, + "learning_rate": 1.988445592889471e-05, + "loss": 2.0532, + "step": 376 + }, + { + "epoch": 0.2319821552188293, + "grad_norm": 7.0301408767700195, + "learning_rate": 1.9883446576179454e-05, + "loss": 2.3259, + "step": 377 + }, + { + "epoch": 0.23259749250057687, + "grad_norm": 7.148013591766357, + "learning_rate": 1.9882432859781002e-05, + "loss": 2.036, + "step": 378 
+ }, + { + "epoch": 0.23321282978232444, + "grad_norm": 6.145988941192627, + "learning_rate": 1.988141478014693e-05, + "loss": 2.102, + "step": 379 + }, + { + "epoch": 0.233828167064072, + "grad_norm": 5.533059120178223, + "learning_rate": 1.9880392337726736e-05, + "loss": 2.3002, + "step": 380 + }, + { + "epoch": 0.23444350434581956, + "grad_norm": 7.125953674316406, + "learning_rate": 1.987936553297184e-05, + "loss": 2.039, + "step": 381 + }, + { + "epoch": 0.2350588416275671, + "grad_norm": 6.224822998046875, + "learning_rate": 1.987833436633559e-05, + "loss": 1.9744, + "step": 382 + }, + { + "epoch": 0.23567417890931466, + "grad_norm": 5.97299861907959, + "learning_rate": 1.9877298838273263e-05, + "loss": 2.135, + "step": 383 + }, + { + "epoch": 0.23628951619106223, + "grad_norm": 5.2293195724487305, + "learning_rate": 1.987625894924206e-05, + "loss": 2.0927, + "step": 384 + }, + { + "epoch": 0.2369048534728098, + "grad_norm": 7.606610298156738, + "learning_rate": 1.9875214699701108e-05, + "loss": 2.2124, + "step": 385 + }, + { + "epoch": 0.23752019075455735, + "grad_norm": 5.683553695678711, + "learning_rate": 1.987416609011145e-05, + "loss": 2.2802, + "step": 386 + }, + { + "epoch": 0.2381355280363049, + "grad_norm": 6.466892719268799, + "learning_rate": 1.9873113120936074e-05, + "loss": 1.9912, + "step": 387 + }, + { + "epoch": 0.23875086531805245, + "grad_norm": 6.861242294311523, + "learning_rate": 1.9872055792639875e-05, + "loss": 2.0031, + "step": 388 + }, + { + "epoch": 0.23936620259980002, + "grad_norm": 7.965638160705566, + "learning_rate": 1.9870994105689675e-05, + "loss": 1.9883, + "step": 389 + }, + { + "epoch": 0.23998153988154758, + "grad_norm": 5.721940517425537, + "learning_rate": 1.9869928060554234e-05, + "loss": 2.0458, + "step": 390 + }, + { + "epoch": 0.24059687716329514, + "grad_norm": 7.6345109939575195, + "learning_rate": 1.9868857657704215e-05, + "loss": 2.1741, + "step": 391 + }, + { + "epoch": 0.24121221444504268, + "grad_norm": 7.349871635437012, + "learning_rate": 1.986778289761222e-05, + "loss": 2.2056, + "step": 392 + }, + { + "epoch": 0.24182755172679024, + "grad_norm": 6.881843566894531, + "learning_rate": 1.9866703780752777e-05, + "loss": 2.1428, + "step": 393 + }, + { + "epoch": 0.2424428890085378, + "grad_norm": 7.238661289215088, + "learning_rate": 1.9865620307602325e-05, + "loss": 2.2158, + "step": 394 + }, + { + "epoch": 0.24305822629028537, + "grad_norm": 6.1770405769348145, + "learning_rate": 1.9864532478639233e-05, + "loss": 2.2703, + "step": 395 + }, + { + "epoch": 0.24367356357203293, + "grad_norm": 7.86658239364624, + "learning_rate": 1.9863440294343793e-05, + "loss": 2.1958, + "step": 396 + }, + { + "epoch": 0.24428890085378047, + "grad_norm": 6.18236780166626, + "learning_rate": 1.9862343755198227e-05, + "loss": 2.1716, + "step": 397 + }, + { + "epoch": 0.24490423813552803, + "grad_norm": 7.2332444190979, + "learning_rate": 1.9861242861686665e-05, + "loss": 2.2062, + "step": 398 + }, + { + "epoch": 0.2455195754172756, + "grad_norm": 7.3182244300842285, + "learning_rate": 1.986013761429517e-05, + "loss": 2.0008, + "step": 399 + }, + { + "epoch": 0.24613491269902316, + "grad_norm": 9.727232933044434, + "learning_rate": 1.9859028013511725e-05, + "loss": 2.4275, + "step": 400 + }, + { + "epoch": 0.24675024998077072, + "grad_norm": 7.137484550476074, + "learning_rate": 1.9857914059826232e-05, + "loss": 1.9977, + "step": 401 + }, + { + "epoch": 0.24736558726251826, + "grad_norm": 6.848637104034424, + "learning_rate": 1.985679575373052e-05, + 
"loss": 2.1361, + "step": 402 + }, + { + "epoch": 0.24798092454426582, + "grad_norm": 6.6387152671813965, + "learning_rate": 1.9855673095718338e-05, + "loss": 2.2431, + "step": 403 + }, + { + "epoch": 0.24859626182601338, + "grad_norm": 6.569985389709473, + "learning_rate": 1.9854546086285353e-05, + "loss": 2.1993, + "step": 404 + }, + { + "epoch": 0.24921159910776094, + "grad_norm": 7.186047554016113, + "learning_rate": 1.9853414725929157e-05, + "loss": 1.9766, + "step": 405 + }, + { + "epoch": 0.2498269363895085, + "grad_norm": 5.881412982940674, + "learning_rate": 1.985227901514926e-05, + "loss": 2.0708, + "step": 406 + }, + { + "epoch": 0.25044227367125604, + "grad_norm": 6.3719563484191895, + "learning_rate": 1.9851138954447097e-05, + "loss": 2.0864, + "step": 407 + }, + { + "epoch": 0.2510576109530036, + "grad_norm": 5.641948699951172, + "learning_rate": 1.984999454432602e-05, + "loss": 2.2062, + "step": 408 + }, + { + "epoch": 0.25167294823475117, + "grad_norm": 5.384571075439453, + "learning_rate": 1.9848845785291305e-05, + "loss": 2.0688, + "step": 409 + }, + { + "epoch": 0.25228828551649873, + "grad_norm": 6.254296779632568, + "learning_rate": 1.9847692677850142e-05, + "loss": 2.2702, + "step": 410 + }, + { + "epoch": 0.2529036227982463, + "grad_norm": 7.017115116119385, + "learning_rate": 1.9846535222511648e-05, + "loss": 2.154, + "step": 411 + }, + { + "epoch": 0.25351896007999386, + "grad_norm": 5.954420566558838, + "learning_rate": 1.984537341978685e-05, + "loss": 2.1252, + "step": 412 + }, + { + "epoch": 0.2541342973617414, + "grad_norm": 5.733051776885986, + "learning_rate": 1.984420727018871e-05, + "loss": 2.1848, + "step": 413 + }, + { + "epoch": 0.254749634643489, + "grad_norm": 6.673903465270996, + "learning_rate": 1.984303677423209e-05, + "loss": 2.1333, + "step": 414 + }, + { + "epoch": 0.2553649719252365, + "grad_norm": 6.9880828857421875, + "learning_rate": 1.9841861932433784e-05, + "loss": 2.0923, + "step": 415 + }, + { + "epoch": 0.25598030920698406, + "grad_norm": 8.211299896240234, + "learning_rate": 1.984068274531251e-05, + "loss": 2.094, + "step": 416 + }, + { + "epoch": 0.2565956464887316, + "grad_norm": 8.913713455200195, + "learning_rate": 1.983949921338888e-05, + "loss": 1.9641, + "step": 417 + }, + { + "epoch": 0.2572109837704792, + "grad_norm": 6.530969619750977, + "learning_rate": 1.9838311337185452e-05, + "loss": 2.1846, + "step": 418 + }, + { + "epoch": 0.25782632105222675, + "grad_norm": 6.044552803039551, + "learning_rate": 1.983711911722669e-05, + "loss": 2.0655, + "step": 419 + }, + { + "epoch": 0.2584416583339743, + "grad_norm": 8.713349342346191, + "learning_rate": 1.983592255403897e-05, + "loss": 2.071, + "step": 420 + }, + { + "epoch": 0.2590569956157219, + "grad_norm": 7.177270889282227, + "learning_rate": 1.9834721648150593e-05, + "loss": 2.2534, + "step": 421 + }, + { + "epoch": 0.25967233289746944, + "grad_norm": 7.610826015472412, + "learning_rate": 1.983351640009178e-05, + "loss": 2.1748, + "step": 422 + }, + { + "epoch": 0.260287670179217, + "grad_norm": 7.266585826873779, + "learning_rate": 1.9832306810394665e-05, + "loss": 2.1405, + "step": 423 + }, + { + "epoch": 0.26090300746096456, + "grad_norm": 6.7067718505859375, + "learning_rate": 1.98310928795933e-05, + "loss": 1.9372, + "step": 424 + }, + { + "epoch": 0.26151834474271207, + "grad_norm": 6.486339569091797, + "learning_rate": 1.9829874608223646e-05, + "loss": 2.3964, + "step": 425 + }, + { + "epoch": 0.26213368202445964, + "grad_norm": 7.647077560424805, + "learning_rate": 
1.9828651996823593e-05, + "loss": 2.2145, + "step": 426 + }, + { + "epoch": 0.2627490193062072, + "grad_norm": 7.769103050231934, + "learning_rate": 1.982742504593294e-05, + "loss": 1.8302, + "step": 427 + }, + { + "epoch": 0.26336435658795476, + "grad_norm": 7.385188102722168, + "learning_rate": 1.9826193756093407e-05, + "loss": 2.432, + "step": 428 + }, + { + "epoch": 0.2639796938697023, + "grad_norm": 6.599915027618408, + "learning_rate": 1.9824958127848618e-05, + "loss": 1.9448, + "step": 429 + }, + { + "epoch": 0.2645950311514499, + "grad_norm": 7.376984596252441, + "learning_rate": 1.9823718161744127e-05, + "loss": 2.048, + "step": 430 + }, + { + "epoch": 0.26521036843319745, + "grad_norm": 6.1908111572265625, + "learning_rate": 1.9822473858327398e-05, + "loss": 1.9834, + "step": 431 + }, + { + "epoch": 0.265825705714945, + "grad_norm": 6.363672733306885, + "learning_rate": 1.98212252181478e-05, + "loss": 2.1676, + "step": 432 + }, + { + "epoch": 0.2664410429966926, + "grad_norm": 7.182098388671875, + "learning_rate": 1.9819972241756637e-05, + "loss": 2.2249, + "step": 433 + }, + { + "epoch": 0.26705638027844014, + "grad_norm": 6.424217700958252, + "learning_rate": 1.9818714929707106e-05, + "loss": 2.0555, + "step": 434 + }, + { + "epoch": 0.2676717175601877, + "grad_norm": 7.257086277008057, + "learning_rate": 1.9817453282554334e-05, + "loss": 1.9441, + "step": 435 + }, + { + "epoch": 0.2682870548419352, + "grad_norm": 7.239059925079346, + "learning_rate": 1.981618730085536e-05, + "loss": 1.9012, + "step": 436 + }, + { + "epoch": 0.2689023921236828, + "grad_norm": 6.948366165161133, + "learning_rate": 1.9814916985169123e-05, + "loss": 1.9423, + "step": 437 + }, + { + "epoch": 0.26951772940543034, + "grad_norm": 7.727968215942383, + "learning_rate": 1.9813642336056488e-05, + "loss": 1.9616, + "step": 438 + }, + { + "epoch": 0.2701330666871779, + "grad_norm": 6.442748069763184, + "learning_rate": 1.981236335408024e-05, + "loss": 1.9863, + "step": 439 + }, + { + "epoch": 0.27074840396892547, + "grad_norm": 6.55586051940918, + "learning_rate": 1.9811080039805052e-05, + "loss": 2.2342, + "step": 440 + }, + { + "epoch": 0.27136374125067303, + "grad_norm": 6.166406154632568, + "learning_rate": 1.9809792393797543e-05, + "loss": 2.1338, + "step": 441 + }, + { + "epoch": 0.2719790785324206, + "grad_norm": 8.40396785736084, + "learning_rate": 1.9808500416626218e-05, + "loss": 2.221, + "step": 442 + }, + { + "epoch": 0.27259441581416816, + "grad_norm": 6.885834217071533, + "learning_rate": 1.98072041088615e-05, + "loss": 2.137, + "step": 443 + }, + { + "epoch": 0.2732097530959157, + "grad_norm": 5.4751362800598145, + "learning_rate": 1.9805903471075735e-05, + "loss": 2.1725, + "step": 444 + }, + { + "epoch": 0.2738250903776633, + "grad_norm": 8.195385932922363, + "learning_rate": 1.980459850384317e-05, + "loss": 2.2382, + "step": 445 + }, + { + "epoch": 0.2744404276594108, + "grad_norm": 6.750603675842285, + "learning_rate": 1.9803289207739967e-05, + "loss": 2.2043, + "step": 446 + }, + { + "epoch": 0.27505576494115835, + "grad_norm": 6.691913604736328, + "learning_rate": 1.98019755833442e-05, + "loss": 2.1685, + "step": 447 + }, + { + "epoch": 0.2756711022229059, + "grad_norm": 5.762302875518799, + "learning_rate": 1.980065763123585e-05, + "loss": 2.0506, + "step": 448 + }, + { + "epoch": 0.2762864395046535, + "grad_norm": 6.193378925323486, + "learning_rate": 1.9799335351996814e-05, + "loss": 2.071, + "step": 449 + }, + { + "epoch": 0.27690177678640104, + "grad_norm": 7.262932300567627, + 
"learning_rate": 1.9798008746210894e-05, + "loss": 2.1806, + "step": 450 + }, + { + "epoch": 0.2775171140681486, + "grad_norm": 7.697564125061035, + "learning_rate": 1.9796677814463812e-05, + "loss": 2.2237, + "step": 451 + }, + { + "epoch": 0.27813245134989617, + "grad_norm": 6.243818759918213, + "learning_rate": 1.9795342557343188e-05, + "loss": 1.9068, + "step": 452 + }, + { + "epoch": 0.27874778863164373, + "grad_norm": 9.060500144958496, + "learning_rate": 1.979400297543856e-05, + "loss": 1.8656, + "step": 453 + }, + { + "epoch": 0.2793631259133913, + "grad_norm": 5.65220832824707, + "learning_rate": 1.9792659069341372e-05, + "loss": 2.1506, + "step": 454 + }, + { + "epoch": 0.27997846319513886, + "grad_norm": 6.04824161529541, + "learning_rate": 1.9791310839644976e-05, + "loss": 2.1566, + "step": 455 + }, + { + "epoch": 0.28059380047688637, + "grad_norm": 7.729449272155762, + "learning_rate": 1.9789958286944632e-05, + "loss": 2.5844, + "step": 456 + }, + { + "epoch": 0.28120913775863393, + "grad_norm": 6.98706579208374, + "learning_rate": 1.978860141183752e-05, + "loss": 1.9736, + "step": 457 + }, + { + "epoch": 0.2818244750403815, + "grad_norm": 6.076931476593018, + "learning_rate": 1.9787240214922713e-05, + "loss": 2.3308, + "step": 458 + }, + { + "epoch": 0.28243981232212906, + "grad_norm": 6.798956394195557, + "learning_rate": 1.97858746968012e-05, + "loss": 2.1014, + "step": 459 + }, + { + "epoch": 0.2830551496038766, + "grad_norm": 5.7830681800842285, + "learning_rate": 1.9784504858075883e-05, + "loss": 2.1438, + "step": 460 + }, + { + "epoch": 0.2836704868856242, + "grad_norm": 6.707157135009766, + "learning_rate": 1.9783130699351556e-05, + "loss": 2.0092, + "step": 461 + }, + { + "epoch": 0.28428582416737175, + "grad_norm": 6.2394256591796875, + "learning_rate": 1.9781752221234932e-05, + "loss": 2.0457, + "step": 462 + }, + { + "epoch": 0.2849011614491193, + "grad_norm": 10.760422706604004, + "learning_rate": 1.9780369424334633e-05, + "loss": 2.1643, + "step": 463 + }, + { + "epoch": 0.2855164987308669, + "grad_norm": 7.9149932861328125, + "learning_rate": 1.977898230926118e-05, + "loss": 2.1123, + "step": 464 + }, + { + "epoch": 0.28613183601261444, + "grad_norm": 6.866069316864014, + "learning_rate": 1.9777590876627012e-05, + "loss": 2.0572, + "step": 465 + }, + { + "epoch": 0.28674717329436195, + "grad_norm": 7.102480888366699, + "learning_rate": 1.9776195127046453e-05, + "loss": 2.2017, + "step": 466 + }, + { + "epoch": 0.2873625105761095, + "grad_norm": 6.655081272125244, + "learning_rate": 1.9774795061135754e-05, + "loss": 2.0097, + "step": 467 + }, + { + "epoch": 0.28797784785785707, + "grad_norm": 6.171550273895264, + "learning_rate": 1.9773390679513068e-05, + "loss": 2.0017, + "step": 468 + }, + { + "epoch": 0.28859318513960464, + "grad_norm": 7.372035503387451, + "learning_rate": 1.977198198279844e-05, + "loss": 2.0606, + "step": 469 + }, + { + "epoch": 0.2892085224213522, + "grad_norm": 8.590441703796387, + "learning_rate": 1.9770568971613837e-05, + "loss": 2.1106, + "step": 470 + }, + { + "epoch": 0.28982385970309976, + "grad_norm": 6.321943283081055, + "learning_rate": 1.9769151646583122e-05, + "loss": 2.0165, + "step": 471 + }, + { + "epoch": 0.2904391969848473, + "grad_norm": 7.14409065246582, + "learning_rate": 1.9767730008332062e-05, + "loss": 1.9506, + "step": 472 + }, + { + "epoch": 0.2910545342665949, + "grad_norm": 6.794196605682373, + "learning_rate": 1.9766304057488335e-05, + "loss": 2.1155, + "step": 473 + }, + { + "epoch": 0.29166987154834245, + 
"grad_norm": 6.779558181762695, + "learning_rate": 1.9764873794681514e-05, + "loss": 2.1093, + "step": 474 + }, + { + "epoch": 0.29228520883009, + "grad_norm": 6.708950519561768, + "learning_rate": 1.9763439220543084e-05, + "loss": 2.0918, + "step": 475 + }, + { + "epoch": 0.2929005461118376, + "grad_norm": 5.461427688598633, + "learning_rate": 1.976200033570643e-05, + "loss": 2.1408, + "step": 476 + }, + { + "epoch": 0.2935158833935851, + "grad_norm": 6.7533135414123535, + "learning_rate": 1.9760557140806838e-05, + "loss": 2.1982, + "step": 477 + }, + { + "epoch": 0.29413122067533265, + "grad_norm": 5.827248573303223, + "learning_rate": 1.97591096364815e-05, + "loss": 2.3502, + "step": 478 + }, + { + "epoch": 0.2947465579570802, + "grad_norm": 8.197000503540039, + "learning_rate": 1.9757657823369508e-05, + "loss": 1.8716, + "step": 479 + }, + { + "epoch": 0.2953618952388278, + "grad_norm": 6.4852118492126465, + "learning_rate": 1.9756201702111863e-05, + "loss": 2.0202, + "step": 480 + }, + { + "epoch": 0.29597723252057534, + "grad_norm": 6.8935723304748535, + "learning_rate": 1.975474127335146e-05, + "loss": 2.2148, + "step": 481 + }, + { + "epoch": 0.2965925698023229, + "grad_norm": 6.659445285797119, + "learning_rate": 1.97532765377331e-05, + "loss": 2.0186, + "step": 482 + }, + { + "epoch": 0.29720790708407047, + "grad_norm": 6.4208664894104, + "learning_rate": 1.9751807495903484e-05, + "loss": 2.1471, + "step": 483 + }, + { + "epoch": 0.29782324436581803, + "grad_norm": 7.608399391174316, + "learning_rate": 1.9750334148511218e-05, + "loss": 2.1458, + "step": 484 + }, + { + "epoch": 0.2984385816475656, + "grad_norm": 6.308159351348877, + "learning_rate": 1.9748856496206804e-05, + "loss": 1.8726, + "step": 485 + }, + { + "epoch": 0.29905391892931316, + "grad_norm": 6.627085208892822, + "learning_rate": 1.9747374539642643e-05, + "loss": 2.0269, + "step": 486 + }, + { + "epoch": 0.29966925621106066, + "grad_norm": 5.6046247482299805, + "learning_rate": 1.9745888279473046e-05, + "loss": 2.0686, + "step": 487 + }, + { + "epoch": 0.3002845934928082, + "grad_norm": 5.383586883544922, + "learning_rate": 1.9744397716354214e-05, + "loss": 2.02, + "step": 488 + }, + { + "epoch": 0.3008999307745558, + "grad_norm": 6.852762699127197, + "learning_rate": 1.9742902850944257e-05, + "loss": 2.2452, + "step": 489 + }, + { + "epoch": 0.30151526805630335, + "grad_norm": 6.156121253967285, + "learning_rate": 1.9741403683903172e-05, + "loss": 2.0801, + "step": 490 + }, + { + "epoch": 0.3021306053380509, + "grad_norm": 6.44775915145874, + "learning_rate": 1.973990021589287e-05, + "loss": 2.1522, + "step": 491 + }, + { + "epoch": 0.3027459426197985, + "grad_norm": 6.344707489013672, + "learning_rate": 1.9738392447577148e-05, + "loss": 2.2753, + "step": 492 + }, + { + "epoch": 0.30336127990154604, + "grad_norm": 7.356759548187256, + "learning_rate": 1.973688037962171e-05, + "loss": 2.0733, + "step": 493 + }, + { + "epoch": 0.3039766171832936, + "grad_norm": 8.45927906036377, + "learning_rate": 1.9735364012694156e-05, + "loss": 2.0687, + "step": 494 + }, + { + "epoch": 0.30459195446504117, + "grad_norm": 6.584293365478516, + "learning_rate": 1.9733843347463982e-05, + "loss": 2.1884, + "step": 495 + }, + { + "epoch": 0.30520729174678873, + "grad_norm": 8.901890754699707, + "learning_rate": 1.9732318384602587e-05, + "loss": 2.1999, + "step": 496 + }, + { + "epoch": 0.30582262902853624, + "grad_norm": 7.216164588928223, + "learning_rate": 1.973078912478326e-05, + "loss": 2.1833, + "step": 497 + }, + { + "epoch": 
0.3064379663102838, + "grad_norm": 5.170838356018066, + "learning_rate": 1.972925556868119e-05, + "loss": 2.0456, + "step": 498 + }, + { + "epoch": 0.30705330359203137, + "grad_norm": 7.901139736175537, + "learning_rate": 1.972771771697347e-05, + "loss": 1.9971, + "step": 499 + }, + { + "epoch": 0.30766864087377893, + "grad_norm": 8.008712768554688, + "learning_rate": 1.972617557033908e-05, + "loss": 2.1032, + "step": 500 + }, + { + "epoch": 0.3082839781555265, + "grad_norm": 5.95906925201416, + "learning_rate": 1.97246291294589e-05, + "loss": 2.2351, + "step": 501 + }, + { + "epoch": 0.30889931543727406, + "grad_norm": 5.90054988861084, + "learning_rate": 1.9723078395015703e-05, + "loss": 2.0537, + "step": 502 + }, + { + "epoch": 0.3095146527190216, + "grad_norm": 8.347711563110352, + "learning_rate": 1.9721523367694167e-05, + "loss": 2.1036, + "step": 503 + }, + { + "epoch": 0.3101299900007692, + "grad_norm": 7.148227214813232, + "learning_rate": 1.9719964048180853e-05, + "loss": 1.8943, + "step": 504 + }, + { + "epoch": 0.31074532728251675, + "grad_norm": 6.4781365394592285, + "learning_rate": 1.9718400437164225e-05, + "loss": 2.3394, + "step": 505 + }, + { + "epoch": 0.3113606645642643, + "grad_norm": 6.061545372009277, + "learning_rate": 1.9716832535334636e-05, + "loss": 2.1535, + "step": 506 + }, + { + "epoch": 0.3119760018460118, + "grad_norm": 6.343735218048096, + "learning_rate": 1.971526034338435e-05, + "loss": 2.1222, + "step": 507 + }, + { + "epoch": 0.3125913391277594, + "grad_norm": 5.510926246643066, + "learning_rate": 1.9713683862007494e-05, + "loss": 2.0234, + "step": 508 + }, + { + "epoch": 0.31320667640950695, + "grad_norm": 6.479781627655029, + "learning_rate": 1.9712103091900122e-05, + "loss": 1.9711, + "step": 509 + }, + { + "epoch": 0.3138220136912545, + "grad_norm": 5.80315637588501, + "learning_rate": 1.9710518033760158e-05, + "loss": 2.1488, + "step": 510 + }, + { + "epoch": 0.31443735097300207, + "grad_norm": 6.781944274902344, + "learning_rate": 1.970892868828743e-05, + "loss": 2.0369, + "step": 511 + }, + { + "epoch": 0.31505268825474964, + "grad_norm": 8.231182098388672, + "learning_rate": 1.9707335056183657e-05, + "loss": 2.186, + "step": 512 + }, + { + "epoch": 0.3156680255364972, + "grad_norm": 6.895247936248779, + "learning_rate": 1.970573713815245e-05, + "loss": 2.0092, + "step": 513 + }, + { + "epoch": 0.31628336281824476, + "grad_norm": 7.921633720397949, + "learning_rate": 1.9704134934899317e-05, + "loss": 2.2858, + "step": 514 + }, + { + "epoch": 0.3168987000999923, + "grad_norm": 9.535151481628418, + "learning_rate": 1.9702528447131647e-05, + "loss": 2.2236, + "step": 515 + }, + { + "epoch": 0.3175140373817399, + "grad_norm": 6.191679954528809, + "learning_rate": 1.9700917675558733e-05, + "loss": 2.0798, + "step": 516 + }, + { + "epoch": 0.3181293746634874, + "grad_norm": 6.227905750274658, + "learning_rate": 1.969930262089175e-05, + "loss": 2.1722, + "step": 517 + }, + { + "epoch": 0.31874471194523496, + "grad_norm": 6.13393497467041, + "learning_rate": 1.969768328384377e-05, + "loss": 1.9873, + "step": 518 + }, + { + "epoch": 0.3193600492269825, + "grad_norm": 8.439413070678711, + "learning_rate": 1.969605966512975e-05, + "loss": 2.1157, + "step": 519 + }, + { + "epoch": 0.3199753865087301, + "grad_norm": 6.620768070220947, + "learning_rate": 1.9694431765466545e-05, + "loss": 2.0231, + "step": 520 + }, + { + "epoch": 0.32059072379047765, + "grad_norm": 5.9292402267456055, + "learning_rate": 1.9692799585572894e-05, + "loss": 2.1484, + "step": 521 + 
}, + { + "epoch": 0.3212060610722252, + "grad_norm": 6.563449382781982, + "learning_rate": 1.9691163126169428e-05, + "loss": 2.1881, + "step": 522 + }, + { + "epoch": 0.3218213983539728, + "grad_norm": 6.943025588989258, + "learning_rate": 1.9689522387978666e-05, + "loss": 1.9864, + "step": 523 + }, + { + "epoch": 0.32243673563572034, + "grad_norm": 6.0574822425842285, + "learning_rate": 1.9687877371725022e-05, + "loss": 2.0679, + "step": 524 + }, + { + "epoch": 0.3230520729174679, + "grad_norm": 6.164260387420654, + "learning_rate": 1.968622807813479e-05, + "loss": 2.1197, + "step": 525 + }, + { + "epoch": 0.32366741019921547, + "grad_norm": 6.717950820922852, + "learning_rate": 1.9684574507936155e-05, + "loss": 2.3997, + "step": 526 + }, + { + "epoch": 0.32428274748096303, + "grad_norm": 6.690700531005859, + "learning_rate": 1.9682916661859197e-05, + "loss": 2.0646, + "step": 527 + }, + { + "epoch": 0.32489808476271054, + "grad_norm": 5.969173431396484, + "learning_rate": 1.9681254540635877e-05, + "loss": 2.1142, + "step": 528 + }, + { + "epoch": 0.3255134220444581, + "grad_norm": 9.076180458068848, + "learning_rate": 1.9679588145000044e-05, + "loss": 1.7832, + "step": 529 + }, + { + "epoch": 0.32612875932620566, + "grad_norm": 8.094725608825684, + "learning_rate": 1.9677917475687435e-05, + "loss": 1.9554, + "step": 530 + }, + { + "epoch": 0.3267440966079532, + "grad_norm": 8.727152824401855, + "learning_rate": 1.967624253343568e-05, + "loss": 1.9656, + "step": 531 + }, + { + "epoch": 0.3273594338897008, + "grad_norm": 9.448356628417969, + "learning_rate": 1.9674563318984282e-05, + "loss": 1.9395, + "step": 532 + }, + { + "epoch": 0.32797477117144835, + "grad_norm": 7.393074989318848, + "learning_rate": 1.9672879833074642e-05, + "loss": 2.4237, + "step": 533 + }, + { + "epoch": 0.3285901084531959, + "grad_norm": 6.575650215148926, + "learning_rate": 1.9671192076450047e-05, + "loss": 2.0351, + "step": 534 + }, + { + "epoch": 0.3292054457349435, + "grad_norm": 6.532747745513916, + "learning_rate": 1.9669500049855657e-05, + "loss": 2.0543, + "step": 535 + }, + { + "epoch": 0.32982078301669104, + "grad_norm": 7.5740580558776855, + "learning_rate": 1.966780375403853e-05, + "loss": 2.0641, + "step": 536 + }, + { + "epoch": 0.3304361202984386, + "grad_norm": 6.163138389587402, + "learning_rate": 1.966610318974761e-05, + "loss": 2.083, + "step": 537 + }, + { + "epoch": 0.3310514575801861, + "grad_norm": 7.61068058013916, + "learning_rate": 1.9664398357733715e-05, + "loss": 2.3029, + "step": 538 + }, + { + "epoch": 0.3316667948619337, + "grad_norm": 6.8477301597595215, + "learning_rate": 1.9662689258749555e-05, + "loss": 2.1675, + "step": 539 + }, + { + "epoch": 0.33228213214368124, + "grad_norm": 6.140357971191406, + "learning_rate": 1.9660975893549722e-05, + "loss": 2.1448, + "step": 540 + }, + { + "epoch": 0.3328974694254288, + "grad_norm": 6.271470069885254, + "learning_rate": 1.9659258262890683e-05, + "loss": 2.037, + "step": 541 + }, + { + "epoch": 0.33351280670717637, + "grad_norm": 5.54105281829834, + "learning_rate": 1.965753636753081e-05, + "loss": 2.0499, + "step": 542 + }, + { + "epoch": 0.33412814398892393, + "grad_norm": 8.30624771118164, + "learning_rate": 1.9655810208230334e-05, + "loss": 1.9859, + "step": 543 + }, + { + "epoch": 0.3347434812706715, + "grad_norm": 7.357904434204102, + "learning_rate": 1.9654079785751385e-05, + "loss": 2.2433, + "step": 544 + }, + { + "epoch": 0.33535881855241906, + "grad_norm": 5.205164432525635, + "learning_rate": 1.9652345100857966e-05, + 
"loss": 2.2067, + "step": 545 + }, + { + "epoch": 0.3359741558341666, + "grad_norm": 7.190614223480225, + "learning_rate": 1.9650606154315967e-05, + "loss": 2.3405, + "step": 546 + }, + { + "epoch": 0.3365894931159142, + "grad_norm": 6.556515693664551, + "learning_rate": 1.9648862946893158e-05, + "loss": 2.2161, + "step": 547 + }, + { + "epoch": 0.3372048303976617, + "grad_norm": 8.148612022399902, + "learning_rate": 1.964711547935919e-05, + "loss": 2.1046, + "step": 548 + }, + { + "epoch": 0.33782016767940926, + "grad_norm": 6.504887580871582, + "learning_rate": 1.9645363752485594e-05, + "loss": 1.9156, + "step": 549 + }, + { + "epoch": 0.3384355049611568, + "grad_norm": 7.525837421417236, + "learning_rate": 1.9643607767045778e-05, + "loss": 2.1825, + "step": 550 + }, + { + "epoch": 0.3390508422429044, + "grad_norm": 7.36214017868042, + "learning_rate": 1.964184752381504e-05, + "loss": 2.3588, + "step": 551 + }, + { + "epoch": 0.33966617952465195, + "grad_norm": 6.133991241455078, + "learning_rate": 1.964008302357056e-05, + "loss": 2.0536, + "step": 552 + }, + { + "epoch": 0.3402815168063995, + "grad_norm": 7.714048862457275, + "learning_rate": 1.963831426709138e-05, + "loss": 2.0789, + "step": 553 + }, + { + "epoch": 0.34089685408814707, + "grad_norm": 8.27047061920166, + "learning_rate": 1.963654125515843e-05, + "loss": 2.0026, + "step": 554 + }, + { + "epoch": 0.34151219136989464, + "grad_norm": 5.873971939086914, + "learning_rate": 1.963476398855452e-05, + "loss": 2.2546, + "step": 555 + }, + { + "epoch": 0.3421275286516422, + "grad_norm": 5.798715114593506, + "learning_rate": 1.963298246806435e-05, + "loss": 2.1605, + "step": 556 + }, + { + "epoch": 0.34274286593338976, + "grad_norm": 5.822642803192139, + "learning_rate": 1.9631196694474475e-05, + "loss": 2.1069, + "step": 557 + }, + { + "epoch": 0.34335820321513727, + "grad_norm": 6.862981796264648, + "learning_rate": 1.9629406668573346e-05, + "loss": 1.9567, + "step": 558 + }, + { + "epoch": 0.34397354049688483, + "grad_norm": 6.417773246765137, + "learning_rate": 1.9627612391151278e-05, + "loss": 2.1257, + "step": 559 + }, + { + "epoch": 0.3445888777786324, + "grad_norm": 6.440151214599609, + "learning_rate": 1.9625813863000477e-05, + "loss": 2.0136, + "step": 560 + }, + { + "epoch": 0.34520421506037996, + "grad_norm": 5.9266767501831055, + "learning_rate": 1.962401108491502e-05, + "loss": 2.041, + "step": 561 + }, + { + "epoch": 0.3458195523421275, + "grad_norm": 8.566217422485352, + "learning_rate": 1.9622204057690856e-05, + "loss": 2.1334, + "step": 562 + }, + { + "epoch": 0.3464348896238751, + "grad_norm": 6.137258529663086, + "learning_rate": 1.962039278212581e-05, + "loss": 1.9027, + "step": 563 + }, + { + "epoch": 0.34705022690562265, + "grad_norm": 5.238780975341797, + "learning_rate": 1.9618577259019595e-05, + "loss": 2.2347, + "step": 564 + }, + { + "epoch": 0.3476655641873702, + "grad_norm": 6.760177135467529, + "learning_rate": 1.961675748917378e-05, + "loss": 1.9923, + "step": 565 + }, + { + "epoch": 0.3482809014691178, + "grad_norm": 9.06141471862793, + "learning_rate": 1.9614933473391825e-05, + "loss": 2.2409, + "step": 566 + }, + { + "epoch": 0.34889623875086534, + "grad_norm": 6.686290740966797, + "learning_rate": 1.961310521247906e-05, + "loss": 2.2489, + "step": 567 + }, + { + "epoch": 0.3495115760326129, + "grad_norm": 7.451016426086426, + "learning_rate": 1.961127270724269e-05, + "loss": 2.0118, + "step": 568 + }, + { + "epoch": 0.3501269133143604, + "grad_norm": 7.377191066741943, + "learning_rate": 
1.9609435958491785e-05, + "loss": 2.0321, + "step": 569 + }, + { + "epoch": 0.350742250596108, + "grad_norm": 5.707620620727539, + "learning_rate": 1.96075949670373e-05, + "loss": 2.0791, + "step": 570 + }, + { + "epoch": 0.35135758787785554, + "grad_norm": 6.476554870605469, + "learning_rate": 1.9605749733692063e-05, + "loss": 2.1532, + "step": 571 + }, + { + "epoch": 0.3519729251596031, + "grad_norm": 6.502229690551758, + "learning_rate": 1.9603900259270765e-05, + "loss": 2.1074, + "step": 572 + }, + { + "epoch": 0.35258826244135066, + "grad_norm": 5.209996223449707, + "learning_rate": 1.960204654458998e-05, + "loss": 2.1551, + "step": 573 + }, + { + "epoch": 0.3532035997230982, + "grad_norm": 5.662802696228027, + "learning_rate": 1.9600188590468144e-05, + "loss": 2.1909, + "step": 574 + }, + { + "epoch": 0.3538189370048458, + "grad_norm": 7.717959403991699, + "learning_rate": 1.9598326397725577e-05, + "loss": 2.076, + "step": 575 + }, + { + "epoch": 0.35443427428659335, + "grad_norm": 6.848892688751221, + "learning_rate": 1.9596459967184458e-05, + "loss": 2.0946, + "step": 576 + }, + { + "epoch": 0.3550496115683409, + "grad_norm": 7.038410186767578, + "learning_rate": 1.9594589299668846e-05, + "loss": 2.0521, + "step": 577 + }, + { + "epoch": 0.3556649488500885, + "grad_norm": 5.778482913970947, + "learning_rate": 1.9592714396004665e-05, + "loss": 2.0643, + "step": 578 + }, + { + "epoch": 0.356280286131836, + "grad_norm": 6.244689464569092, + "learning_rate": 1.9590835257019715e-05, + "loss": 2.293, + "step": 579 + }, + { + "epoch": 0.35689562341358355, + "grad_norm": 5.427659511566162, + "learning_rate": 1.958895188354366e-05, + "loss": 2.0774, + "step": 580 + }, + { + "epoch": 0.3575109606953311, + "grad_norm": 5.845086574554443, + "learning_rate": 1.9587064276408043e-05, + "loss": 1.95, + "step": 581 + }, + { + "epoch": 0.3581262979770787, + "grad_norm": 6.497982501983643, + "learning_rate": 1.958517243644626e-05, + "loss": 2.1245, + "step": 582 + }, + { + "epoch": 0.35874163525882624, + "grad_norm": 6.696561813354492, + "learning_rate": 1.958327636449359e-05, + "loss": 1.8858, + "step": 583 + }, + { + "epoch": 0.3593569725405738, + "grad_norm": 9.36977767944336, + "learning_rate": 1.9581376061387174e-05, + "loss": 2.1842, + "step": 584 + }, + { + "epoch": 0.35997230982232137, + "grad_norm": 6.4523773193359375, + "learning_rate": 1.9579471527966026e-05, + "loss": 2.1049, + "step": 585 + }, + { + "epoch": 0.36058764710406893, + "grad_norm": 5.803147315979004, + "learning_rate": 1.9577562765071024e-05, + "loss": 2.0309, + "step": 586 + }, + { + "epoch": 0.3612029843858165, + "grad_norm": 7.218697547912598, + "learning_rate": 1.9575649773544914e-05, + "loss": 2.1607, + "step": 587 + }, + { + "epoch": 0.36181832166756406, + "grad_norm": 5.549620151519775, + "learning_rate": 1.957373255423231e-05, + "loss": 2.1316, + "step": 588 + }, + { + "epoch": 0.36243365894931157, + "grad_norm": 6.056726455688477, + "learning_rate": 1.9571811107979687e-05, + "loss": 2.1097, + "step": 589 + }, + { + "epoch": 0.36304899623105913, + "grad_norm": 6.990708351135254, + "learning_rate": 1.956988543563539e-05, + "loss": 2.0171, + "step": 590 + }, + { + "epoch": 0.3636643335128067, + "grad_norm": 6.121316432952881, + "learning_rate": 1.9567955538049643e-05, + "loss": 2.244, + "step": 591 + }, + { + "epoch": 0.36427967079455426, + "grad_norm": 6.361142158508301, + "learning_rate": 1.9566021416074513e-05, + "loss": 2.2806, + "step": 592 + }, + { + "epoch": 0.3648950080763018, + "grad_norm": 7.428301811218262, + 
"learning_rate": 1.9564083070563948e-05, + "loss": 2.1023, + "step": 593 + }, + { + "epoch": 0.3655103453580494, + "grad_norm": 5.722786903381348, + "learning_rate": 1.9562140502373753e-05, + "loss": 2.0652, + "step": 594 + }, + { + "epoch": 0.36612568263979695, + "grad_norm": 5.978518486022949, + "learning_rate": 1.9560193712361597e-05, + "loss": 2.0651, + "step": 595 + }, + { + "epoch": 0.3667410199215445, + "grad_norm": 7.357931137084961, + "learning_rate": 1.955824270138702e-05, + "loss": 2.1884, + "step": 596 + }, + { + "epoch": 0.36735635720329207, + "grad_norm": 6.015055179595947, + "learning_rate": 1.9556287470311418e-05, + "loss": 2.0952, + "step": 597 + }, + { + "epoch": 0.36797169448503964, + "grad_norm": 6.024143218994141, + "learning_rate": 1.9554328019998055e-05, + "loss": 2.0912, + "step": 598 + }, + { + "epoch": 0.36858703176678714, + "grad_norm": 9.52488899230957, + "learning_rate": 1.9552364351312056e-05, + "loss": 2.0105, + "step": 599 + }, + { + "epoch": 0.3692023690485347, + "grad_norm": 5.382646083831787, + "learning_rate": 1.955039646512041e-05, + "loss": 1.9708, + "step": 600 + }, + { + "epoch": 0.36981770633028227, + "grad_norm": 6.482569217681885, + "learning_rate": 1.9548424362291963e-05, + "loss": 2.3013, + "step": 601 + }, + { + "epoch": 0.37043304361202983, + "grad_norm": 6.049602508544922, + "learning_rate": 1.954644804369743e-05, + "loss": 1.9721, + "step": 602 + }, + { + "epoch": 0.3710483808937774, + "grad_norm": 5.909185886383057, + "learning_rate": 1.954446751020939e-05, + "loss": 2.1651, + "step": 603 + }, + { + "epoch": 0.37166371817552496, + "grad_norm": 4.999292850494385, + "learning_rate": 1.9542482762702264e-05, + "loss": 1.8775, + "step": 604 + }, + { + "epoch": 0.3722790554572725, + "grad_norm": 8.2312650680542, + "learning_rate": 1.9540493802052355e-05, + "loss": 1.9429, + "step": 605 + }, + { + "epoch": 0.3728943927390201, + "grad_norm": 6.001389026641846, + "learning_rate": 1.9538500629137814e-05, + "loss": 2.0537, + "step": 606 + }, + { + "epoch": 0.37350973002076765, + "grad_norm": 7.899374008178711, + "learning_rate": 1.953650324483866e-05, + "loss": 2.0021, + "step": 607 + }, + { + "epoch": 0.3741250673025152, + "grad_norm": 7.307419300079346, + "learning_rate": 1.9534501650036764e-05, + "loss": 2.2674, + "step": 608 + }, + { + "epoch": 0.3747404045842628, + "grad_norm": 7.11273717880249, + "learning_rate": 1.9532495845615854e-05, + "loss": 2.3332, + "step": 609 + }, + { + "epoch": 0.3753557418660103, + "grad_norm": 6.872147560119629, + "learning_rate": 1.953048583246153e-05, + "loss": 2.1161, + "step": 610 + }, + { + "epoch": 0.37597107914775785, + "grad_norm": 6.160400390625, + "learning_rate": 1.9528471611461235e-05, + "loss": 2.1318, + "step": 611 + }, + { + "epoch": 0.3765864164295054, + "grad_norm": 5.950971603393555, + "learning_rate": 1.952645318350428e-05, + "loss": 2.3417, + "step": 612 + }, + { + "epoch": 0.377201753711253, + "grad_norm": 8.005498886108398, + "learning_rate": 1.9524430549481827e-05, + "loss": 2.0128, + "step": 613 + }, + { + "epoch": 0.37781709099300054, + "grad_norm": 7.3835039138793945, + "learning_rate": 1.9522403710286904e-05, + "loss": 2.2003, + "step": 614 + }, + { + "epoch": 0.3784324282747481, + "grad_norm": 6.300394058227539, + "learning_rate": 1.952037266681438e-05, + "loss": 2.0152, + "step": 615 + }, + { + "epoch": 0.37904776555649566, + "grad_norm": 5.8235859870910645, + "learning_rate": 1.9518337419961e-05, + "loss": 2.1279, + "step": 616 + }, + { + "epoch": 0.3796631028382432, + "grad_norm": 
7.469605445861816, + "learning_rate": 1.9516297970625344e-05, + "loss": 2.1297, + "step": 617 + }, + { + "epoch": 0.3802784401199908, + "grad_norm": 6.566958904266357, + "learning_rate": 1.9514254319707868e-05, + "loss": 2.2407, + "step": 618 + }, + { + "epoch": 0.38089377740173835, + "grad_norm": 5.8921990394592285, + "learning_rate": 1.9512206468110863e-05, + "loss": 1.9459, + "step": 619 + }, + { + "epoch": 0.38150911468348586, + "grad_norm": 6.139562129974365, + "learning_rate": 1.9510154416738495e-05, + "loss": 2.1876, + "step": 620 + }, + { + "epoch": 0.3821244519652334, + "grad_norm": 6.216349124908447, + "learning_rate": 1.950809816649677e-05, + "loss": 2.0235, + "step": 621 + }, + { + "epoch": 0.382739789246981, + "grad_norm": 6.2589640617370605, + "learning_rate": 1.950603771829355e-05, + "loss": 2.1186, + "step": 622 + }, + { + "epoch": 0.38335512652872855, + "grad_norm": 7.359731197357178, + "learning_rate": 1.9503973073038554e-05, + "loss": 2.0738, + "step": 623 + }, + { + "epoch": 0.3839704638104761, + "grad_norm": 6.516498565673828, + "learning_rate": 1.9501904231643353e-05, + "loss": 2.1325, + "step": 624 + }, + { + "epoch": 0.3845858010922237, + "grad_norm": 8.067529678344727, + "learning_rate": 1.949983119502137e-05, + "loss": 2.2291, + "step": 625 + }, + { + "epoch": 0.38520113837397124, + "grad_norm": 7.105299472808838, + "learning_rate": 1.949775396408788e-05, + "loss": 2.0821, + "step": 626 + }, + { + "epoch": 0.3858164756557188, + "grad_norm": 7.130588054656982, + "learning_rate": 1.949567253976001e-05, + "loss": 2.1605, + "step": 627 + }, + { + "epoch": 0.38643181293746637, + "grad_norm": 6.0563154220581055, + "learning_rate": 1.9493586922956736e-05, + "loss": 2.2167, + "step": 628 + }, + { + "epoch": 0.38704715021921393, + "grad_norm": 9.021821975708008, + "learning_rate": 1.9491497114598892e-05, + "loss": 2.4192, + "step": 629 + }, + { + "epoch": 0.38766248750096144, + "grad_norm": 7.077023983001709, + "learning_rate": 1.9489403115609157e-05, + "loss": 1.8631, + "step": 630 + }, + { + "epoch": 0.388277824782709, + "grad_norm": 7.049808502197266, + "learning_rate": 1.948730492691206e-05, + "loss": 2.0149, + "step": 631 + }, + { + "epoch": 0.38889316206445657, + "grad_norm": 6.801271915435791, + "learning_rate": 1.9485202549433986e-05, + "loss": 2.2389, + "step": 632 + }, + { + "epoch": 0.38950849934620413, + "grad_norm": 6.658502101898193, + "learning_rate": 1.948309598410316e-05, + "loss": 1.9566, + "step": 633 + }, + { + "epoch": 0.3901238366279517, + "grad_norm": 6.527298927307129, + "learning_rate": 1.948098523184966e-05, + "loss": 2.238, + "step": 634 + }, + { + "epoch": 0.39073917390969926, + "grad_norm": 6.2011399269104, + "learning_rate": 1.9478870293605416e-05, + "loss": 2.0912, + "step": 635 + }, + { + "epoch": 0.3913545111914468, + "grad_norm": 6.1173415184021, + "learning_rate": 1.9476751170304206e-05, + "loss": 2.1356, + "step": 636 + }, + { + "epoch": 0.3919698484731944, + "grad_norm": 6.2792134284973145, + "learning_rate": 1.9474627862881644e-05, + "loss": 2.1088, + "step": 637 + }, + { + "epoch": 0.39258518575494195, + "grad_norm": 5.7378830909729, + "learning_rate": 1.9472500372275212e-05, + "loss": 2.1791, + "step": 638 + }, + { + "epoch": 0.3932005230366895, + "grad_norm": 6.007324695587158, + "learning_rate": 1.947036869942422e-05, + "loss": 2.0437, + "step": 639 + }, + { + "epoch": 0.393815860318437, + "grad_norm": 6.859370708465576, + "learning_rate": 1.9468232845269837e-05, + "loss": 2.065, + "step": 640 + }, + { + "epoch": 
0.3944311976001846, + "grad_norm": 7.909273147583008, + "learning_rate": 1.946609281075507e-05, + "loss": 2.0249, + "step": 641 + }, + { + "epoch": 0.39504653488193214, + "grad_norm": 6.309966564178467, + "learning_rate": 1.9463948596824774e-05, + "loss": 2.1516, + "step": 642 + }, + { + "epoch": 0.3956618721636797, + "grad_norm": 7.858499526977539, + "learning_rate": 1.9461800204425653e-05, + "loss": 1.9052, + "step": 643 + }, + { + "epoch": 0.39627720944542727, + "grad_norm": 6.322279930114746, + "learning_rate": 1.9459647634506253e-05, + "loss": 1.9562, + "step": 644 + }, + { + "epoch": 0.39689254672717483, + "grad_norm": 6.176844596862793, + "learning_rate": 1.9457490888016962e-05, + "loss": 2.3734, + "step": 645 + }, + { + "epoch": 0.3975078840089224, + "grad_norm": 6.733726501464844, + "learning_rate": 1.9455329965910016e-05, + "loss": 2.0876, + "step": 646 + }, + { + "epoch": 0.39812322129066996, + "grad_norm": 6.4710917472839355, + "learning_rate": 1.9453164869139488e-05, + "loss": 2.1578, + "step": 647 + }, + { + "epoch": 0.3987385585724175, + "grad_norm": 6.76784086227417, + "learning_rate": 1.945099559866131e-05, + "loss": 2.1506, + "step": 648 + }, + { + "epoch": 0.3993538958541651, + "grad_norm": 8.631237030029297, + "learning_rate": 1.9448822155433235e-05, + "loss": 2.2725, + "step": 649 + }, + { + "epoch": 0.39996923313591265, + "grad_norm": 8.151426315307617, + "learning_rate": 1.9446644540414882e-05, + "loss": 2.0255, + "step": 650 + }, + { + "epoch": 0.40058457041766016, + "grad_norm": 7.900586128234863, + "learning_rate": 1.9444462754567682e-05, + "loss": 1.955, + "step": 651 + }, + { + "epoch": 0.4011999076994077, + "grad_norm": 6.347893714904785, + "learning_rate": 1.944227679885494e-05, + "loss": 2.1448, + "step": 652 + }, + { + "epoch": 0.4018152449811553, + "grad_norm": 6.414784908294678, + "learning_rate": 1.944008667424178e-05, + "loss": 2.193, + "step": 653 + }, + { + "epoch": 0.40243058226290285, + "grad_norm": 7.849375247955322, + "learning_rate": 1.9437892381695174e-05, + "loss": 2.068, + "step": 654 + }, + { + "epoch": 0.4030459195446504, + "grad_norm": 7.642889976501465, + "learning_rate": 1.9435693922183935e-05, + "loss": 2.0517, + "step": 655 + }, + { + "epoch": 0.403661256826398, + "grad_norm": 6.006908416748047, + "learning_rate": 1.9433491296678712e-05, + "loss": 2.355, + "step": 656 + }, + { + "epoch": 0.40427659410814554, + "grad_norm": 6.41367244720459, + "learning_rate": 1.9431284506151996e-05, + "loss": 2.058, + "step": 657 + }, + { + "epoch": 0.4048919313898931, + "grad_norm": 6.3076324462890625, + "learning_rate": 1.942907355157812e-05, + "loss": 2.1955, + "step": 658 + }, + { + "epoch": 0.40550726867164066, + "grad_norm": 7.7937846183776855, + "learning_rate": 1.9426858433933248e-05, + "loss": 2.0888, + "step": 659 + }, + { + "epoch": 0.4061226059533882, + "grad_norm": 6.8000969886779785, + "learning_rate": 1.9424639154195386e-05, + "loss": 2.0332, + "step": 660 + }, + { + "epoch": 0.40673794323513573, + "grad_norm": 5.666341304779053, + "learning_rate": 1.9422415713344382e-05, + "loss": 2.1198, + "step": 661 + }, + { + "epoch": 0.4073532805168833, + "grad_norm": 6.546113967895508, + "learning_rate": 1.9420188112361914e-05, + "loss": 2.0452, + "step": 662 + }, + { + "epoch": 0.40796861779863086, + "grad_norm": 5.356297016143799, + "learning_rate": 1.94179563522315e-05, + "loss": 2.1616, + "step": 663 + }, + { + "epoch": 0.4085839550803784, + "grad_norm": 8.680123329162598, + "learning_rate": 1.9415720433938495e-05, + "loss": 2.2552, + "step": 
664 + }, + { + "epoch": 0.409199292362126, + "grad_norm": 8.679762840270996, + "learning_rate": 1.9413480358470088e-05, + "loss": 2.0614, + "step": 665 + }, + { + "epoch": 0.40981462964387355, + "grad_norm": 5.595808029174805, + "learning_rate": 1.9411236126815306e-05, + "loss": 2.3155, + "step": 666 + }, + { + "epoch": 0.4104299669256211, + "grad_norm": 5.766740798950195, + "learning_rate": 1.9408987739965006e-05, + "loss": 2.0765, + "step": 667 + }, + { + "epoch": 0.4110453042073687, + "grad_norm": 6.245460510253906, + "learning_rate": 1.9406735198911885e-05, + "loss": 2.3289, + "step": 668 + }, + { + "epoch": 0.41166064148911624, + "grad_norm": 6.1597981452941895, + "learning_rate": 1.9404478504650473e-05, + "loss": 2.1536, + "step": 669 + }, + { + "epoch": 0.4122759787708638, + "grad_norm": 6.202880382537842, + "learning_rate": 1.940221765817713e-05, + "loss": 2.1758, + "step": 670 + }, + { + "epoch": 0.4128913160526113, + "grad_norm": 6.37293004989624, + "learning_rate": 1.9399952660490057e-05, + "loss": 2.2813, + "step": 671 + }, + { + "epoch": 0.4135066533343589, + "grad_norm": 6.769295692443848, + "learning_rate": 1.9397683512589282e-05, + "loss": 1.9846, + "step": 672 + }, + { + "epoch": 0.41412199061610644, + "grad_norm": 6.569505214691162, + "learning_rate": 1.9395410215476662e-05, + "loss": 2.1312, + "step": 673 + }, + { + "epoch": 0.414737327897854, + "grad_norm": 6.9404826164245605, + "learning_rate": 1.939313277015589e-05, + "loss": 2.1604, + "step": 674 + }, + { + "epoch": 0.41535266517960157, + "grad_norm": 7.606903076171875, + "learning_rate": 1.9390851177632496e-05, + "loss": 2.2779, + "step": 675 + }, + { + "epoch": 0.41596800246134913, + "grad_norm": 6.346901893615723, + "learning_rate": 1.9388565438913832e-05, + "loss": 2.1305, + "step": 676 + }, + { + "epoch": 0.4165833397430967, + "grad_norm": 5.594636917114258, + "learning_rate": 1.938627555500909e-05, + "loss": 2.0227, + "step": 677 + }, + { + "epoch": 0.41719867702484426, + "grad_norm": 5.93821907043457, + "learning_rate": 1.9383981526929277e-05, + "loss": 1.8945, + "step": 678 + }, + { + "epoch": 0.4178140143065918, + "grad_norm": 7.389327049255371, + "learning_rate": 1.9381683355687245e-05, + "loss": 1.934, + "step": 679 + }, + { + "epoch": 0.4184293515883394, + "grad_norm": 5.95435094833374, + "learning_rate": 1.937938104229767e-05, + "loss": 2.0394, + "step": 680 + }, + { + "epoch": 0.4190446888700869, + "grad_norm": 7.808548927307129, + "learning_rate": 1.937707458777705e-05, + "loss": 1.9812, + "step": 681 + }, + { + "epoch": 0.41966002615183445, + "grad_norm": 6.301791667938232, + "learning_rate": 1.9374763993143727e-05, + "loss": 2.1281, + "step": 682 + }, + { + "epoch": 0.420275363433582, + "grad_norm": 6.1903204917907715, + "learning_rate": 1.937244925941786e-05, + "loss": 2.0276, + "step": 683 + }, + { + "epoch": 0.4208907007153296, + "grad_norm": 6.277788162231445, + "learning_rate": 1.937013038762143e-05, + "loss": 2.1959, + "step": 684 + }, + { + "epoch": 0.42150603799707714, + "grad_norm": 5.987131118774414, + "learning_rate": 1.9367807378778257e-05, + "loss": 2.0035, + "step": 685 + }, + { + "epoch": 0.4221213752788247, + "grad_norm": 7.882297515869141, + "learning_rate": 1.9365480233913984e-05, + "loss": 2.1935, + "step": 686 + }, + { + "epoch": 0.42273671256057227, + "grad_norm": 6.925199508666992, + "learning_rate": 1.9363148954056077e-05, + "loss": 2.0481, + "step": 687 + }, + { + "epoch": 0.42335204984231983, + "grad_norm": 6.69683313369751, + "learning_rate": 1.936081354023383e-05, + "loss": 
2.0139, + "step": 688 + }, + { + "epoch": 0.4239673871240674, + "grad_norm": 6.429065704345703, + "learning_rate": 1.9358473993478358e-05, + "loss": 2.2319, + "step": 689 + }, + { + "epoch": 0.42458272440581496, + "grad_norm": 7.351758003234863, + "learning_rate": 1.935613031482261e-05, + "loss": 2.1133, + "step": 690 + }, + { + "epoch": 0.42519806168756247, + "grad_norm": 6.553668975830078, + "learning_rate": 1.9353782505301352e-05, + "loss": 2.1859, + "step": 691 + }, + { + "epoch": 0.42581339896931003, + "grad_norm": 6.174842834472656, + "learning_rate": 1.9351430565951174e-05, + "loss": 2.0647, + "step": 692 + }, + { + "epoch": 0.4264287362510576, + "grad_norm": 5.904472827911377, + "learning_rate": 1.934907449781049e-05, + "loss": 2.2981, + "step": 693 + }, + { + "epoch": 0.42704407353280516, + "grad_norm": 6.566816329956055, + "learning_rate": 1.934671430191954e-05, + "loss": 2.1774, + "step": 694 + }, + { + "epoch": 0.4276594108145527, + "grad_norm": 6.399332523345947, + "learning_rate": 1.9344349979320386e-05, + "loss": 1.9871, + "step": 695 + }, + { + "epoch": 0.4282747480963003, + "grad_norm": 6.084597587585449, + "learning_rate": 1.9341981531056905e-05, + "loss": 2.0811, + "step": 696 + }, + { + "epoch": 0.42889008537804785, + "grad_norm": 6.012515544891357, + "learning_rate": 1.9339608958174806e-05, + "loss": 1.9485, + "step": 697 + }, + { + "epoch": 0.4295054226597954, + "grad_norm": 6.579845428466797, + "learning_rate": 1.933723226172161e-05, + "loss": 2.0216, + "step": 698 + }, + { + "epoch": 0.430120759941543, + "grad_norm": 6.680030822753906, + "learning_rate": 1.9334851442746665e-05, + "loss": 1.9808, + "step": 699 + }, + { + "epoch": 0.43073609722329054, + "grad_norm": 6.242409706115723, + "learning_rate": 1.9332466502301134e-05, + "loss": 2.0912, + "step": 700 + }, + { + "epoch": 0.4313514345050381, + "grad_norm": 6.666229724884033, + "learning_rate": 1.9330077441438007e-05, + "loss": 2.2028, + "step": 701 + }, + { + "epoch": 0.4319667717867856, + "grad_norm": 5.692910671234131, + "learning_rate": 1.9327684261212084e-05, + "loss": 1.9945, + "step": 702 + }, + { + "epoch": 0.43258210906853317, + "grad_norm": 7.141720294952393, + "learning_rate": 1.9325286962679993e-05, + "loss": 2.0688, + "step": 703 + }, + { + "epoch": 0.43319744635028073, + "grad_norm": 6.733546733856201, + "learning_rate": 1.932288554690017e-05, + "loss": 1.9228, + "step": 704 + }, + { + "epoch": 0.4338127836320283, + "grad_norm": 5.67270040512085, + "learning_rate": 1.932048001493288e-05, + "loss": 2.0107, + "step": 705 + }, + { + "epoch": 0.43442812091377586, + "grad_norm": 7.053188323974609, + "learning_rate": 1.9318070367840197e-05, + "loss": 2.2279, + "step": 706 + }, + { + "epoch": 0.4350434581955234, + "grad_norm": 5.633090972900391, + "learning_rate": 1.9315656606686012e-05, + "loss": 2.1181, + "step": 707 + }, + { + "epoch": 0.435658795477271, + "grad_norm": 9.276235580444336, + "learning_rate": 1.931323873253604e-05, + "loss": 2.1589, + "step": 708 + }, + { + "epoch": 0.43627413275901855, + "grad_norm": 6.924360275268555, + "learning_rate": 1.9310816746457802e-05, + "loss": 2.021, + "step": 709 + }, + { + "epoch": 0.4368894700407661, + "grad_norm": 6.993905067443848, + "learning_rate": 1.9308390649520645e-05, + "loss": 2.141, + "step": 710 + }, + { + "epoch": 0.4375048073225137, + "grad_norm": 6.010927200317383, + "learning_rate": 1.930596044279572e-05, + "loss": 2.1264, + "step": 711 + }, + { + "epoch": 0.4381201446042612, + "grad_norm": 7.483770847320557, + "learning_rate": 
1.9303526127356008e-05, + "loss": 2.1553, + "step": 712 + }, + { + "epoch": 0.43873548188600875, + "grad_norm": 7.473067283630371, + "learning_rate": 1.9301087704276283e-05, + "loss": 2.058, + "step": 713 + }, + { + "epoch": 0.4393508191677563, + "grad_norm": 6.614870071411133, + "learning_rate": 1.9298645174633147e-05, + "loss": 2.2756, + "step": 714 + }, + { + "epoch": 0.4399661564495039, + "grad_norm": 6.784712314605713, + "learning_rate": 1.9296198539505013e-05, + "loss": 2.2214, + "step": 715 + }, + { + "epoch": 0.44058149373125144, + "grad_norm": 6.582732200622559, + "learning_rate": 1.9293747799972108e-05, + "loss": 2.0439, + "step": 716 + }, + { + "epoch": 0.441196831012999, + "grad_norm": 7.454766750335693, + "learning_rate": 1.929129295711646e-05, + "loss": 2.0716, + "step": 717 + }, + { + "epoch": 0.44181216829474657, + "grad_norm": 6.434202194213867, + "learning_rate": 1.928883401202193e-05, + "loss": 1.9267, + "step": 718 + }, + { + "epoch": 0.44242750557649413, + "grad_norm": 6.148949146270752, + "learning_rate": 1.9286370965774166e-05, + "loss": 2.277, + "step": 719 + }, + { + "epoch": 0.4430428428582417, + "grad_norm": 6.419177055358887, + "learning_rate": 1.9283903819460644e-05, + "loss": 2.0848, + "step": 720 + }, + { + "epoch": 0.44365818013998926, + "grad_norm": 7.307488918304443, + "learning_rate": 1.9281432574170644e-05, + "loss": 2.3593, + "step": 721 + }, + { + "epoch": 0.44427351742173676, + "grad_norm": 6.170140266418457, + "learning_rate": 1.927895723099525e-05, + "loss": 2.3641, + "step": 722 + }, + { + "epoch": 0.4448888547034843, + "grad_norm": 5.1873955726623535, + "learning_rate": 1.9276477791027374e-05, + "loss": 2.1883, + "step": 723 + }, + { + "epoch": 0.4455041919852319, + "grad_norm": 7.554636001586914, + "learning_rate": 1.9273994255361717e-05, + "loss": 2.1133, + "step": 724 + }, + { + "epoch": 0.44611952926697945, + "grad_norm": 6.975783824920654, + "learning_rate": 1.927150662509479e-05, + "loss": 2.0336, + "step": 725 + }, + { + "epoch": 0.446734866548727, + "grad_norm": 10.457385063171387, + "learning_rate": 1.9269014901324926e-05, + "loss": 2.2527, + "step": 726 + }, + { + "epoch": 0.4473502038304746, + "grad_norm": 6.454582691192627, + "learning_rate": 1.9266519085152254e-05, + "loss": 2.1109, + "step": 727 + }, + { + "epoch": 0.44796554111222214, + "grad_norm": 6.664186000823975, + "learning_rate": 1.9264019177678712e-05, + "loss": 2.0341, + "step": 728 + }, + { + "epoch": 0.4485808783939697, + "grad_norm": 5.278791427612305, + "learning_rate": 1.9261515180008047e-05, + "loss": 2.0725, + "step": 729 + }, + { + "epoch": 0.44919621567571727, + "grad_norm": 7.052336692810059, + "learning_rate": 1.9259007093245804e-05, + "loss": 2.2265, + "step": 730 + }, + { + "epoch": 0.44981155295746483, + "grad_norm": 9.757405281066895, + "learning_rate": 1.9256494918499348e-05, + "loss": 2.4089, + "step": 731 + }, + { + "epoch": 0.45042689023921234, + "grad_norm": 6.563255310058594, + "learning_rate": 1.925397865687783e-05, + "loss": 2.029, + "step": 732 + }, + { + "epoch": 0.4510422275209599, + "grad_norm": 5.596587181091309, + "learning_rate": 1.9251458309492227e-05, + "loss": 1.9007, + "step": 733 + }, + { + "epoch": 0.45165756480270747, + "grad_norm": 6.464388847351074, + "learning_rate": 1.9248933877455296e-05, + "loss": 2.1196, + "step": 734 + }, + { + "epoch": 0.45227290208445503, + "grad_norm": 6.56611442565918, + "learning_rate": 1.9246405361881623e-05, + "loss": 2.0849, + "step": 735 + }, + { + "epoch": 0.4528882393662026, + "grad_norm": 
8.356101989746094, + "learning_rate": 1.9243872763887574e-05, + "loss": 1.9161, + "step": 736 + }, + { + "epoch": 0.45350357664795016, + "grad_norm": 5.548551559448242, + "learning_rate": 1.9241336084591327e-05, + "loss": 1.9741, + "step": 737 + }, + { + "epoch": 0.4541189139296977, + "grad_norm": 5.744799613952637, + "learning_rate": 1.9238795325112867e-05, + "loss": 2.094, + "step": 738 + }, + { + "epoch": 0.4547342512114453, + "grad_norm": 7.624112129211426, + "learning_rate": 1.9236250486573978e-05, + "loss": 2.0583, + "step": 739 + }, + { + "epoch": 0.45534958849319285, + "grad_norm": 5.396349906921387, + "learning_rate": 1.9233701570098237e-05, + "loss": 2.1334, + "step": 740 + }, + { + "epoch": 0.4559649257749404, + "grad_norm": 5.835684299468994, + "learning_rate": 1.923114857681103e-05, + "loss": 2.218, + "step": 741 + }, + { + "epoch": 0.456580263056688, + "grad_norm": 9.1834135055542, + "learning_rate": 1.9228591507839538e-05, + "loss": 2.3377, + "step": 742 + }, + { + "epoch": 0.4571956003384355, + "grad_norm": 8.384282112121582, + "learning_rate": 1.9226030364312747e-05, + "loss": 2.0626, + "step": 743 + }, + { + "epoch": 0.45781093762018304, + "grad_norm": 6.535480976104736, + "learning_rate": 1.9223465147361433e-05, + "loss": 2.1988, + "step": 744 + }, + { + "epoch": 0.4584262749019306, + "grad_norm": 6.0138258934021, + "learning_rate": 1.922089585811818e-05, + "loss": 2.071, + "step": 745 + }, + { + "epoch": 0.45904161218367817, + "grad_norm": 6.317763328552246, + "learning_rate": 1.921832249771737e-05, + "loss": 2.0735, + "step": 746 + }, + { + "epoch": 0.45965694946542573, + "grad_norm": 6.526129722595215, + "learning_rate": 1.9215745067295168e-05, + "loss": 2.1293, + "step": 747 + }, + { + "epoch": 0.4602722867471733, + "grad_norm": 7.146112442016602, + "learning_rate": 1.9213163567989554e-05, + "loss": 2.3711, + "step": 748 + }, + { + "epoch": 0.46088762402892086, + "grad_norm": 6.14267635345459, + "learning_rate": 1.9210578000940297e-05, + "loss": 2.1833, + "step": 749 + }, + { + "epoch": 0.4615029613106684, + "grad_norm": 7.413352966308594, + "learning_rate": 1.9207988367288956e-05, + "loss": 2.0622, + "step": 750 + }, + { + "epoch": 0.462118298592416, + "grad_norm": 6.409780025482178, + "learning_rate": 1.92053946681789e-05, + "loss": 2.1235, + "step": 751 + }, + { + "epoch": 0.46273363587416355, + "grad_norm": 6.982135772705078, + "learning_rate": 1.9202796904755276e-05, + "loss": 2.236, + "step": 752 + }, + { + "epoch": 0.46334897315591106, + "grad_norm": 6.548732757568359, + "learning_rate": 1.920019507816504e-05, + "loss": 1.9935, + "step": 753 + }, + { + "epoch": 0.4639643104376586, + "grad_norm": 8.625593185424805, + "learning_rate": 1.919758918955693e-05, + "loss": 2.0471, + "step": 754 + }, + { + "epoch": 0.4645796477194062, + "grad_norm": 6.883953094482422, + "learning_rate": 1.919497924008149e-05, + "loss": 1.863, + "step": 755 + }, + { + "epoch": 0.46519498500115375, + "grad_norm": 8.053544998168945, + "learning_rate": 1.9192365230891044e-05, + "loss": 2.3225, + "step": 756 + }, + { + "epoch": 0.4658103222829013, + "grad_norm": 6.812694072723389, + "learning_rate": 1.9189747163139717e-05, + "loss": 1.9809, + "step": 757 + }, + { + "epoch": 0.4664256595646489, + "grad_norm": 5.7628631591796875, + "learning_rate": 1.9187125037983424e-05, + "loss": 2.0442, + "step": 758 + }, + { + "epoch": 0.46704099684639644, + "grad_norm": 6.236103534698486, + "learning_rate": 1.918449885657987e-05, + "loss": 2.1149, + "step": 759 + }, + { + "epoch": 0.467656334128144, + 
"grad_norm": 6.699904441833496, + "learning_rate": 1.918186862008855e-05, + "loss": 1.884, + "step": 760 + }, + { + "epoch": 0.46827167140989157, + "grad_norm": 5.66964054107666, + "learning_rate": 1.9179234329670757e-05, + "loss": 2.0331, + "step": 761 + }, + { + "epoch": 0.46888700869163913, + "grad_norm": 5.376523494720459, + "learning_rate": 1.9176595986489558e-05, + "loss": 1.9197, + "step": 762 + }, + { + "epoch": 0.46950234597338664, + "grad_norm": 6.917234897613525, + "learning_rate": 1.917395359170983e-05, + "loss": 2.0093, + "step": 763 + }, + { + "epoch": 0.4701176832551342, + "grad_norm": 8.011265754699707, + "learning_rate": 1.917130714649822e-05, + "loss": 2.1351, + "step": 764 + }, + { + "epoch": 0.47073302053688176, + "grad_norm": 9.000468254089355, + "learning_rate": 1.9168656652023176e-05, + "loss": 2.0154, + "step": 765 + }, + { + "epoch": 0.4713483578186293, + "grad_norm": 5.950881004333496, + "learning_rate": 1.9166002109454927e-05, + "loss": 2.1636, + "step": 766 + }, + { + "epoch": 0.4719636951003769, + "grad_norm": 7.364047527313232, + "learning_rate": 1.9163343519965494e-05, + "loss": 2.0081, + "step": 767 + }, + { + "epoch": 0.47257903238212445, + "grad_norm": 6.926871299743652, + "learning_rate": 1.916068088472868e-05, + "loss": 2.212, + "step": 768 + }, + { + "epoch": 0.473194369663872, + "grad_norm": 6.287258148193359, + "learning_rate": 1.9158014204920076e-05, + "loss": 2.0932, + "step": 769 + }, + { + "epoch": 0.4738097069456196, + "grad_norm": 7.3404316902160645, + "learning_rate": 1.915534348171706e-05, + "loss": 2.2726, + "step": 770 + }, + { + "epoch": 0.47442504422736714, + "grad_norm": 6.861839294433594, + "learning_rate": 1.91526687162988e-05, + "loss": 2.029, + "step": 771 + }, + { + "epoch": 0.4750403815091147, + "grad_norm": 6.497978687286377, + "learning_rate": 1.9149989909846235e-05, + "loss": 2.0199, + "step": 772 + }, + { + "epoch": 0.4756557187908622, + "grad_norm": 6.417537212371826, + "learning_rate": 1.91473070635421e-05, + "loss": 2.1988, + "step": 773 + }, + { + "epoch": 0.4762710560726098, + "grad_norm": 6.90789270401001, + "learning_rate": 1.914462017857091e-05, + "loss": 2.2551, + "step": 774 + }, + { + "epoch": 0.47688639335435734, + "grad_norm": 5.451094627380371, + "learning_rate": 1.914192925611896e-05, + "loss": 2.0113, + "step": 775 + }, + { + "epoch": 0.4775017306361049, + "grad_norm": 7.097751617431641, + "learning_rate": 1.913923429737434e-05, + "loss": 1.974, + "step": 776 + }, + { + "epoch": 0.47811706791785247, + "grad_norm": 7.267444133758545, + "learning_rate": 1.9136535303526906e-05, + "loss": 2.1168, + "step": 777 + }, + { + "epoch": 0.47873240519960003, + "grad_norm": 6.68643045425415, + "learning_rate": 1.91338322757683e-05, + "loss": 2.0442, + "step": 778 + }, + { + "epoch": 0.4793477424813476, + "grad_norm": 6.749536514282227, + "learning_rate": 1.913112521529195e-05, + "loss": 2.2087, + "step": 779 + }, + { + "epoch": 0.47996307976309516, + "grad_norm": 8.22842025756836, + "learning_rate": 1.9128414123293065e-05, + "loss": 2.0525, + "step": 780 + }, + { + "epoch": 0.4805784170448427, + "grad_norm": 5.559959411621094, + "learning_rate": 1.912569900096863e-05, + "loss": 2.0957, + "step": 781 + }, + { + "epoch": 0.4811937543265903, + "grad_norm": 7.373110771179199, + "learning_rate": 1.9122979849517406e-05, + "loss": 2.1706, + "step": 782 + }, + { + "epoch": 0.48180909160833785, + "grad_norm": 5.807012557983398, + "learning_rate": 1.9120256670139942e-05, + "loss": 2.014, + "step": 783 + }, + { + "epoch": 
0.48242442889008536, + "grad_norm": 7.08981466293335, + "learning_rate": 1.9117529464038557e-05, + "loss": 1.9827, + "step": 784 + }, + { + "epoch": 0.4830397661718329, + "grad_norm": 6.074545860290527, + "learning_rate": 1.9114798232417357e-05, + "loss": 2.0586, + "step": 785 + }, + { + "epoch": 0.4836551034535805, + "grad_norm": 6.17758321762085, + "learning_rate": 1.9112062976482213e-05, + "loss": 2.1633, + "step": 786 + }, + { + "epoch": 0.48427044073532804, + "grad_norm": 7.180741786956787, + "learning_rate": 1.9109323697440782e-05, + "loss": 2.1215, + "step": 787 + }, + { + "epoch": 0.4848857780170756, + "grad_norm": 6.877057075500488, + "learning_rate": 1.91065803965025e-05, + "loss": 2.1648, + "step": 788 + }, + { + "epoch": 0.48550111529882317, + "grad_norm": 7.960079669952393, + "learning_rate": 1.9103833074878565e-05, + "loss": 2.0103, + "step": 789 + }, + { + "epoch": 0.48611645258057073, + "grad_norm": 8.077539443969727, + "learning_rate": 1.9101081733781966e-05, + "loss": 2.2398, + "step": 790 + }, + { + "epoch": 0.4867317898623183, + "grad_norm": 7.334413051605225, + "learning_rate": 1.909832637442746e-05, + "loss": 2.0967, + "step": 791 + }, + { + "epoch": 0.48734712714406586, + "grad_norm": 6.657857894897461, + "learning_rate": 1.909556699803157e-05, + "loss": 1.9576, + "step": 792 + }, + { + "epoch": 0.4879624644258134, + "grad_norm": 6.647533893585205, + "learning_rate": 1.9092803605812607e-05, + "loss": 2.158, + "step": 793 + }, + { + "epoch": 0.48857780170756093, + "grad_norm": 7.300939559936523, + "learning_rate": 1.9090036198990645e-05, + "loss": 2.025, + "step": 794 + }, + { + "epoch": 0.4891931389893085, + "grad_norm": 8.70045280456543, + "learning_rate": 1.9087264778787534e-05, + "loss": 2.096, + "step": 795 + }, + { + "epoch": 0.48980847627105606, + "grad_norm": 6.226105213165283, + "learning_rate": 1.90844893464269e-05, + "loss": 2.1785, + "step": 796 + }, + { + "epoch": 0.4904238135528036, + "grad_norm": 7.376537799835205, + "learning_rate": 1.908170990313413e-05, + "loss": 2.1442, + "step": 797 + }, + { + "epoch": 0.4910391508345512, + "grad_norm": 6.8408522605896, + "learning_rate": 1.907892645013639e-05, + "loss": 1.9296, + "step": 798 + }, + { + "epoch": 0.49165448811629875, + "grad_norm": 5.866824150085449, + "learning_rate": 1.9076138988662615e-05, + "loss": 2.2254, + "step": 799 + }, + { + "epoch": 0.4922698253980463, + "grad_norm": 6.0195465087890625, + "learning_rate": 1.9073347519943512e-05, + "loss": 1.8624, + "step": 800 + }, + { + "epoch": 0.4928851626797939, + "grad_norm": 7.2599778175354, + "learning_rate": 1.9070552045211544e-05, + "loss": 2.0202, + "step": 801 + }, + { + "epoch": 0.49350049996154144, + "grad_norm": 6.548917770385742, + "learning_rate": 1.906775256570097e-05, + "loss": 2.1611, + "step": 802 + }, + { + "epoch": 0.494115837243289, + "grad_norm": 7.537008285522461, + "learning_rate": 1.9064949082647785e-05, + "loss": 1.8565, + "step": 803 + }, + { + "epoch": 0.4947311745250365, + "grad_norm": 7.787591934204102, + "learning_rate": 1.9062141597289773e-05, + "loss": 2.4401, + "step": 804 + }, + { + "epoch": 0.4953465118067841, + "grad_norm": 5.584489345550537, + "learning_rate": 1.905933011086648e-05, + "loss": 2.0662, + "step": 805 + }, + { + "epoch": 0.49596184908853164, + "grad_norm": 11.47817611694336, + "learning_rate": 1.9056514624619217e-05, + "loss": 1.9211, + "step": 806 + }, + { + "epoch": 0.4965771863702792, + "grad_norm": 6.618988037109375, + "learning_rate": 1.9053695139791058e-05, + "loss": 2.1296, + "step": 807 + }, + { 
+ "epoch": 0.49719252365202676, + "grad_norm": 6.225590229034424, + "learning_rate": 1.9050871657626854e-05, + "loss": 2.1692, + "step": 808 + }, + { + "epoch": 0.4978078609337743, + "grad_norm": 6.918593406677246, + "learning_rate": 1.9048044179373204e-05, + "loss": 2.0517, + "step": 809 + }, + { + "epoch": 0.4984231982155219, + "grad_norm": 7.103034019470215, + "learning_rate": 1.9045212706278485e-05, + "loss": 2.1758, + "step": 810 + }, + { + "epoch": 0.49903853549726945, + "grad_norm": 6.897078037261963, + "learning_rate": 1.904237723959283e-05, + "loss": 2.0163, + "step": 811 + }, + { + "epoch": 0.499653872779017, + "grad_norm": 6.486918926239014, + "learning_rate": 1.903953778056814e-05, + "loss": 1.9905, + "step": 812 + }, + { + "epoch": 0.5002692100607645, + "grad_norm": 5.886736869812012, + "learning_rate": 1.9036694330458077e-05, + "loss": 2.1024, + "step": 813 + }, + { + "epoch": 0.5008845473425121, + "grad_norm": 5.936880111694336, + "learning_rate": 1.9033846890518063e-05, + "loss": 2.0864, + "step": 814 + }, + { + "epoch": 0.5014998846242597, + "grad_norm": 8.183404922485352, + "learning_rate": 1.903099546200529e-05, + "loss": 2.1216, + "step": 815 + }, + { + "epoch": 0.5021152219060072, + "grad_norm": 6.184354782104492, + "learning_rate": 1.9028140046178696e-05, + "loss": 2.0167, + "step": 816 + }, + { + "epoch": 0.5027305591877548, + "grad_norm": 7.765209197998047, + "learning_rate": 1.902528064429899e-05, + "loss": 2.3614, + "step": 817 + }, + { + "epoch": 0.5033458964695023, + "grad_norm": 6.63641881942749, + "learning_rate": 1.902241725762864e-05, + "loss": 1.8772, + "step": 818 + }, + { + "epoch": 0.5039612337512499, + "grad_norm": 6.48709774017334, + "learning_rate": 1.901954988743188e-05, + "loss": 1.9598, + "step": 819 + }, + { + "epoch": 0.5045765710329975, + "grad_norm": 9.321575164794922, + "learning_rate": 1.901667853497468e-05, + "loss": 2.1039, + "step": 820 + }, + { + "epoch": 0.505191908314745, + "grad_norm": 7.153107643127441, + "learning_rate": 1.9013803201524794e-05, + "loss": 2.1985, + "step": 821 + }, + { + "epoch": 0.5058072455964926, + "grad_norm": 6.9203877449035645, + "learning_rate": 1.9010923888351717e-05, + "loss": 2.176, + "step": 822 + }, + { + "epoch": 0.5064225828782402, + "grad_norm": 6.149457931518555, + "learning_rate": 1.9008040596726713e-05, + "loss": 2.0015, + "step": 823 + }, + { + "epoch": 0.5070379201599877, + "grad_norm": 6.627574920654297, + "learning_rate": 1.9005153327922792e-05, + "loss": 2.0853, + "step": 824 + }, + { + "epoch": 0.5076532574417353, + "grad_norm": 6.685698509216309, + "learning_rate": 1.9002262083214728e-05, + "loss": 2.0401, + "step": 825 + }, + { + "epoch": 0.5082685947234828, + "grad_norm": 6.359735488891602, + "learning_rate": 1.899936686387904e-05, + "loss": 1.9951, + "step": 826 + }, + { + "epoch": 0.5088839320052304, + "grad_norm": 6.742758750915527, + "learning_rate": 1.8996467671194017e-05, + "loss": 2.2857, + "step": 827 + }, + { + "epoch": 0.509499269286978, + "grad_norm": 5.682316303253174, + "learning_rate": 1.8993564506439687e-05, + "loss": 2.0993, + "step": 828 + }, + { + "epoch": 0.5101146065687255, + "grad_norm": 5.649241924285889, + "learning_rate": 1.899065737089785e-05, + "loss": 2.0109, + "step": 829 + }, + { + "epoch": 0.510729943850473, + "grad_norm": 6.038517951965332, + "learning_rate": 1.898774626585203e-05, + "loss": 2.1263, + "step": 830 + }, + { + "epoch": 0.5113452811322206, + "grad_norm": 8.011950492858887, + "learning_rate": 1.898483119258754e-05, + "loss": 2.1193, + "step": 831 + 
}, + { + "epoch": 0.5119606184139681, + "grad_norm": 6.183477878570557, + "learning_rate": 1.8981912152391417e-05, + "loss": 2.1121, + "step": 832 + }, + { + "epoch": 0.5125759556957157, + "grad_norm": 6.9221038818359375, + "learning_rate": 1.8978989146552457e-05, + "loss": 2.1606, + "step": 833 + }, + { + "epoch": 0.5131912929774632, + "grad_norm": 7.778171062469482, + "learning_rate": 1.8976062176361213e-05, + "loss": 2.1426, + "step": 834 + }, + { + "epoch": 0.5138066302592108, + "grad_norm": 6.583261489868164, + "learning_rate": 1.8973131243109987e-05, + "loss": 2.0183, + "step": 835 + }, + { + "epoch": 0.5144219675409584, + "grad_norm": 6.350189685821533, + "learning_rate": 1.8970196348092824e-05, + "loss": 2.2703, + "step": 836 + }, + { + "epoch": 0.5150373048227059, + "grad_norm": 6.215139865875244, + "learning_rate": 1.8967257492605522e-05, + "loss": 1.818, + "step": 837 + }, + { + "epoch": 0.5156526421044535, + "grad_norm": 8.452610969543457, + "learning_rate": 1.896431467794563e-05, + "loss": 1.9942, + "step": 838 + }, + { + "epoch": 0.5162679793862011, + "grad_norm": 6.459280967712402, + "learning_rate": 1.896136790541244e-05, + "loss": 2.06, + "step": 839 + }, + { + "epoch": 0.5168833166679486, + "grad_norm": 6.528064250946045, + "learning_rate": 1.8958417176307e-05, + "loss": 2.063, + "step": 840 + }, + { + "epoch": 0.5174986539496962, + "grad_norm": 7.5022501945495605, + "learning_rate": 1.8955462491932088e-05, + "loss": 2.0567, + "step": 841 + }, + { + "epoch": 0.5181139912314437, + "grad_norm": 6.257793426513672, + "learning_rate": 1.895250385359225e-05, + "loss": 2.1465, + "step": 842 + }, + { + "epoch": 0.5187293285131913, + "grad_norm": 7.571986675262451, + "learning_rate": 1.8949541262593764e-05, + "loss": 2.2744, + "step": 843 + }, + { + "epoch": 0.5193446657949389, + "grad_norm": 7.789044380187988, + "learning_rate": 1.8946574720244656e-05, + "loss": 1.9569, + "step": 844 + }, + { + "epoch": 0.5199600030766864, + "grad_norm": 7.156865119934082, + "learning_rate": 1.8943604227854696e-05, + "loss": 2.1041, + "step": 845 + }, + { + "epoch": 0.520575340358434, + "grad_norm": 5.661634922027588, + "learning_rate": 1.89406297867354e-05, + "loss": 1.9395, + "step": 846 + }, + { + "epoch": 0.5211906776401816, + "grad_norm": 6.903003215789795, + "learning_rate": 1.8937651398200027e-05, + "loss": 1.9009, + "step": 847 + }, + { + "epoch": 0.5218060149219291, + "grad_norm": 6.922883033752441, + "learning_rate": 1.8934669063563577e-05, + "loss": 2.3146, + "step": 848 + }, + { + "epoch": 0.5224213522036767, + "grad_norm": 5.620602607727051, + "learning_rate": 1.8931682784142792e-05, + "loss": 2.227, + "step": 849 + }, + { + "epoch": 0.5230366894854241, + "grad_norm": 6.603902816772461, + "learning_rate": 1.8928692561256162e-05, + "loss": 2.2299, + "step": 850 + }, + { + "epoch": 0.5236520267671717, + "grad_norm": 7.2959513664245605, + "learning_rate": 1.892569839622391e-05, + "loss": 2.0705, + "step": 851 + }, + { + "epoch": 0.5242673640489193, + "grad_norm": 6.09302282333374, + "learning_rate": 1.8922700290367998e-05, + "loss": 2.114, + "step": 852 + }, + { + "epoch": 0.5248827013306668, + "grad_norm": 6.344806671142578, + "learning_rate": 1.8919698245012143e-05, + "loss": 2.0855, + "step": 853 + }, + { + "epoch": 0.5254980386124144, + "grad_norm": 6.6020121574401855, + "learning_rate": 1.8916692261481784e-05, + "loss": 2.349, + "step": 854 + }, + { + "epoch": 0.526113375894162, + "grad_norm": 6.052628517150879, + "learning_rate": 1.8913682341104107e-05, + "loss": 1.9108, + "step": 
855 + }, + { + "epoch": 0.5267287131759095, + "grad_norm": 9.044781684875488, + "learning_rate": 1.8910668485208036e-05, + "loss": 2.5235, + "step": 856 + }, + { + "epoch": 0.5273440504576571, + "grad_norm": 7.7590250968933105, + "learning_rate": 1.8907650695124227e-05, + "loss": 2.2159, + "step": 857 + }, + { + "epoch": 0.5279593877394047, + "grad_norm": 6.473677635192871, + "learning_rate": 1.8904628972185086e-05, + "loss": 1.9787, + "step": 858 + }, + { + "epoch": 0.5285747250211522, + "grad_norm": 6.527915954589844, + "learning_rate": 1.8901603317724742e-05, + "loss": 2.0272, + "step": 859 + }, + { + "epoch": 0.5291900623028998, + "grad_norm": 7.150423526763916, + "learning_rate": 1.8898573733079062e-05, + "loss": 2.0951, + "step": 860 + }, + { + "epoch": 0.5298053995846473, + "grad_norm": 6.667748928070068, + "learning_rate": 1.8895540219585656e-05, + "loss": 2.1577, + "step": 861 + }, + { + "epoch": 0.5304207368663949, + "grad_norm": 6.240249156951904, + "learning_rate": 1.8892502778583864e-05, + "loss": 1.9896, + "step": 862 + }, + { + "epoch": 0.5310360741481425, + "grad_norm": 7.589881420135498, + "learning_rate": 1.8889461411414754e-05, + "loss": 1.9928, + "step": 863 + }, + { + "epoch": 0.53165141142989, + "grad_norm": 5.7154130935668945, + "learning_rate": 1.888641611942114e-05, + "loss": 2.2784, + "step": 864 + }, + { + "epoch": 0.5322667487116376, + "grad_norm": 5.6380839347839355, + "learning_rate": 1.8883366903947554e-05, + "loss": 2.0409, + "step": 865 + }, + { + "epoch": 0.5328820859933852, + "grad_norm": 8.035577774047852, + "learning_rate": 1.8880313766340274e-05, + "loss": 2.1061, + "step": 866 + }, + { + "epoch": 0.5334974232751327, + "grad_norm": 7.249325275421143, + "learning_rate": 1.8877256707947308e-05, + "loss": 2.2327, + "step": 867 + }, + { + "epoch": 0.5341127605568803, + "grad_norm": 5.792886734008789, + "learning_rate": 1.887419573011838e-05, + "loss": 1.9047, + "step": 868 + }, + { + "epoch": 0.5347280978386278, + "grad_norm": 8.157000541687012, + "learning_rate": 1.8871130834204964e-05, + "loss": 2.0659, + "step": 869 + }, + { + "epoch": 0.5353434351203754, + "grad_norm": 8.46573543548584, + "learning_rate": 1.8868062021560254e-05, + "loss": 2.1608, + "step": 870 + }, + { + "epoch": 0.5359587724021229, + "grad_norm": 6.364355564117432, + "learning_rate": 1.886498929353917e-05, + "loss": 2.0781, + "step": 871 + }, + { + "epoch": 0.5365741096838704, + "grad_norm": 6.999208450317383, + "learning_rate": 1.8861912651498372e-05, + "loss": 1.9468, + "step": 872 + }, + { + "epoch": 0.537189446965618, + "grad_norm": 5.838455677032471, + "learning_rate": 1.885883209679624e-05, + "loss": 2.1075, + "step": 873 + }, + { + "epoch": 0.5378047842473656, + "grad_norm": 6.2151970863342285, + "learning_rate": 1.8855747630792878e-05, + "loss": 2.219, + "step": 874 + }, + { + "epoch": 0.5384201215291131, + "grad_norm": 5.593589782714844, + "learning_rate": 1.8852659254850128e-05, + "loss": 2.0471, + "step": 875 + }, + { + "epoch": 0.5390354588108607, + "grad_norm": 6.373289108276367, + "learning_rate": 1.884956697033155e-05, + "loss": 2.0088, + "step": 876 + }, + { + "epoch": 0.5396507960926082, + "grad_norm": 5.725294589996338, + "learning_rate": 1.8846470778602427e-05, + "loss": 2.2311, + "step": 877 + }, + { + "epoch": 0.5402661333743558, + "grad_norm": 5.983836650848389, + "learning_rate": 1.884337068102978e-05, + "loss": 2.0746, + "step": 878 + }, + { + "epoch": 0.5408814706561034, + "grad_norm": 6.97803258895874, + "learning_rate": 1.8840266678982343e-05, + "loss": 
2.01, + "step": 879 + }, + { + "epoch": 0.5414968079378509, + "grad_norm": 7.540825843811035, + "learning_rate": 1.883715877383058e-05, + "loss": 1.9876, + "step": 880 + }, + { + "epoch": 0.5421121452195985, + "grad_norm": 6.3205647468566895, + "learning_rate": 1.883404696694667e-05, + "loss": 2.2161, + "step": 881 + }, + { + "epoch": 0.5427274825013461, + "grad_norm": 6.25108528137207, + "learning_rate": 1.883093125970452e-05, + "loss": 2.0445, + "step": 882 + }, + { + "epoch": 0.5433428197830936, + "grad_norm": 5.923949718475342, + "learning_rate": 1.882781165347977e-05, + "loss": 2.254, + "step": 883 + }, + { + "epoch": 0.5439581570648412, + "grad_norm": 5.425839424133301, + "learning_rate": 1.882468814964976e-05, + "loss": 2.1657, + "step": 884 + }, + { + "epoch": 0.5445734943465887, + "grad_norm": 6.041243076324463, + "learning_rate": 1.8821560749593564e-05, + "loss": 2.119, + "step": 885 + }, + { + "epoch": 0.5451888316283363, + "grad_norm": 6.613852500915527, + "learning_rate": 1.8818429454691974e-05, + "loss": 2.179, + "step": 886 + }, + { + "epoch": 0.5458041689100839, + "grad_norm": 7.69509744644165, + "learning_rate": 1.8815294266327507e-05, + "loss": 2.1971, + "step": 887 + }, + { + "epoch": 0.5464195061918314, + "grad_norm": 6.67446756362915, + "learning_rate": 1.8812155185884385e-05, + "loss": 2.2164, + "step": 888 + }, + { + "epoch": 0.547034843473579, + "grad_norm": 6.411123275756836, + "learning_rate": 1.8809012214748567e-05, + "loss": 1.97, + "step": 889 + }, + { + "epoch": 0.5476501807553266, + "grad_norm": 5.884811878204346, + "learning_rate": 1.880586535430771e-05, + "loss": 2.114, + "step": 890 + }, + { + "epoch": 0.548265518037074, + "grad_norm": 6.397862911224365, + "learning_rate": 1.88027146059512e-05, + "loss": 2.115, + "step": 891 + }, + { + "epoch": 0.5488808553188216, + "grad_norm": 6.290619850158691, + "learning_rate": 1.879955997107014e-05, + "loss": 2.061, + "step": 892 + }, + { + "epoch": 0.5494961926005691, + "grad_norm": 8.000036239624023, + "learning_rate": 1.8796401451057348e-05, + "loss": 2.3515, + "step": 893 + }, + { + "epoch": 0.5501115298823167, + "grad_norm": 5.554999351501465, + "learning_rate": 1.8793239047307358e-05, + "loss": 2.1328, + "step": 894 + }, + { + "epoch": 0.5507268671640643, + "grad_norm": 7.2834296226501465, + "learning_rate": 1.8790072761216406e-05, + "loss": 2.0137, + "step": 895 + }, + { + "epoch": 0.5513422044458118, + "grad_norm": 7.129612922668457, + "learning_rate": 1.878690259418246e-05, + "loss": 2.093, + "step": 896 + }, + { + "epoch": 0.5519575417275594, + "grad_norm": 5.697007656097412, + "learning_rate": 1.8783728547605192e-05, + "loss": 2.1213, + "step": 897 + }, + { + "epoch": 0.552572879009307, + "grad_norm": 6.530676364898682, + "learning_rate": 1.878055062288599e-05, + "loss": 1.9558, + "step": 898 + }, + { + "epoch": 0.5531882162910545, + "grad_norm": 8.088905334472656, + "learning_rate": 1.8777368821427954e-05, + "loss": 1.9357, + "step": 899 + }, + { + "epoch": 0.5538035535728021, + "grad_norm": 5.695494174957275, + "learning_rate": 1.8774183144635894e-05, + "loss": 1.8888, + "step": 900 + }, + { + "epoch": 0.5544188908545497, + "grad_norm": 6.9519171714782715, + "learning_rate": 1.8770993593916332e-05, + "loss": 2.142, + "step": 901 + }, + { + "epoch": 0.5550342281362972, + "grad_norm": 6.295409679412842, + "learning_rate": 1.8767800170677505e-05, + "loss": 2.1366, + "step": 902 + }, + { + "epoch": 0.5556495654180448, + "grad_norm": 7.492076396942139, + "learning_rate": 1.8764602876329346e-05, + "loss": 
2.0921, + "step": 903 + }, + { + "epoch": 0.5562649026997923, + "grad_norm": 7.407884120941162, + "learning_rate": 1.8761401712283513e-05, + "loss": 2.2969, + "step": 904 + }, + { + "epoch": 0.5568802399815399, + "grad_norm": 7.565658092498779, + "learning_rate": 1.875819667995336e-05, + "loss": 2.0557, + "step": 905 + }, + { + "epoch": 0.5574955772632875, + "grad_norm": 8.184779167175293, + "learning_rate": 1.875498778075396e-05, + "loss": 2.3547, + "step": 906 + }, + { + "epoch": 0.558110914545035, + "grad_norm": 6.091540336608887, + "learning_rate": 1.8751775016102087e-05, + "loss": 2.1706, + "step": 907 + }, + { + "epoch": 0.5587262518267826, + "grad_norm": 5.0458574295043945, + "learning_rate": 1.874855838741622e-05, + "loss": 2.0711, + "step": 908 + }, + { + "epoch": 0.5593415891085302, + "grad_norm": 6.050383567810059, + "learning_rate": 1.874533789611655e-05, + "loss": 1.868, + "step": 909 + }, + { + "epoch": 0.5599569263902777, + "grad_norm": 7.359173774719238, + "learning_rate": 1.874211354362497e-05, + "loss": 2.0393, + "step": 910 + }, + { + "epoch": 0.5605722636720253, + "grad_norm": 6.323384761810303, + "learning_rate": 1.8738885331365073e-05, + "loss": 2.1559, + "step": 911 + }, + { + "epoch": 0.5611876009537727, + "grad_norm": 6.152583122253418, + "learning_rate": 1.8735653260762165e-05, + "loss": 2.0891, + "step": 912 + }, + { + "epoch": 0.5618029382355203, + "grad_norm": 6.174867630004883, + "learning_rate": 1.873241733324325e-05, + "loss": 1.9641, + "step": 913 + }, + { + "epoch": 0.5624182755172679, + "grad_norm": 7.503718376159668, + "learning_rate": 1.8729177550237038e-05, + "loss": 2.0832, + "step": 914 + }, + { + "epoch": 0.5630336127990154, + "grad_norm": 6.044493198394775, + "learning_rate": 1.872593391317394e-05, + "loss": 2.2453, + "step": 915 + }, + { + "epoch": 0.563648950080763, + "grad_norm": 6.809248447418213, + "learning_rate": 1.8722686423486066e-05, + "loss": 2.1014, + "step": 916 + }, + { + "epoch": 0.5642642873625106, + "grad_norm": 7.070122241973877, + "learning_rate": 1.871943508260723e-05, + "loss": 1.8878, + "step": 917 + }, + { + "epoch": 0.5648796246442581, + "grad_norm": 6.298173904418945, + "learning_rate": 1.8716179891972942e-05, + "loss": 2.0931, + "step": 918 + }, + { + "epoch": 0.5654949619260057, + "grad_norm": 7.3790154457092285, + "learning_rate": 1.871292085302042e-05, + "loss": 2.0948, + "step": 919 + }, + { + "epoch": 0.5661102992077532, + "grad_norm": 6.696272373199463, + "learning_rate": 1.8709657967188576e-05, + "loss": 2.0819, + "step": 920 + }, + { + "epoch": 0.5667256364895008, + "grad_norm": 6.298511505126953, + "learning_rate": 1.8706391235918022e-05, + "loss": 1.9605, + "step": 921 + }, + { + "epoch": 0.5673409737712484, + "grad_norm": 6.252894401550293, + "learning_rate": 1.870312066065106e-05, + "loss": 2.1632, + "step": 922 + }, + { + "epoch": 0.5679563110529959, + "grad_norm": 6.138218402862549, + "learning_rate": 1.8699846242831707e-05, + "loss": 2.203, + "step": 923 + }, + { + "epoch": 0.5685716483347435, + "grad_norm": 6.9497270584106445, + "learning_rate": 1.8696567983905655e-05, + "loss": 1.8706, + "step": 924 + }, + { + "epoch": 0.5691869856164911, + "grad_norm": 6.054098606109619, + "learning_rate": 1.8693285885320305e-05, + "loss": 2.1358, + "step": 925 + }, + { + "epoch": 0.5698023228982386, + "grad_norm": 5.313518047332764, + "learning_rate": 1.8689999948524753e-05, + "loss": 2.1191, + "step": 926 + }, + { + "epoch": 0.5704176601799862, + "grad_norm": 5.863306522369385, + "learning_rate": 1.8686710174969786e-05, 
+ "loss": 2.0987, + "step": 927 + }, + { + "epoch": 0.5710329974617337, + "grad_norm": 6.7454118728637695, + "learning_rate": 1.8683416566107886e-05, + "loss": 2.0428, + "step": 928 + }, + { + "epoch": 0.5716483347434813, + "grad_norm": 6.870327949523926, + "learning_rate": 1.8680119123393225e-05, + "loss": 2.2735, + "step": 929 + }, + { + "epoch": 0.5722636720252289, + "grad_norm": 5.402509689331055, + "learning_rate": 1.8676817848281677e-05, + "loss": 2.0606, + "step": 930 + }, + { + "epoch": 0.5728790093069764, + "grad_norm": 6.9694952964782715, + "learning_rate": 1.86735127422308e-05, + "loss": 1.7324, + "step": 931 + }, + { + "epoch": 0.5734943465887239, + "grad_norm": 5.7062296867370605, + "learning_rate": 1.8670203806699845e-05, + "loss": 2.0868, + "step": 932 + }, + { + "epoch": 0.5741096838704715, + "grad_norm": 6.769274711608887, + "learning_rate": 1.8666891043149757e-05, + "loss": 2.1087, + "step": 933 + }, + { + "epoch": 0.574725021152219, + "grad_norm": 7.217459201812744, + "learning_rate": 1.8663574453043162e-05, + "loss": 2.1046, + "step": 934 + }, + { + "epoch": 0.5753403584339666, + "grad_norm": 7.107117176055908, + "learning_rate": 1.866025403784439e-05, + "loss": 1.9108, + "step": 935 + }, + { + "epoch": 0.5759556957157141, + "grad_norm": 7.855188369750977, + "learning_rate": 1.8656929799019444e-05, + "loss": 2.1562, + "step": 936 + }, + { + "epoch": 0.5765710329974617, + "grad_norm": 6.321917533874512, + "learning_rate": 1.8653601738036033e-05, + "loss": 2.1142, + "step": 937 + }, + { + "epoch": 0.5771863702792093, + "grad_norm": 6.714559078216553, + "learning_rate": 1.8650269856363532e-05, + "loss": 2.3876, + "step": 938 + }, + { + "epoch": 0.5778017075609568, + "grad_norm": 6.086818218231201, + "learning_rate": 1.8646934155473025e-05, + "loss": 2.1042, + "step": 939 + }, + { + "epoch": 0.5784170448427044, + "grad_norm": 6.615048885345459, + "learning_rate": 1.8643594636837262e-05, + "loss": 1.958, + "step": 940 + }, + { + "epoch": 0.579032382124452, + "grad_norm": 7.887397289276123, + "learning_rate": 1.8640251301930696e-05, + "loss": 2.4313, + "step": 941 + }, + { + "epoch": 0.5796477194061995, + "grad_norm": 6.632091045379639, + "learning_rate": 1.8636904152229455e-05, + "loss": 2.0475, + "step": 942 + }, + { + "epoch": 0.5802630566879471, + "grad_norm": 5.7257256507873535, + "learning_rate": 1.8633553189211353e-05, + "loss": 1.9698, + "step": 943 + }, + { + "epoch": 0.5808783939696947, + "grad_norm": 6.3653106689453125, + "learning_rate": 1.8630198414355888e-05, + "loss": 2.1525, + "step": 944 + }, + { + "epoch": 0.5814937312514422, + "grad_norm": 6.766574859619141, + "learning_rate": 1.8626839829144244e-05, + "loss": 1.9025, + "step": 945 + }, + { + "epoch": 0.5821090685331898, + "grad_norm": 6.703938007354736, + "learning_rate": 1.862347743505928e-05, + "loss": 2.2198, + "step": 946 + }, + { + "epoch": 0.5827244058149373, + "grad_norm": 5.868868827819824, + "learning_rate": 1.862011123358554e-05, + "loss": 1.948, + "step": 947 + }, + { + "epoch": 0.5833397430966849, + "grad_norm": 6.220643997192383, + "learning_rate": 1.8616741226209256e-05, + "loss": 2.2143, + "step": 948 + }, + { + "epoch": 0.5839550803784325, + "grad_norm": 6.305080890655518, + "learning_rate": 1.861336741441833e-05, + "loss": 2.0507, + "step": 949 + }, + { + "epoch": 0.58457041766018, + "grad_norm": 6.605630397796631, + "learning_rate": 1.8609989799702352e-05, + "loss": 1.9782, + "step": 950 + }, + { + "epoch": 0.5851857549419276, + "grad_norm": 6.669106960296631, + "learning_rate": 
1.8606608383552583e-05, + "loss": 2.1681, + "step": 951 + }, + { + "epoch": 0.5858010922236752, + "grad_norm": 8.118387222290039, + "learning_rate": 1.860322316746197e-05, + "loss": 2.0882, + "step": 952 + }, + { + "epoch": 0.5864164295054226, + "grad_norm": 6.287439346313477, + "learning_rate": 1.859983415292513e-05, + "loss": 2.0354, + "step": 953 + }, + { + "epoch": 0.5870317667871702, + "grad_norm": 7.643050193786621, + "learning_rate": 1.859644134143837e-05, + "loss": 2.0217, + "step": 954 + }, + { + "epoch": 0.5876471040689177, + "grad_norm": 6.502023696899414, + "learning_rate": 1.8593044734499657e-05, + "loss": 2.0585, + "step": 955 + }, + { + "epoch": 0.5882624413506653, + "grad_norm": 7.407291412353516, + "learning_rate": 1.8589644333608645e-05, + "loss": 1.9567, + "step": 956 + }, + { + "epoch": 0.5888777786324129, + "grad_norm": 6.3491058349609375, + "learning_rate": 1.8586240140266655e-05, + "loss": 2.1496, + "step": 957 + }, + { + "epoch": 0.5894931159141604, + "grad_norm": 5.963798999786377, + "learning_rate": 1.8582832155976694e-05, + "loss": 1.9845, + "step": 958 + }, + { + "epoch": 0.590108453195908, + "grad_norm": 6.546308517456055, + "learning_rate": 1.8579420382243433e-05, + "loss": 1.8729, + "step": 959 + }, + { + "epoch": 0.5907237904776556, + "grad_norm": 6.186330795288086, + "learning_rate": 1.8576004820573217e-05, + "loss": 2.1806, + "step": 960 + }, + { + "epoch": 0.5913391277594031, + "grad_norm": 7.788473129272461, + "learning_rate": 1.8572585472474068e-05, + "loss": 2.1341, + "step": 961 + }, + { + "epoch": 0.5919544650411507, + "grad_norm": 6.244277477264404, + "learning_rate": 1.8569162339455675e-05, + "loss": 2.2203, + "step": 962 + }, + { + "epoch": 0.5925698023228982, + "grad_norm": 5.311842441558838, + "learning_rate": 1.8565735423029406e-05, + "loss": 2.0185, + "step": 963 + }, + { + "epoch": 0.5931851396046458, + "grad_norm": 6.12239933013916, + "learning_rate": 1.8562304724708285e-05, + "loss": 2.1565, + "step": 964 + }, + { + "epoch": 0.5938004768863934, + "grad_norm": 7.605432033538818, + "learning_rate": 1.8558870246007015e-05, + "loss": 1.9486, + "step": 965 + }, + { + "epoch": 0.5944158141681409, + "grad_norm": 6.076027870178223, + "learning_rate": 1.8555431988441977e-05, + "loss": 2.1557, + "step": 966 + }, + { + "epoch": 0.5950311514498885, + "grad_norm": 6.009294033050537, + "learning_rate": 1.8551989953531204e-05, + "loss": 2.1645, + "step": 967 + }, + { + "epoch": 0.5956464887316361, + "grad_norm": 7.158121109008789, + "learning_rate": 1.8548544142794404e-05, + "loss": 2.2575, + "step": 968 + }, + { + "epoch": 0.5962618260133836, + "grad_norm": 6.902040481567383, + "learning_rate": 1.854509455775295e-05, + "loss": 2.1078, + "step": 969 + }, + { + "epoch": 0.5968771632951312, + "grad_norm": 7.241510391235352, + "learning_rate": 1.854164119992989e-05, + "loss": 2.04, + "step": 970 + }, + { + "epoch": 0.5974925005768787, + "grad_norm": 7.6888227462768555, + "learning_rate": 1.8538184070849926e-05, + "loss": 2.1314, + "step": 971 + }, + { + "epoch": 0.5981078378586263, + "grad_norm": 5.7001848220825195, + "learning_rate": 1.853472317203943e-05, + "loss": 2.051, + "step": 972 + }, + { + "epoch": 0.5987231751403738, + "grad_norm": 5.35050106048584, + "learning_rate": 1.853125850502644e-05, + "loss": 2.0456, + "step": 973 + }, + { + "epoch": 0.5993385124221213, + "grad_norm": 6.306005477905273, + "learning_rate": 1.852779007134065e-05, + "loss": 2.0917, + "step": 974 + }, + { + "epoch": 0.5999538497038689, + "grad_norm": 7.854207515716553, + 
"learning_rate": 1.8524317872513434e-05, + "loss": 1.9323, + "step": 975 + }, + { + "epoch": 0.6005691869856165, + "grad_norm": 12.401104927062988, + "learning_rate": 1.852084191007781e-05, + "loss": 2.2677, + "step": 976 + }, + { + "epoch": 0.601184524267364, + "grad_norm": 7.277547359466553, + "learning_rate": 1.8517362185568465e-05, + "loss": 1.9361, + "step": 977 + }, + { + "epoch": 0.6017998615491116, + "grad_norm": 6.134153366088867, + "learning_rate": 1.8513878700521752e-05, + "loss": 1.9506, + "step": 978 + }, + { + "epoch": 0.6024151988308591, + "grad_norm": 7.651949405670166, + "learning_rate": 1.8510391456475674e-05, + "loss": 1.9156, + "step": 979 + }, + { + "epoch": 0.6030305361126067, + "grad_norm": 6.357011795043945, + "learning_rate": 1.8506900454969904e-05, + "loss": 1.9001, + "step": 980 + }, + { + "epoch": 0.6036458733943543, + "grad_norm": 5.814262866973877, + "learning_rate": 1.8503405697545767e-05, + "loss": 1.966, + "step": 981 + }, + { + "epoch": 0.6042612106761018, + "grad_norm": 5.703295707702637, + "learning_rate": 1.849990718574625e-05, + "loss": 1.8598, + "step": 982 + }, + { + "epoch": 0.6048765479578494, + "grad_norm": 7.121742248535156, + "learning_rate": 1.8496404921115993e-05, + "loss": 2.1093, + "step": 983 + }, + { + "epoch": 0.605491885239597, + "grad_norm": 5.948090076446533, + "learning_rate": 1.8492898905201297e-05, + "loss": 1.9377, + "step": 984 + }, + { + "epoch": 0.6061072225213445, + "grad_norm": 5.775730133056641, + "learning_rate": 1.8489389139550114e-05, + "loss": 2.1606, + "step": 985 + }, + { + "epoch": 0.6067225598030921, + "grad_norm": 7.18971061706543, + "learning_rate": 1.8485875625712068e-05, + "loss": 2.2367, + "step": 986 + }, + { + "epoch": 0.6073378970848397, + "grad_norm": 7.135913848876953, + "learning_rate": 1.8482358365238414e-05, + "loss": 2.2242, + "step": 987 + }, + { + "epoch": 0.6079532343665872, + "grad_norm": 6.361776828765869, + "learning_rate": 1.8478837359682076e-05, + "loss": 1.9576, + "step": 988 + }, + { + "epoch": 0.6085685716483348, + "grad_norm": 6.966860294342041, + "learning_rate": 1.8475312610597634e-05, + "loss": 2.2638, + "step": 989 + }, + { + "epoch": 0.6091839089300823, + "grad_norm": 7.213875770568848, + "learning_rate": 1.847178411954131e-05, + "loss": 2.0697, + "step": 990 + }, + { + "epoch": 0.6097992462118299, + "grad_norm": 6.6561479568481445, + "learning_rate": 1.8468251888070982e-05, + "loss": 2.0922, + "step": 991 + }, + { + "epoch": 0.6104145834935775, + "grad_norm": 8.228968620300293, + "learning_rate": 1.8464715917746182e-05, + "loss": 2.1334, + "step": 992 + }, + { + "epoch": 0.6110299207753249, + "grad_norm": 5.728860378265381, + "learning_rate": 1.8461176210128095e-05, + "loss": 2.2847, + "step": 993 + }, + { + "epoch": 0.6116452580570725, + "grad_norm": 6.3604817390441895, + "learning_rate": 1.845763276677955e-05, + "loss": 2.09, + "step": 994 + }, + { + "epoch": 0.61226059533882, + "grad_norm": 5.697984218597412, + "learning_rate": 1.845408558926502e-05, + "loss": 2.083, + "step": 995 + }, + { + "epoch": 0.6128759326205676, + "grad_norm": 7.816307544708252, + "learning_rate": 1.845053467915065e-05, + "loss": 1.8783, + "step": 996 + }, + { + "epoch": 0.6134912699023152, + "grad_norm": 6.54819393157959, + "learning_rate": 1.844698003800421e-05, + "loss": 2.1643, + "step": 997 + }, + { + "epoch": 0.6141066071840627, + "grad_norm": 6.919043064117432, + "learning_rate": 1.8443421667395122e-05, + "loss": 2.037, + "step": 998 + }, + { + "epoch": 0.6147219444658103, + "grad_norm": 
7.897428512573242, + "learning_rate": 1.8439859568894464e-05, + "loss": 2.2525, + "step": 999 + }, + { + "epoch": 0.6153372817475579, + "grad_norm": 6.361285209655762, + "learning_rate": 1.843629374407495e-05, + "loss": 2.0702, + "step": 1000 + }, + { + "epoch": 0.6159526190293054, + "grad_norm": 6.025935173034668, + "learning_rate": 1.843272419451094e-05, + "loss": 2.0337, + "step": 1001 + }, + { + "epoch": 0.616567956311053, + "grad_norm": 7.167662620544434, + "learning_rate": 1.8429150921778448e-05, + "loss": 1.9035, + "step": 1002 + }, + { + "epoch": 0.6171832935928006, + "grad_norm": 6.278049468994141, + "learning_rate": 1.842557392745512e-05, + "loss": 2.2255, + "step": 1003 + }, + { + "epoch": 0.6177986308745481, + "grad_norm": 5.913668155670166, + "learning_rate": 1.842199321312025e-05, + "loss": 2.0066, + "step": 1004 + }, + { + "epoch": 0.6184139681562957, + "grad_norm": 6.883291244506836, + "learning_rate": 1.8418408780354776e-05, + "loss": 2.1794, + "step": 1005 + }, + { + "epoch": 0.6190293054380432, + "grad_norm": 6.502501010894775, + "learning_rate": 1.8414820630741278e-05, + "loss": 1.9461, + "step": 1006 + }, + { + "epoch": 0.6196446427197908, + "grad_norm": 6.383057594299316, + "learning_rate": 1.8411228765863973e-05, + "loss": 2.1078, + "step": 1007 + }, + { + "epoch": 0.6202599800015384, + "grad_norm": 6.007820129394531, + "learning_rate": 1.8407633187308722e-05, + "loss": 2.0381, + "step": 1008 + }, + { + "epoch": 0.6208753172832859, + "grad_norm": 6.136646747589111, + "learning_rate": 1.8404033896663022e-05, + "loss": 2.0615, + "step": 1009 + }, + { + "epoch": 0.6214906545650335, + "grad_norm": 5.929768085479736, + "learning_rate": 1.840043089551602e-05, + "loss": 2.2102, + "step": 1010 + }, + { + "epoch": 0.6221059918467811, + "grad_norm": 6.92222261428833, + "learning_rate": 1.839682418545848e-05, + "loss": 2.025, + "step": 1011 + }, + { + "epoch": 0.6227213291285286, + "grad_norm": 7.819103240966797, + "learning_rate": 1.8393213768082823e-05, + "loss": 1.8955, + "step": 1012 + }, + { + "epoch": 0.6233366664102762, + "grad_norm": 8.904016494750977, + "learning_rate": 1.8389599644983102e-05, + "loss": 1.6673, + "step": 1013 + }, + { + "epoch": 0.6239520036920236, + "grad_norm": 6.5159149169921875, + "learning_rate": 1.8385981817754997e-05, + "loss": 2.1988, + "step": 1014 + }, + { + "epoch": 0.6245673409737712, + "grad_norm": 6.578975200653076, + "learning_rate": 1.838236028799584e-05, + "loss": 2.1403, + "step": 1015 + }, + { + "epoch": 0.6251826782555188, + "grad_norm": 7.636806488037109, + "learning_rate": 1.837873505730458e-05, + "loss": 2.1671, + "step": 1016 + }, + { + "epoch": 0.6257980155372663, + "grad_norm": 6.6806769371032715, + "learning_rate": 1.837510612728181e-05, + "loss": 2.0918, + "step": 1017 + }, + { + "epoch": 0.6264133528190139, + "grad_norm": 6.2169623374938965, + "learning_rate": 1.837147349952976e-05, + "loss": 2.2221, + "step": 1018 + }, + { + "epoch": 0.6270286901007615, + "grad_norm": 7.413076877593994, + "learning_rate": 1.8367837175652284e-05, + "loss": 1.9138, + "step": 1019 + }, + { + "epoch": 0.627644027382509, + "grad_norm": 6.9115471839904785, + "learning_rate": 1.8364197157254863e-05, + "loss": 2.0442, + "step": 1020 + }, + { + "epoch": 0.6282593646642566, + "grad_norm": 6.251332759857178, + "learning_rate": 1.8360553445944632e-05, + "loss": 1.9896, + "step": 1021 + }, + { + "epoch": 0.6288747019460041, + "grad_norm": 7.269402503967285, + "learning_rate": 1.8356906043330333e-05, + "loss": 1.8786, + "step": 1022 + }, + { + "epoch": 
0.6294900392277517, + "grad_norm": 7.804546356201172, + "learning_rate": 1.8353254951022342e-05, + "loss": 1.9203, + "step": 1023 + }, + { + "epoch": 0.6301053765094993, + "grad_norm": 6.854145526885986, + "learning_rate": 1.834960017063268e-05, + "loss": 2.3358, + "step": 1024 + }, + { + "epoch": 0.6307207137912468, + "grad_norm": 8.5806884765625, + "learning_rate": 1.8345941703774975e-05, + "loss": 1.8954, + "step": 1025 + }, + { + "epoch": 0.6313360510729944, + "grad_norm": 6.047177791595459, + "learning_rate": 1.83422795520645e-05, + "loss": 2.2049, + "step": 1026 + }, + { + "epoch": 0.631951388354742, + "grad_norm": 6.210206985473633, + "learning_rate": 1.833861371711814e-05, + "loss": 1.8922, + "step": 1027 + }, + { + "epoch": 0.6325667256364895, + "grad_norm": 5.595113754272461, + "learning_rate": 1.833494420055442e-05, + "loss": 1.8231, + "step": 1028 + }, + { + "epoch": 0.6331820629182371, + "grad_norm": 6.599974155426025, + "learning_rate": 1.833127100399348e-05, + "loss": 2.1284, + "step": 1029 + }, + { + "epoch": 0.6337974001999847, + "grad_norm": 7.5806660652160645, + "learning_rate": 1.8327594129057095e-05, + "loss": 2.2632, + "step": 1030 + }, + { + "epoch": 0.6344127374817322, + "grad_norm": 6.944150447845459, + "learning_rate": 1.832391357736865e-05, + "loss": 2.1003, + "step": 1031 + }, + { + "epoch": 0.6350280747634798, + "grad_norm": 6.150269031524658, + "learning_rate": 1.8320229350553165e-05, + "loss": 1.8456, + "step": 1032 + }, + { + "epoch": 0.6356434120452273, + "grad_norm": 5.010760307312012, + "learning_rate": 1.831654145023728e-05, + "loss": 2.0136, + "step": 1033 + }, + { + "epoch": 0.6362587493269748, + "grad_norm": 5.488649845123291, + "learning_rate": 1.8312849878049253e-05, + "loss": 2.1125, + "step": 1034 + }, + { + "epoch": 0.6368740866087224, + "grad_norm": 7.516749858856201, + "learning_rate": 1.8309154635618967e-05, + "loss": 2.093, + "step": 1035 + }, + { + "epoch": 0.6374894238904699, + "grad_norm": 8.405596733093262, + "learning_rate": 1.8305455724577926e-05, + "loss": 2.0981, + "step": 1036 + }, + { + "epoch": 0.6381047611722175, + "grad_norm": 6.465787410736084, + "learning_rate": 1.830175314655925e-05, + "loss": 1.74, + "step": 1037 + }, + { + "epoch": 0.638720098453965, + "grad_norm": 6.678417682647705, + "learning_rate": 1.8298046903197685e-05, + "loss": 1.9108, + "step": 1038 + }, + { + "epoch": 0.6393354357357126, + "grad_norm": 6.765127658843994, + "learning_rate": 1.829433699612958e-05, + "loss": 2.0281, + "step": 1039 + }, + { + "epoch": 0.6399507730174602, + "grad_norm": 8.378372192382812, + "learning_rate": 1.8290623426992928e-05, + "loss": 2.0203, + "step": 1040 + }, + { + "epoch": 0.6405661102992077, + "grad_norm": 6.120928764343262, + "learning_rate": 1.828690619742731e-05, + "loss": 2.0911, + "step": 1041 + }, + { + "epoch": 0.6411814475809553, + "grad_norm": 6.111254692077637, + "learning_rate": 1.8283185309073937e-05, + "loss": 1.986, + "step": 1042 + }, + { + "epoch": 0.6417967848627029, + "grad_norm": 6.288693428039551, + "learning_rate": 1.827946076357564e-05, + "loss": 2.169, + "step": 1043 + }, + { + "epoch": 0.6424121221444504, + "grad_norm": 6.435813903808594, + "learning_rate": 1.8275732562576857e-05, + "loss": 2.1177, + "step": 1044 + }, + { + "epoch": 0.643027459426198, + "grad_norm": 6.083995819091797, + "learning_rate": 1.827200070772364e-05, + "loss": 2.0909, + "step": 1045 + }, + { + "epoch": 0.6436427967079456, + "grad_norm": 6.9806809425354, + "learning_rate": 1.8268265200663657e-05, + "loss": 1.9731, + "step": 
1046 + }, + { + "epoch": 0.6442581339896931, + "grad_norm": 6.7890238761901855, + "learning_rate": 1.8264526043046193e-05, + "loss": 2.1301, + "step": 1047 + }, + { + "epoch": 0.6448734712714407, + "grad_norm": 9.693398475646973, + "learning_rate": 1.8260783236522136e-05, + "loss": 2.1093, + "step": 1048 + }, + { + "epoch": 0.6454888085531882, + "grad_norm": 6.656716823577881, + "learning_rate": 1.8257036782743983e-05, + "loss": 2.0422, + "step": 1049 + }, + { + "epoch": 0.6461041458349358, + "grad_norm": 8.320241928100586, + "learning_rate": 1.8253286683365855e-05, + "loss": 2.2005, + "step": 1050 + }, + { + "epoch": 0.6467194831166834, + "grad_norm": 5.749904155731201, + "learning_rate": 1.824953294004347e-05, + "loss": 2.1326, + "step": 1051 + }, + { + "epoch": 0.6473348203984309, + "grad_norm": 7.717979907989502, + "learning_rate": 1.8245775554434163e-05, + "loss": 2.0087, + "step": 1052 + }, + { + "epoch": 0.6479501576801785, + "grad_norm": 7.95952033996582, + "learning_rate": 1.8242014528196875e-05, + "loss": 2.004, + "step": 1053 + }, + { + "epoch": 0.6485654949619261, + "grad_norm": 6.30872106552124, + "learning_rate": 1.8238249862992144e-05, + "loss": 2.1793, + "step": 1054 + }, + { + "epoch": 0.6491808322436735, + "grad_norm": 8.448993682861328, + "learning_rate": 1.8234481560482136e-05, + "loss": 2.1138, + "step": 1055 + }, + { + "epoch": 0.6497961695254211, + "grad_norm": 6.053447723388672, + "learning_rate": 1.8230709622330596e-05, + "loss": 2.1682, + "step": 1056 + }, + { + "epoch": 0.6504115068071686, + "grad_norm": 6.014619827270508, + "learning_rate": 1.8226934050202904e-05, + "loss": 2.2261, + "step": 1057 + }, + { + "epoch": 0.6510268440889162, + "grad_norm": 7.143733024597168, + "learning_rate": 1.822315484576602e-05, + "loss": 2.0033, + "step": 1058 + }, + { + "epoch": 0.6516421813706638, + "grad_norm": 7.496119499206543, + "learning_rate": 1.8219372010688516e-05, + "loss": 2.1048, + "step": 1059 + }, + { + "epoch": 0.6522575186524113, + "grad_norm": 5.83280086517334, + "learning_rate": 1.821558554664057e-05, + "loss": 2.2004, + "step": 1060 + }, + { + "epoch": 0.6528728559341589, + "grad_norm": 8.031530380249023, + "learning_rate": 1.8211795455293962e-05, + "loss": 1.9669, + "step": 1061 + }, + { + "epoch": 0.6534881932159065, + "grad_norm": 5.441525459289551, + "learning_rate": 1.820800173832207e-05, + "loss": 2.132, + "step": 1062 + }, + { + "epoch": 0.654103530497654, + "grad_norm": 6.648221015930176, + "learning_rate": 1.8204204397399875e-05, + "loss": 2.0074, + "step": 1063 + }, + { + "epoch": 0.6547188677794016, + "grad_norm": 8.271278381347656, + "learning_rate": 1.8200403434203957e-05, + "loss": 2.1021, + "step": 1064 + }, + { + "epoch": 0.6553342050611491, + "grad_norm": 5.864350318908691, + "learning_rate": 1.8196598850412496e-05, + "loss": 2.1952, + "step": 1065 + }, + { + "epoch": 0.6559495423428967, + "grad_norm": 6.868346691131592, + "learning_rate": 1.819279064770527e-05, + "loss": 2.1765, + "step": 1066 + }, + { + "epoch": 0.6565648796246443, + "grad_norm": 7.473548412322998, + "learning_rate": 1.8188978827763654e-05, + "loss": 2.2136, + "step": 1067 + }, + { + "epoch": 0.6571802169063918, + "grad_norm": 6.247257709503174, + "learning_rate": 1.8185163392270623e-05, + "loss": 2.1329, + "step": 1068 + }, + { + "epoch": 0.6577955541881394, + "grad_norm": 5.771441459655762, + "learning_rate": 1.8181344342910743e-05, + "loss": 2.0507, + "step": 1069 + }, + { + "epoch": 0.658410891469887, + "grad_norm": 6.004544734954834, + "learning_rate": 
1.8177521681370183e-05, + "loss": 2.1351, + "step": 1070 + }, + { + "epoch": 0.6590262287516345, + "grad_norm": 8.96556282043457, + "learning_rate": 1.8173695409336703e-05, + "loss": 2.108, + "step": 1071 + }, + { + "epoch": 0.6596415660333821, + "grad_norm": 5.866920471191406, + "learning_rate": 1.816986552849965e-05, + "loss": 2.1968, + "step": 1072 + }, + { + "epoch": 0.6602569033151297, + "grad_norm": 6.2176032066345215, + "learning_rate": 1.816603204054998e-05, + "loss": 2.0223, + "step": 1073 + }, + { + "epoch": 0.6608722405968772, + "grad_norm": 6.5354461669921875, + "learning_rate": 1.8162194947180228e-05, + "loss": 2.1745, + "step": 1074 + }, + { + "epoch": 0.6614875778786247, + "grad_norm": 5.86188268661499, + "learning_rate": 1.815835425008453e-05, + "loss": 2.0644, + "step": 1075 + }, + { + "epoch": 0.6621029151603722, + "grad_norm": 6.620099067687988, + "learning_rate": 1.81545099509586e-05, + "loss": 2.1436, + "step": 1076 + }, + { + "epoch": 0.6627182524421198, + "grad_norm": 5.31129264831543, + "learning_rate": 1.815066205149976e-05, + "loss": 1.9966, + "step": 1077 + }, + { + "epoch": 0.6633335897238674, + "grad_norm": 7.12163782119751, + "learning_rate": 1.814681055340691e-05, + "loss": 1.9181, + "step": 1078 + }, + { + "epoch": 0.6639489270056149, + "grad_norm": 5.014780521392822, + "learning_rate": 1.814295545838054e-05, + "loss": 2.1309, + "step": 1079 + }, + { + "epoch": 0.6645642642873625, + "grad_norm": 6.095749855041504, + "learning_rate": 1.8139096768122727e-05, + "loss": 2.042, + "step": 1080 + }, + { + "epoch": 0.66517960156911, + "grad_norm": 6.7334885597229, + "learning_rate": 1.8135234484337148e-05, + "loss": 2.1681, + "step": 1081 + }, + { + "epoch": 0.6657949388508576, + "grad_norm": 7.005643844604492, + "learning_rate": 1.8131368608729047e-05, + "loss": 2.0168, + "step": 1082 + }, + { + "epoch": 0.6664102761326052, + "grad_norm": 7.236417293548584, + "learning_rate": 1.8127499143005266e-05, + "loss": 2.0787, + "step": 1083 + }, + { + "epoch": 0.6670256134143527, + "grad_norm": 7.299646377563477, + "learning_rate": 1.8123626088874232e-05, + "loss": 2.0437, + "step": 1084 + }, + { + "epoch": 0.6676409506961003, + "grad_norm": 6.099231719970703, + "learning_rate": 1.8119749448045947e-05, + "loss": 1.9502, + "step": 1085 + }, + { + "epoch": 0.6682562879778479, + "grad_norm": 6.907180309295654, + "learning_rate": 1.8115869222232015e-05, + "loss": 1.8932, + "step": 1086 + }, + { + "epoch": 0.6688716252595954, + "grad_norm": 6.553896903991699, + "learning_rate": 1.81119854131456e-05, + "loss": 2.2703, + "step": 1087 + }, + { + "epoch": 0.669486962541343, + "grad_norm": 6.931576728820801, + "learning_rate": 1.810809802250147e-05, + "loss": 1.9195, + "step": 1088 + }, + { + "epoch": 0.6701022998230906, + "grad_norm": 5.057276248931885, + "learning_rate": 1.8104207052015952e-05, + "loss": 2.1338, + "step": 1089 + }, + { + "epoch": 0.6707176371048381, + "grad_norm": 6.673529624938965, + "learning_rate": 1.810031250340697e-05, + "loss": 2.2255, + "step": 1090 + }, + { + "epoch": 0.6713329743865857, + "grad_norm": 6.843230247497559, + "learning_rate": 1.809641437839403e-05, + "loss": 1.8383, + "step": 1091 + }, + { + "epoch": 0.6719483116683332, + "grad_norm": 6.697035789489746, + "learning_rate": 1.80925126786982e-05, + "loss": 2.0414, + "step": 1092 + }, + { + "epoch": 0.6725636489500808, + "grad_norm": 5.823075294494629, + "learning_rate": 1.808860740604214e-05, + "loss": 2.0405, + "step": 1093 + }, + { + "epoch": 0.6731789862318284, + "grad_norm": 
6.632768154144287, + "learning_rate": 1.8084698562150086e-05, + "loss": 2.1897, + "step": 1094 + }, + { + "epoch": 0.6737943235135759, + "grad_norm": 5.7694783210754395, + "learning_rate": 1.8080786148747842e-05, + "loss": 2.1532, + "step": 1095 + }, + { + "epoch": 0.6744096607953234, + "grad_norm": 5.331549167633057, + "learning_rate": 1.8076870167562803e-05, + "loss": 2.0243, + "step": 1096 + }, + { + "epoch": 0.675024998077071, + "grad_norm": 5.776970863342285, + "learning_rate": 1.8072950620323926e-05, + "loss": 2.08, + "step": 1097 + }, + { + "epoch": 0.6756403353588185, + "grad_norm": 6.959576606750488, + "learning_rate": 1.8069027508761747e-05, + "loss": 1.9262, + "step": 1098 + }, + { + "epoch": 0.6762556726405661, + "grad_norm": 5.956053733825684, + "learning_rate": 1.8065100834608378e-05, + "loss": 2.0664, + "step": 1099 + }, + { + "epoch": 0.6768710099223136, + "grad_norm": 6.233561992645264, + "learning_rate": 1.80611705995975e-05, + "loss": 2.1923, + "step": 1100 + }, + { + "epoch": 0.6774863472040612, + "grad_norm": 6.608743190765381, + "learning_rate": 1.805723680546437e-05, + "loss": 2.0487, + "step": 1101 + }, + { + "epoch": 0.6781016844858088, + "grad_norm": 5.557980060577393, + "learning_rate": 1.8053299453945813e-05, + "loss": 1.9928, + "step": 1102 + }, + { + "epoch": 0.6787170217675563, + "grad_norm": 6.010790824890137, + "learning_rate": 1.8049358546780227e-05, + "loss": 2.1284, + "step": 1103 + }, + { + "epoch": 0.6793323590493039, + "grad_norm": 6.688076496124268, + "learning_rate": 1.8045414085707578e-05, + "loss": 1.9577, + "step": 1104 + }, + { + "epoch": 0.6799476963310515, + "grad_norm": 7.096404552459717, + "learning_rate": 1.80414660724694e-05, + "loss": 1.9309, + "step": 1105 + }, + { + "epoch": 0.680563033612799, + "grad_norm": 8.135313034057617, + "learning_rate": 1.80375145088088e-05, + "loss": 2.0339, + "step": 1106 + }, + { + "epoch": 0.6811783708945466, + "grad_norm": 5.318109035491943, + "learning_rate": 1.8033559396470455e-05, + "loss": 2.122, + "step": 1107 + }, + { + "epoch": 0.6817937081762941, + "grad_norm": 6.941799640655518, + "learning_rate": 1.802960073720059e-05, + "loss": 2.1221, + "step": 1108 + }, + { + "epoch": 0.6824090454580417, + "grad_norm": 5.98546028137207, + "learning_rate": 1.8025638532747027e-05, + "loss": 2.0361, + "step": 1109 + }, + { + "epoch": 0.6830243827397893, + "grad_norm": 7.961409568786621, + "learning_rate": 1.802167278485912e-05, + "loss": 2.218, + "step": 1110 + }, + { + "epoch": 0.6836397200215368, + "grad_norm": 6.568901538848877, + "learning_rate": 1.8017703495287815e-05, + "loss": 1.9472, + "step": 1111 + }, + { + "epoch": 0.6842550573032844, + "grad_norm": 6.081099033355713, + "learning_rate": 1.8013730665785605e-05, + "loss": 1.9564, + "step": 1112 + }, + { + "epoch": 0.684870394585032, + "grad_norm": 6.065432548522949, + "learning_rate": 1.8009754298106553e-05, + "loss": 2.1308, + "step": 1113 + }, + { + "epoch": 0.6854857318667795, + "grad_norm": 6.697269439697266, + "learning_rate": 1.800577439400628e-05, + "loss": 1.9696, + "step": 1114 + }, + { + "epoch": 0.6861010691485271, + "grad_norm": 8.802401542663574, + "learning_rate": 1.8001790955241972e-05, + "loss": 2.1097, + "step": 1115 + }, + { + "epoch": 0.6867164064302745, + "grad_norm": 5.316067218780518, + "learning_rate": 1.7997803983572372e-05, + "loss": 2.1269, + "step": 1116 + }, + { + "epoch": 0.6873317437120221, + "grad_norm": 6.829866409301758, + "learning_rate": 1.799381348075779e-05, + "loss": 2.1781, + "step": 1117 + }, + { + "epoch": 
0.6879470809937697, + "grad_norm": 6.13006591796875, + "learning_rate": 1.7989819448560087e-05, + "loss": 1.9095, + "step": 1118 + }, + { + "epoch": 0.6885624182755172, + "grad_norm": 5.999732494354248, + "learning_rate": 1.7985821888742687e-05, + "loss": 2.2938, + "step": 1119 + }, + { + "epoch": 0.6891777555572648, + "grad_norm": 6.784516334533691, + "learning_rate": 1.7981820803070564e-05, + "loss": 2.0717, + "step": 1120 + }, + { + "epoch": 0.6897930928390124, + "grad_norm": 5.283970832824707, + "learning_rate": 1.7977816193310265e-05, + "loss": 2.0661, + "step": 1121 + }, + { + "epoch": 0.6904084301207599, + "grad_norm": 5.994729518890381, + "learning_rate": 1.7973808061229873e-05, + "loss": 2.0714, + "step": 1122 + }, + { + "epoch": 0.6910237674025075, + "grad_norm": 7.373093128204346, + "learning_rate": 1.7969796408599042e-05, + "loss": 2.2065, + "step": 1123 + }, + { + "epoch": 0.691639104684255, + "grad_norm": 5.947018146514893, + "learning_rate": 1.796578123718897e-05, + "loss": 2.1804, + "step": 1124 + }, + { + "epoch": 0.6922544419660026, + "grad_norm": 6.016225814819336, + "learning_rate": 1.7961762548772413e-05, + "loss": 1.9968, + "step": 1125 + }, + { + "epoch": 0.6928697792477502, + "grad_norm": 7.788586616516113, + "learning_rate": 1.7957740345123685e-05, + "loss": 2.1202, + "step": 1126 + }, + { + "epoch": 0.6934851165294977, + "grad_norm": 6.221169948577881, + "learning_rate": 1.795371462801864e-05, + "loss": 1.9808, + "step": 1127 + }, + { + "epoch": 0.6941004538112453, + "grad_norm": 5.5342230796813965, + "learning_rate": 1.794968539923469e-05, + "loss": 2.2, + "step": 1128 + }, + { + "epoch": 0.6947157910929929, + "grad_norm": 8.435332298278809, + "learning_rate": 1.7945652660550802e-05, + "loss": 2.075, + "step": 1129 + }, + { + "epoch": 0.6953311283747404, + "grad_norm": 6.46060037612915, + "learning_rate": 1.7941616413747485e-05, + "loss": 1.9177, + "step": 1130 + }, + { + "epoch": 0.695946465656488, + "grad_norm": 7.768758773803711, + "learning_rate": 1.79375766606068e-05, + "loss": 2.3106, + "step": 1131 + }, + { + "epoch": 0.6965618029382356, + "grad_norm": 7.33948278427124, + "learning_rate": 1.7933533402912354e-05, + "loss": 1.9187, + "step": 1132 + }, + { + "epoch": 0.6971771402199831, + "grad_norm": 7.2011518478393555, + "learning_rate": 1.7929486642449307e-05, + "loss": 2.0725, + "step": 1133 + }, + { + "epoch": 0.6977924775017307, + "grad_norm": 6.434672832489014, + "learning_rate": 1.7925436381004356e-05, + "loss": 2.0493, + "step": 1134 + }, + { + "epoch": 0.6984078147834782, + "grad_norm": 5.27788782119751, + "learning_rate": 1.7921382620365757e-05, + "loss": 1.9931, + "step": 1135 + }, + { + "epoch": 0.6990231520652258, + "grad_norm": 6.183602809906006, + "learning_rate": 1.79173253623233e-05, + "loss": 2.1634, + "step": 1136 + }, + { + "epoch": 0.6996384893469733, + "grad_norm": 7.205876350402832, + "learning_rate": 1.7913264608668316e-05, + "loss": 2.0982, + "step": 1137 + }, + { + "epoch": 0.7002538266287208, + "grad_norm": 6.202657699584961, + "learning_rate": 1.7909200361193698e-05, + "loss": 1.9926, + "step": 1138 + }, + { + "epoch": 0.7008691639104684, + "grad_norm": 5.503357410430908, + "learning_rate": 1.7905132621693862e-05, + "loss": 2.0402, + "step": 1139 + }, + { + "epoch": 0.701484501192216, + "grad_norm": 5.615154266357422, + "learning_rate": 1.790106139196477e-05, + "loss": 2.0089, + "step": 1140 + }, + { + "epoch": 0.7020998384739635, + "grad_norm": 6.939348220825195, + "learning_rate": 1.789698667380394e-05, + "loss": 2.0149, + 
"step": 1141 + }, + { + "epoch": 0.7027151757557111, + "grad_norm": 7.1892008781433105, + "learning_rate": 1.7892908469010404e-05, + "loss": 2.1304, + "step": 1142 + }, + { + "epoch": 0.7033305130374586, + "grad_norm": 6.445766925811768, + "learning_rate": 1.788882677938476e-05, + "loss": 1.9484, + "step": 1143 + }, + { + "epoch": 0.7039458503192062, + "grad_norm": 6.534034729003906, + "learning_rate": 1.7884741606729128e-05, + "loss": 2.1671, + "step": 1144 + }, + { + "epoch": 0.7045611876009538, + "grad_norm": 5.8909759521484375, + "learning_rate": 1.7880652952847164e-05, + "loss": 2.2049, + "step": 1145 + }, + { + "epoch": 0.7051765248827013, + "grad_norm": 5.665266036987305, + "learning_rate": 1.7876560819544077e-05, + "loss": 1.8311, + "step": 1146 + }, + { + "epoch": 0.7057918621644489, + "grad_norm": 6.436813831329346, + "learning_rate": 1.78724652086266e-05, + "loss": 2.0238, + "step": 1147 + }, + { + "epoch": 0.7064071994461965, + "grad_norm": 6.289281845092773, + "learning_rate": 1.7868366121903e-05, + "loss": 2.0928, + "step": 1148 + }, + { + "epoch": 0.707022536727944, + "grad_norm": 6.4610772132873535, + "learning_rate": 1.7864263561183085e-05, + "loss": 2.0469, + "step": 1149 + }, + { + "epoch": 0.7076378740096916, + "grad_norm": 6.945637226104736, + "learning_rate": 1.786015752827819e-05, + "loss": 1.9805, + "step": 1150 + }, + { + "epoch": 0.7082532112914391, + "grad_norm": 6.111583232879639, + "learning_rate": 1.7856048025001195e-05, + "loss": 1.8962, + "step": 1151 + }, + { + "epoch": 0.7088685485731867, + "grad_norm": 5.718967914581299, + "learning_rate": 1.7851935053166503e-05, + "loss": 2.1979, + "step": 1152 + }, + { + "epoch": 0.7094838858549343, + "grad_norm": 7.934896945953369, + "learning_rate": 1.7847818614590045e-05, + "loss": 1.9061, + "step": 1153 + }, + { + "epoch": 0.7100992231366818, + "grad_norm": 5.864793300628662, + "learning_rate": 1.784369871108929e-05, + "loss": 2.0976, + "step": 1154 + }, + { + "epoch": 0.7107145604184294, + "grad_norm": 7.934084415435791, + "learning_rate": 1.7839575344483237e-05, + "loss": 2.1161, + "step": 1155 + }, + { + "epoch": 0.711329897700177, + "grad_norm": 7.1962127685546875, + "learning_rate": 1.7835448516592408e-05, + "loss": 1.8407, + "step": 1156 + }, + { + "epoch": 0.7119452349819244, + "grad_norm": 5.78460168838501, + "learning_rate": 1.7831318229238858e-05, + "loss": 2.0348, + "step": 1157 + }, + { + "epoch": 0.712560572263672, + "grad_norm": 7.951099872589111, + "learning_rate": 1.782718448424617e-05, + "loss": 2.1964, + "step": 1158 + }, + { + "epoch": 0.7131759095454195, + "grad_norm": 6.751318454742432, + "learning_rate": 1.7823047283439444e-05, + "loss": 2.0575, + "step": 1159 + }, + { + "epoch": 0.7137912468271671, + "grad_norm": 5.002671718597412, + "learning_rate": 1.7818906628645322e-05, + "loss": 2.0922, + "step": 1160 + }, + { + "epoch": 0.7144065841089147, + "grad_norm": 7.2857666015625, + "learning_rate": 1.781476252169196e-05, + "loss": 2.0201, + "step": 1161 + }, + { + "epoch": 0.7150219213906622, + "grad_norm": 6.051436424255371, + "learning_rate": 1.7810614964409037e-05, + "loss": 2.0546, + "step": 1162 + }, + { + "epoch": 0.7156372586724098, + "grad_norm": 6.743741512298584, + "learning_rate": 1.7806463958627765e-05, + "loss": 2.1813, + "step": 1163 + }, + { + "epoch": 0.7162525959541574, + "grad_norm": 5.869334697723389, + "learning_rate": 1.7802309506180867e-05, + "loss": 1.9258, + "step": 1164 + }, + { + "epoch": 0.7168679332359049, + "grad_norm": 6.147205352783203, + "learning_rate": 
1.7798151608902595e-05, + "loss": 1.9938, + "step": 1165 + }, + { + "epoch": 0.7174832705176525, + "grad_norm": 5.696031093597412, + "learning_rate": 1.7793990268628722e-05, + "loss": 2.0603, + "step": 1166 + }, + { + "epoch": 0.7180986077994, + "grad_norm": 6.9896063804626465, + "learning_rate": 1.7789825487196538e-05, + "loss": 1.9743, + "step": 1167 + }, + { + "epoch": 0.7187139450811476, + "grad_norm": 5.718892574310303, + "learning_rate": 1.7785657266444853e-05, + "loss": 1.8877, + "step": 1168 + }, + { + "epoch": 0.7193292823628952, + "grad_norm": 6.89406156539917, + "learning_rate": 1.7781485608213995e-05, + "loss": 2.0389, + "step": 1169 + }, + { + "epoch": 0.7199446196446427, + "grad_norm": 6.034156799316406, + "learning_rate": 1.7777310514345816e-05, + "loss": 2.2197, + "step": 1170 + }, + { + "epoch": 0.7205599569263903, + "grad_norm": 7.2844109535217285, + "learning_rate": 1.7773131986683675e-05, + "loss": 2.1384, + "step": 1171 + }, + { + "epoch": 0.7211752942081379, + "grad_norm": 5.698202133178711, + "learning_rate": 1.7768950027072452e-05, + "loss": 2.0969, + "step": 1172 + }, + { + "epoch": 0.7217906314898854, + "grad_norm": 6.305366516113281, + "learning_rate": 1.7764764637358545e-05, + "loss": 1.9621, + "step": 1173 + }, + { + "epoch": 0.722405968771633, + "grad_norm": 6.2185587882995605, + "learning_rate": 1.7760575819389857e-05, + "loss": 2.1209, + "step": 1174 + }, + { + "epoch": 0.7230213060533806, + "grad_norm": 7.261756896972656, + "learning_rate": 1.775638357501582e-05, + "loss": 2.0648, + "step": 1175 + }, + { + "epoch": 0.7236366433351281, + "grad_norm": 9.633955001831055, + "learning_rate": 1.7752187906087366e-05, + "loss": 2.1121, + "step": 1176 + }, + { + "epoch": 0.7242519806168757, + "grad_norm": 6.471198558807373, + "learning_rate": 1.774798881445694e-05, + "loss": 2.1995, + "step": 1177 + }, + { + "epoch": 0.7248673178986231, + "grad_norm": 7.655094623565674, + "learning_rate": 1.7743786301978507e-05, + "loss": 1.9468, + "step": 1178 + }, + { + "epoch": 0.7254826551803707, + "grad_norm": 5.8330278396606445, + "learning_rate": 1.7739580370507533e-05, + "loss": 2.0847, + "step": 1179 + }, + { + "epoch": 0.7260979924621183, + "grad_norm": 6.619964122772217, + "learning_rate": 1.7735371021900993e-05, + "loss": 2.0627, + "step": 1180 + }, + { + "epoch": 0.7267133297438658, + "grad_norm": 6.0102057456970215, + "learning_rate": 1.7731158258017382e-05, + "loss": 2.0448, + "step": 1181 + }, + { + "epoch": 0.7273286670256134, + "grad_norm": 6.561736106872559, + "learning_rate": 1.7726942080716693e-05, + "loss": 1.9794, + "step": 1182 + }, + { + "epoch": 0.727944004307361, + "grad_norm": 6.526302337646484, + "learning_rate": 1.7722722491860427e-05, + "loss": 1.9651, + "step": 1183 + }, + { + "epoch": 0.7285593415891085, + "grad_norm": 6.529086112976074, + "learning_rate": 1.7718499493311593e-05, + "loss": 2.063, + "step": 1184 + }, + { + "epoch": 0.7291746788708561, + "grad_norm": 6.7415995597839355, + "learning_rate": 1.7714273086934705e-05, + "loss": 2.1739, + "step": 1185 + }, + { + "epoch": 0.7297900161526036, + "grad_norm": 6.032292366027832, + "learning_rate": 1.7710043274595785e-05, + "loss": 1.9117, + "step": 1186 + }, + { + "epoch": 0.7304053534343512, + "grad_norm": 5.786758899688721, + "learning_rate": 1.7705810058162354e-05, + "loss": 1.9341, + "step": 1187 + }, + { + "epoch": 0.7310206907160988, + "grad_norm": 6.255584716796875, + "learning_rate": 1.7701573439503437e-05, + "loss": 2.1688, + "step": 1188 + }, + { + "epoch": 0.7316360279978463, + 
"grad_norm": 6.384731769561768, + "learning_rate": 1.7697333420489565e-05, + "loss": 1.7604, + "step": 1189 + }, + { + "epoch": 0.7322513652795939, + "grad_norm": 5.84483528137207, + "learning_rate": 1.769309000299276e-05, + "loss": 2.0788, + "step": 1190 + }, + { + "epoch": 0.7328667025613415, + "grad_norm": 5.964205265045166, + "learning_rate": 1.768884318888656e-05, + "loss": 1.9764, + "step": 1191 + }, + { + "epoch": 0.733482039843089, + "grad_norm": 6.077455997467041, + "learning_rate": 1.768459298004599e-05, + "loss": 1.8397, + "step": 1192 + }, + { + "epoch": 0.7340973771248366, + "grad_norm": 6.460726737976074, + "learning_rate": 1.7680339378347575e-05, + "loss": 1.9794, + "step": 1193 + }, + { + "epoch": 0.7347127144065841, + "grad_norm": 6.545133590698242, + "learning_rate": 1.7676082385669343e-05, + "loss": 2.1694, + "step": 1194 + }, + { + "epoch": 0.7353280516883317, + "grad_norm": 5.900578498840332, + "learning_rate": 1.7671822003890825e-05, + "loss": 2.1396, + "step": 1195 + }, + { + "epoch": 0.7359433889700793, + "grad_norm": 8.095240592956543, + "learning_rate": 1.766755823489303e-05, + "loss": 2.0369, + "step": 1196 + }, + { + "epoch": 0.7365587262518268, + "grad_norm": 7.5416483879089355, + "learning_rate": 1.7663291080558477e-05, + "loss": 1.8997, + "step": 1197 + }, + { + "epoch": 0.7371740635335743, + "grad_norm": 7.574727535247803, + "learning_rate": 1.765902054277118e-05, + "loss": 2.0568, + "step": 1198 + }, + { + "epoch": 0.7377894008153218, + "grad_norm": 6.893007755279541, + "learning_rate": 1.7654746623416637e-05, + "loss": 2.1122, + "step": 1199 + }, + { + "epoch": 0.7384047380970694, + "grad_norm": 6.172626972198486, + "learning_rate": 1.7650469324381843e-05, + "loss": 1.9297, + "step": 1200 + }, + { + "epoch": 0.739020075378817, + "grad_norm": 6.438818454742432, + "learning_rate": 1.7646188647555293e-05, + "loss": 2.1714, + "step": 1201 + }, + { + "epoch": 0.7396354126605645, + "grad_norm": 7.6881842613220215, + "learning_rate": 1.7641904594826965e-05, + "loss": 2.1537, + "step": 1202 + }, + { + "epoch": 0.7402507499423121, + "grad_norm": 7.184983730316162, + "learning_rate": 1.7637617168088327e-05, + "loss": 2.0623, + "step": 1203 + }, + { + "epoch": 0.7408660872240597, + "grad_norm": 5.9894514083862305, + "learning_rate": 1.7633326369232338e-05, + "loss": 2.052, + "step": 1204 + }, + { + "epoch": 0.7414814245058072, + "grad_norm": 6.295405864715576, + "learning_rate": 1.7629032200153456e-05, + "loss": 1.9675, + "step": 1205 + }, + { + "epoch": 0.7420967617875548, + "grad_norm": 6.731653213500977, + "learning_rate": 1.762473466274761e-05, + "loss": 2.0571, + "step": 1206 + }, + { + "epoch": 0.7427120990693024, + "grad_norm": 6.3880414962768555, + "learning_rate": 1.762043375891223e-05, + "loss": 2.041, + "step": 1207 + }, + { + "epoch": 0.7433274363510499, + "grad_norm": 7.497288227081299, + "learning_rate": 1.7616129490546217e-05, + "loss": 1.8475, + "step": 1208 + }, + { + "epoch": 0.7439427736327975, + "grad_norm": 6.625600337982178, + "learning_rate": 1.7611821859549977e-05, + "loss": 1.9596, + "step": 1209 + }, + { + "epoch": 0.744558110914545, + "grad_norm": 5.9422807693481445, + "learning_rate": 1.7607510867825385e-05, + "loss": 2.0514, + "step": 1210 + }, + { + "epoch": 0.7451734481962926, + "grad_norm": 6.304619312286377, + "learning_rate": 1.7603196517275808e-05, + "loss": 1.9513, + "step": 1211 + }, + { + "epoch": 0.7457887854780402, + "grad_norm": 6.06371545791626, + "learning_rate": 1.7598878809806095e-05, + "loss": 1.9858, + "step": 1212 + }, 
+ { + "epoch": 0.7464041227597877, + "grad_norm": 6.035061359405518, + "learning_rate": 1.759455774732257e-05, + "loss": 2.1848, + "step": 1213 + }, + { + "epoch": 0.7470194600415353, + "grad_norm": 6.290915489196777, + "learning_rate": 1.7590233331733054e-05, + "loss": 1.9107, + "step": 1214 + }, + { + "epoch": 0.7476347973232829, + "grad_norm": 6.5088982582092285, + "learning_rate": 1.758590556494683e-05, + "loss": 2.17, + "step": 1215 + }, + { + "epoch": 0.7482501346050304, + "grad_norm": 5.755478858947754, + "learning_rate": 1.7581574448874668e-05, + "loss": 2.1832, + "step": 1216 + }, + { + "epoch": 0.748865471886778, + "grad_norm": 6.965065956115723, + "learning_rate": 1.7577239985428825e-05, + "loss": 2.0205, + "step": 1217 + }, + { + "epoch": 0.7494808091685256, + "grad_norm": 7.8030266761779785, + "learning_rate": 1.7572902176523023e-05, + "loss": 2.0915, + "step": 1218 + }, + { + "epoch": 0.750096146450273, + "grad_norm": 7.634258270263672, + "learning_rate": 1.756856102407247e-05, + "loss": 2.0788, + "step": 1219 + }, + { + "epoch": 0.7507114837320206, + "grad_norm": 6.077826976776123, + "learning_rate": 1.7564216529993845e-05, + "loss": 1.8541, + "step": 1220 + }, + { + "epoch": 0.7513268210137681, + "grad_norm": 5.893485069274902, + "learning_rate": 1.7559868696205304e-05, + "loss": 2.1068, + "step": 1221 + }, + { + "epoch": 0.7519421582955157, + "grad_norm": 6.573964595794678, + "learning_rate": 1.7555517524626476e-05, + "loss": 1.9813, + "step": 1222 + }, + { + "epoch": 0.7525574955772633, + "grad_norm": 7.34799337387085, + "learning_rate": 1.7551163017178473e-05, + "loss": 2.1767, + "step": 1223 + }, + { + "epoch": 0.7531728328590108, + "grad_norm": 6.063894748687744, + "learning_rate": 1.7546805175783866e-05, + "loss": 1.998, + "step": 1224 + }, + { + "epoch": 0.7537881701407584, + "grad_norm": 6.100310325622559, + "learning_rate": 1.7542444002366705e-05, + "loss": 2.0144, + "step": 1225 + }, + { + "epoch": 0.754403507422506, + "grad_norm": 6.353453636169434, + "learning_rate": 1.7538079498852512e-05, + "loss": 2.3153, + "step": 1226 + }, + { + "epoch": 0.7550188447042535, + "grad_norm": 7.887106895446777, + "learning_rate": 1.753371166716828e-05, + "loss": 2.029, + "step": 1227 + }, + { + "epoch": 0.7556341819860011, + "grad_norm": 7.021947860717773, + "learning_rate": 1.7529340509242464e-05, + "loss": 2.0443, + "step": 1228 + }, + { + "epoch": 0.7562495192677486, + "grad_norm": 6.390924453735352, + "learning_rate": 1.7524966027004995e-05, + "loss": 1.8928, + "step": 1229 + }, + { + "epoch": 0.7568648565494962, + "grad_norm": 5.9113640785217285, + "learning_rate": 1.752058822238727e-05, + "loss": 1.9791, + "step": 1230 + }, + { + "epoch": 0.7574801938312438, + "grad_norm": 7.603540897369385, + "learning_rate": 1.7516207097322154e-05, + "loss": 2.2376, + "step": 1231 + }, + { + "epoch": 0.7580955311129913, + "grad_norm": 5.746059417724609, + "learning_rate": 1.7511822653743973e-05, + "loss": 2.2865, + "step": 1232 + }, + { + "epoch": 0.7587108683947389, + "grad_norm": 4.956630706787109, + "learning_rate": 1.7507434893588525e-05, + "loss": 2.0414, + "step": 1233 + }, + { + "epoch": 0.7593262056764865, + "grad_norm": 6.698592185974121, + "learning_rate": 1.7503043818793068e-05, + "loss": 2.0611, + "step": 1234 + }, + { + "epoch": 0.759941542958234, + "grad_norm": 6.094048976898193, + "learning_rate": 1.749864943129632e-05, + "loss": 2.1138, + "step": 1235 + }, + { + "epoch": 0.7605568802399816, + "grad_norm": 6.395570755004883, + "learning_rate": 1.7494251733038473e-05, + 
"loss": 1.9986, + "step": 1236 + }, + { + "epoch": 0.7611722175217291, + "grad_norm": 5.488734722137451, + "learning_rate": 1.748985072596117e-05, + "loss": 1.9645, + "step": 1237 + }, + { + "epoch": 0.7617875548034767, + "grad_norm": 6.047366619110107, + "learning_rate": 1.748544641200752e-05, + "loss": 2.0748, + "step": 1238 + }, + { + "epoch": 0.7624028920852242, + "grad_norm": 6.112788677215576, + "learning_rate": 1.748103879312209e-05, + "loss": 2.0764, + "step": 1239 + }, + { + "epoch": 0.7630182293669717, + "grad_norm": 7.144918441772461, + "learning_rate": 1.7476627871250905e-05, + "loss": 2.1838, + "step": 1240 + }, + { + "epoch": 0.7636335666487193, + "grad_norm": 6.1358962059021, + "learning_rate": 1.7472213648341455e-05, + "loss": 1.8968, + "step": 1241 + }, + { + "epoch": 0.7642489039304668, + "grad_norm": 6.0695271492004395, + "learning_rate": 1.7467796126342677e-05, + "loss": 2.0961, + "step": 1242 + }, + { + "epoch": 0.7648642412122144, + "grad_norm": 7.308156967163086, + "learning_rate": 1.746337530720497e-05, + "loss": 2.1999, + "step": 1243 + }, + { + "epoch": 0.765479578493962, + "grad_norm": 7.572962284088135, + "learning_rate": 1.7458951192880196e-05, + "loss": 2.1221, + "step": 1244 + }, + { + "epoch": 0.7660949157757095, + "grad_norm": 6.337164878845215, + "learning_rate": 1.745452378532166e-05, + "loss": 2.4395, + "step": 1245 + }, + { + "epoch": 0.7667102530574571, + "grad_norm": 7.6998066902160645, + "learning_rate": 1.7450093086484122e-05, + "loss": 2.0719, + "step": 1246 + }, + { + "epoch": 0.7673255903392047, + "grad_norm": 7.733184814453125, + "learning_rate": 1.7445659098323807e-05, + "loss": 2.0603, + "step": 1247 + }, + { + "epoch": 0.7679409276209522, + "grad_norm": 5.937858581542969, + "learning_rate": 1.7441221822798385e-05, + "loss": 2.136, + "step": 1248 + }, + { + "epoch": 0.7685562649026998, + "grad_norm": 5.937375545501709, + "learning_rate": 1.7436781261866965e-05, + "loss": 2.228, + "step": 1249 + }, + { + "epoch": 0.7691716021844474, + "grad_norm": 5.656648635864258, + "learning_rate": 1.743233741749013e-05, + "loss": 2.0245, + "step": 1250 + }, + { + "epoch": 0.7697869394661949, + "grad_norm": 7.053448677062988, + "learning_rate": 1.7427890291629895e-05, + "loss": 2.0574, + "step": 1251 + }, + { + "epoch": 0.7704022767479425, + "grad_norm": 5.478377819061279, + "learning_rate": 1.7423439886249732e-05, + "loss": 1.9725, + "step": 1252 + }, + { + "epoch": 0.77101761402969, + "grad_norm": 5.837745189666748, + "learning_rate": 1.7418986203314557e-05, + "loss": 2.2121, + "step": 1253 + }, + { + "epoch": 0.7716329513114376, + "grad_norm": 5.797937393188477, + "learning_rate": 1.7414529244790733e-05, + "loss": 2.0502, + "step": 1254 + }, + { + "epoch": 0.7722482885931852, + "grad_norm": 6.507638931274414, + "learning_rate": 1.7410069012646076e-05, + "loss": 2.0451, + "step": 1255 + }, + { + "epoch": 0.7728636258749327, + "grad_norm": 7.311460018157959, + "learning_rate": 1.740560550884984e-05, + "loss": 2.0865, + "step": 1256 + }, + { + "epoch": 0.7734789631566803, + "grad_norm": 5.524518013000488, + "learning_rate": 1.740113873537273e-05, + "loss": 2.2395, + "step": 1257 + }, + { + "epoch": 0.7740943004384279, + "grad_norm": 5.355138301849365, + "learning_rate": 1.739666869418688e-05, + "loss": 2.1187, + "step": 1258 + }, + { + "epoch": 0.7747096377201754, + "grad_norm": 6.308330535888672, + "learning_rate": 1.7392195387265888e-05, + "loss": 2.0875, + "step": 1259 + }, + { + "epoch": 0.7753249750019229, + "grad_norm": 7.194815158843994, + 
"learning_rate": 1.7387718816584775e-05, + "loss": 2.0049, + "step": 1260 + }, + { + "epoch": 0.7759403122836704, + "grad_norm": 5.68311071395874, + "learning_rate": 1.7383238984120016e-05, + "loss": 2.1997, + "step": 1261 + }, + { + "epoch": 0.776555649565418, + "grad_norm": 4.868226528167725, + "learning_rate": 1.7378755891849522e-05, + "loss": 1.8828, + "step": 1262 + }, + { + "epoch": 0.7771709868471656, + "grad_norm": 5.665606498718262, + "learning_rate": 1.737426954175264e-05, + "loss": 2.0594, + "step": 1263 + }, + { + "epoch": 0.7777863241289131, + "grad_norm": 5.814052581787109, + "learning_rate": 1.7369779935810152e-05, + "loss": 1.9728, + "step": 1264 + }, + { + "epoch": 0.7784016614106607, + "grad_norm": 5.7679033279418945, + "learning_rate": 1.73652870760043e-05, + "loss": 2.0852, + "step": 1265 + }, + { + "epoch": 0.7790169986924083, + "grad_norm": 5.6304121017456055, + "learning_rate": 1.7360790964318727e-05, + "loss": 1.9758, + "step": 1266 + }, + { + "epoch": 0.7796323359741558, + "grad_norm": 6.706708908081055, + "learning_rate": 1.7356291602738542e-05, + "loss": 2.1318, + "step": 1267 + }, + { + "epoch": 0.7802476732559034, + "grad_norm": 7.98246955871582, + "learning_rate": 1.7351788993250274e-05, + "loss": 2.1695, + "step": 1268 + }, + { + "epoch": 0.780863010537651, + "grad_norm": 5.529266357421875, + "learning_rate": 1.734728313784189e-05, + "loss": 2.2989, + "step": 1269 + }, + { + "epoch": 0.7814783478193985, + "grad_norm": 5.8758978843688965, + "learning_rate": 1.7342774038502792e-05, + "loss": 2.0275, + "step": 1270 + }, + { + "epoch": 0.7820936851011461, + "grad_norm": 6.657741546630859, + "learning_rate": 1.733826169722381e-05, + "loss": 1.9861, + "step": 1271 + }, + { + "epoch": 0.7827090223828936, + "grad_norm": 5.479569911956787, + "learning_rate": 1.7333746115997204e-05, + "loss": 1.985, + "step": 1272 + }, + { + "epoch": 0.7833243596646412, + "grad_norm": 7.788626194000244, + "learning_rate": 1.732922729681667e-05, + "loss": 1.8925, + "step": 1273 + }, + { + "epoch": 0.7839396969463888, + "grad_norm": 6.7317891120910645, + "learning_rate": 1.7324705241677335e-05, + "loss": 2.1507, + "step": 1274 + }, + { + "epoch": 0.7845550342281363, + "grad_norm": 5.524354457855225, + "learning_rate": 1.732017995257575e-05, + "loss": 2.0405, + "step": 1275 + }, + { + "epoch": 0.7851703715098839, + "grad_norm": 5.7767252922058105, + "learning_rate": 1.7315651431509893e-05, + "loss": 2.3155, + "step": 1276 + }, + { + "epoch": 0.7857857087916315, + "grad_norm": 6.86000394821167, + "learning_rate": 1.7311119680479168e-05, + "loss": 1.9968, + "step": 1277 + }, + { + "epoch": 0.786401046073379, + "grad_norm": 6.600097179412842, + "learning_rate": 1.7306584701484414e-05, + "loss": 1.9278, + "step": 1278 + }, + { + "epoch": 0.7870163833551266, + "grad_norm": 5.283498764038086, + "learning_rate": 1.7302046496527882e-05, + "loss": 2.1151, + "step": 1279 + }, + { + "epoch": 0.787631720636874, + "grad_norm": 6.657777786254883, + "learning_rate": 1.7297505067613262e-05, + "loss": 1.9598, + "step": 1280 + }, + { + "epoch": 0.7882470579186216, + "grad_norm": 5.9304327964782715, + "learning_rate": 1.7292960416745657e-05, + "loss": 2.0601, + "step": 1281 + }, + { + "epoch": 0.7888623952003692, + "grad_norm": 5.925037384033203, + "learning_rate": 1.728841254593159e-05, + "loss": 1.8799, + "step": 1282 + }, + { + "epoch": 0.7894777324821167, + "grad_norm": 6.545285224914551, + "learning_rate": 1.7283861457179022e-05, + "loss": 1.9654, + "step": 1283 + }, + { + "epoch": 0.7900930697638643, 
+ "grad_norm": 6.266922473907471, + "learning_rate": 1.7279307152497316e-05, + "loss": 2.1714, + "step": 1284 + }, + { + "epoch": 0.7907084070456118, + "grad_norm": 5.458065986633301, + "learning_rate": 1.727474963389727e-05, + "loss": 2.0509, + "step": 1285 + }, + { + "epoch": 0.7913237443273594, + "grad_norm": 6.64530611038208, + "learning_rate": 1.727018890339108e-05, + "loss": 1.9551, + "step": 1286 + }, + { + "epoch": 0.791939081609107, + "grad_norm": 7.887524604797363, + "learning_rate": 1.726562496299239e-05, + "loss": 1.9464, + "step": 1287 + }, + { + "epoch": 0.7925544188908545, + "grad_norm": 5.759002208709717, + "learning_rate": 1.726105781471623e-05, + "loss": 2.0417, + "step": 1288 + }, + { + "epoch": 0.7931697561726021, + "grad_norm": 9.62018871307373, + "learning_rate": 1.7256487460579075e-05, + "loss": 2.1626, + "step": 1289 + }, + { + "epoch": 0.7937850934543497, + "grad_norm": 7.657482147216797, + "learning_rate": 1.7251913902598795e-05, + "loss": 2.1247, + "step": 1290 + }, + { + "epoch": 0.7944004307360972, + "grad_norm": 5.98975133895874, + "learning_rate": 1.724733714279468e-05, + "loss": 2.0851, + "step": 1291 + }, + { + "epoch": 0.7950157680178448, + "grad_norm": 7.858658790588379, + "learning_rate": 1.7242757183187435e-05, + "loss": 2.1151, + "step": 1292 + }, + { + "epoch": 0.7956311052995924, + "grad_norm": 6.593761920928955, + "learning_rate": 1.7238174025799183e-05, + "loss": 2.2638, + "step": 1293 + }, + { + "epoch": 0.7962464425813399, + "grad_norm": 5.882290363311768, + "learning_rate": 1.7233587672653446e-05, + "loss": 1.9972, + "step": 1294 + }, + { + "epoch": 0.7968617798630875, + "grad_norm": 7.016702175140381, + "learning_rate": 1.7228998125775175e-05, + "loss": 2.2345, + "step": 1295 + }, + { + "epoch": 0.797477117144835, + "grad_norm": 4.885353088378906, + "learning_rate": 1.722440538719071e-05, + "loss": 1.9644, + "step": 1296 + }, + { + "epoch": 0.7980924544265826, + "grad_norm": 6.8963847160339355, + "learning_rate": 1.7219809458927813e-05, + "loss": 2.1064, + "step": 1297 + }, + { + "epoch": 0.7987077917083302, + "grad_norm": 7.584693908691406, + "learning_rate": 1.721521034301566e-05, + "loss": 1.8813, + "step": 1298 + }, + { + "epoch": 0.7993231289900777, + "grad_norm": 6.0123419761657715, + "learning_rate": 1.721060804148482e-05, + "loss": 2.0993, + "step": 1299 + }, + { + "epoch": 0.7999384662718253, + "grad_norm": 7.265597343444824, + "learning_rate": 1.7206002556367276e-05, + "loss": 1.8345, + "step": 1300 + }, + { + "epoch": 0.8005538035535728, + "grad_norm": 5.532131195068359, + "learning_rate": 1.7201393889696413e-05, + "loss": 1.9529, + "step": 1301 + }, + { + "epoch": 0.8011691408353203, + "grad_norm": 7.595781326293945, + "learning_rate": 1.719678204350703e-05, + "loss": 2.1531, + "step": 1302 + }, + { + "epoch": 0.8017844781170679, + "grad_norm": 7.568321704864502, + "learning_rate": 1.7192167019835314e-05, + "loss": 2.0143, + "step": 1303 + }, + { + "epoch": 0.8023998153988154, + "grad_norm": 7.479677677154541, + "learning_rate": 1.7187548820718875e-05, + "loss": 2.2038, + "step": 1304 + }, + { + "epoch": 0.803015152680563, + "grad_norm": 5.840971946716309, + "learning_rate": 1.7182927448196705e-05, + "loss": 2.2078, + "step": 1305 + }, + { + "epoch": 0.8036304899623106, + "grad_norm": 6.882693290710449, + "learning_rate": 1.717830290430921e-05, + "loss": 2.0549, + "step": 1306 + }, + { + "epoch": 0.8042458272440581, + "grad_norm": 6.341563701629639, + "learning_rate": 1.717367519109819e-05, + "loss": 1.9555, + "step": 1307 + }, + { 
+ "epoch": 0.8048611645258057, + "grad_norm": 7.100786209106445, + "learning_rate": 1.716904431060685e-05, + "loss": 1.9223, + "step": 1308 + }, + { + "epoch": 0.8054765018075533, + "grad_norm": 6.132269382476807, + "learning_rate": 1.7164410264879788e-05, + "loss": 2.0292, + "step": 1309 + }, + { + "epoch": 0.8060918390893008, + "grad_norm": 5.753194332122803, + "learning_rate": 1.7159773055963003e-05, + "loss": 2.1182, + "step": 1310 + }, + { + "epoch": 0.8067071763710484, + "grad_norm": 6.656703472137451, + "learning_rate": 1.7155132685903888e-05, + "loss": 2.03, + "step": 1311 + }, + { + "epoch": 0.807322513652796, + "grad_norm": 6.853048801422119, + "learning_rate": 1.715048915675123e-05, + "loss": 2.1308, + "step": 1312 + }, + { + "epoch": 0.8079378509345435, + "grad_norm": 6.039052486419678, + "learning_rate": 1.7145842470555218e-05, + "loss": 1.9932, + "step": 1313 + }, + { + "epoch": 0.8085531882162911, + "grad_norm": 6.935861587524414, + "learning_rate": 1.714119262936743e-05, + "loss": 1.9694, + "step": 1314 + }, + { + "epoch": 0.8091685254980386, + "grad_norm": 6.304679870605469, + "learning_rate": 1.713653963524084e-05, + "loss": 2.2438, + "step": 1315 + }, + { + "epoch": 0.8097838627797862, + "grad_norm": 6.268488883972168, + "learning_rate": 1.7131883490229803e-05, + "loss": 1.9088, + "step": 1316 + }, + { + "epoch": 0.8103992000615338, + "grad_norm": 6.242979526519775, + "learning_rate": 1.712722419639008e-05, + "loss": 2.0219, + "step": 1317 + }, + { + "epoch": 0.8110145373432813, + "grad_norm": 4.841079235076904, + "learning_rate": 1.7122561755778816e-05, + "loss": 2.0621, + "step": 1318 + }, + { + "epoch": 0.8116298746250289, + "grad_norm": 7.080075740814209, + "learning_rate": 1.7117896170454542e-05, + "loss": 2.0974, + "step": 1319 + }, + { + "epoch": 0.8122452119067765, + "grad_norm": 6.590303897857666, + "learning_rate": 1.7113227442477182e-05, + "loss": 2.3125, + "step": 1320 + }, + { + "epoch": 0.8128605491885239, + "grad_norm": 7.004912853240967, + "learning_rate": 1.710855557390805e-05, + "loss": 1.9315, + "step": 1321 + }, + { + "epoch": 0.8134758864702715, + "grad_norm": 6.721816539764404, + "learning_rate": 1.710388056680984e-05, + "loss": 2.0318, + "step": 1322 + }, + { + "epoch": 0.814091223752019, + "grad_norm": 7.745920658111572, + "learning_rate": 1.7099202423246632e-05, + "loss": 1.7291, + "step": 1323 + }, + { + "epoch": 0.8147065610337666, + "grad_norm": 6.35861349105835, + "learning_rate": 1.70945211452839e-05, + "loss": 2.0694, + "step": 1324 + }, + { + "epoch": 0.8153218983155142, + "grad_norm": 5.946057319641113, + "learning_rate": 1.7089836734988485e-05, + "loss": 1.8604, + "step": 1325 + }, + { + "epoch": 0.8159372355972617, + "grad_norm": 5.780205249786377, + "learning_rate": 1.7085149194428628e-05, + "loss": 2.0983, + "step": 1326 + }, + { + "epoch": 0.8165525728790093, + "grad_norm": 5.911633014678955, + "learning_rate": 1.7080458525673948e-05, + "loss": 2.0855, + "step": 1327 + }, + { + "epoch": 0.8171679101607568, + "grad_norm": 7.665287971496582, + "learning_rate": 1.7075764730795437e-05, + "loss": 1.9934, + "step": 1328 + }, + { + "epoch": 0.8177832474425044, + "grad_norm": 6.627066612243652, + "learning_rate": 1.7071067811865477e-05, + "loss": 2.0986, + "step": 1329 + }, + { + "epoch": 0.818398584724252, + "grad_norm": 6.743490219116211, + "learning_rate": 1.706636777095782e-05, + "loss": 1.8997, + "step": 1330 + }, + { + "epoch": 0.8190139220059995, + "grad_norm": 8.751930236816406, + "learning_rate": 1.7061664610147605e-05, + "loss": 
2.0997, + "step": 1331 + }, + { + "epoch": 0.8196292592877471, + "grad_norm": 6.377986431121826, + "learning_rate": 1.7056958331511343e-05, + "loss": 1.9133, + "step": 1332 + }, + { + "epoch": 0.8202445965694947, + "grad_norm": 6.743743896484375, + "learning_rate": 1.7052248937126926e-05, + "loss": 2.1599, + "step": 1333 + }, + { + "epoch": 0.8208599338512422, + "grad_norm": 6.202991008758545, + "learning_rate": 1.7047536429073617e-05, + "loss": 2.087, + "step": 1334 + }, + { + "epoch": 0.8214752711329898, + "grad_norm": 6.06965446472168, + "learning_rate": 1.7042820809432057e-05, + "loss": 2.2577, + "step": 1335 + }, + { + "epoch": 0.8220906084147374, + "grad_norm": 6.28372859954834, + "learning_rate": 1.7038102080284257e-05, + "loss": 2.0441, + "step": 1336 + }, + { + "epoch": 0.8227059456964849, + "grad_norm": 7.4801740646362305, + "learning_rate": 1.7033380243713602e-05, + "loss": 2.0137, + "step": 1337 + }, + { + "epoch": 0.8233212829782325, + "grad_norm": 7.055887699127197, + "learning_rate": 1.7028655301804858e-05, + "loss": 1.9616, + "step": 1338 + }, + { + "epoch": 0.82393662025998, + "grad_norm": 5.311764717102051, + "learning_rate": 1.702392725664415e-05, + "loss": 2.1068, + "step": 1339 + }, + { + "epoch": 0.8245519575417276, + "grad_norm": 7.112353324890137, + "learning_rate": 1.7019196110318977e-05, + "loss": 1.9531, + "step": 1340 + }, + { + "epoch": 0.8251672948234752, + "grad_norm": 5.3938307762146, + "learning_rate": 1.7014461864918205e-05, + "loss": 1.9895, + "step": 1341 + }, + { + "epoch": 0.8257826321052226, + "grad_norm": 9.523309707641602, + "learning_rate": 1.700972452253208e-05, + "loss": 1.9158, + "step": 1342 + }, + { + "epoch": 0.8263979693869702, + "grad_norm": 5.399523735046387, + "learning_rate": 1.70049840852522e-05, + "loss": 1.9093, + "step": 1343 + }, + { + "epoch": 0.8270133066687178, + "grad_norm": 6.831374645233154, + "learning_rate": 1.7000240555171536e-05, + "loss": 2.0849, + "step": 1344 + }, + { + "epoch": 0.8276286439504653, + "grad_norm": 6.301061630249023, + "learning_rate": 1.6995493934384427e-05, + "loss": 2.0896, + "step": 1345 + }, + { + "epoch": 0.8282439812322129, + "grad_norm": 7.96735954284668, + "learning_rate": 1.699074422498657e-05, + "loss": 2.2403, + "step": 1346 + }, + { + "epoch": 0.8288593185139604, + "grad_norm": 6.142386436462402, + "learning_rate": 1.6985991429075038e-05, + "loss": 1.9982, + "step": 1347 + }, + { + "epoch": 0.829474655795708, + "grad_norm": 5.67518424987793, + "learning_rate": 1.6981235548748247e-05, + "loss": 1.9227, + "step": 1348 + }, + { + "epoch": 0.8300899930774556, + "grad_norm": 5.646626949310303, + "learning_rate": 1.6976476586105996e-05, + "loss": 2.0496, + "step": 1349 + }, + { + "epoch": 0.8307053303592031, + "grad_norm": 6.110504627227783, + "learning_rate": 1.697171454324943e-05, + "loss": 2.2416, + "step": 1350 + }, + { + "epoch": 0.8313206676409507, + "grad_norm": 6.586524486541748, + "learning_rate": 1.6966949422281058e-05, + "loss": 2.1256, + "step": 1351 + }, + { + "epoch": 0.8319360049226983, + "grad_norm": 5.5354509353637695, + "learning_rate": 1.6962181225304756e-05, + "loss": 2.0925, + "step": 1352 + }, + { + "epoch": 0.8325513422044458, + "grad_norm": 7.219810962677002, + "learning_rate": 1.6957409954425747e-05, + "loss": 2.0903, + "step": 1353 + }, + { + "epoch": 0.8331666794861934, + "grad_norm": 7.853287696838379, + "learning_rate": 1.695263561175061e-05, + "loss": 2.2265, + "step": 1354 + }, + { + "epoch": 0.833782016767941, + "grad_norm": 5.973521709442139, + "learning_rate": 
1.6947858199387296e-05, + "loss": 1.8512, + "step": 1355 + }, + { + "epoch": 0.8343973540496885, + "grad_norm": 7.213109493255615, + "learning_rate": 1.694307771944509e-05, + "loss": 2.0607, + "step": 1356 + }, + { + "epoch": 0.8350126913314361, + "grad_norm": 8.05849838256836, + "learning_rate": 1.6938294174034652e-05, + "loss": 2.0942, + "step": 1357 + }, + { + "epoch": 0.8356280286131836, + "grad_norm": 6.25191068649292, + "learning_rate": 1.6933507565267982e-05, + "loss": 2.0401, + "step": 1358 + }, + { + "epoch": 0.8362433658949312, + "grad_norm": 7.4434814453125, + "learning_rate": 1.692871789525844e-05, + "loss": 2.1313, + "step": 1359 + }, + { + "epoch": 0.8368587031766788, + "grad_norm": 6.321520805358887, + "learning_rate": 1.6923925166120722e-05, + "loss": 2.0626, + "step": 1360 + }, + { + "epoch": 0.8374740404584263, + "grad_norm": 6.63942289352417, + "learning_rate": 1.69191293799709e-05, + "loss": 2.1602, + "step": 1361 + }, + { + "epoch": 0.8380893777401738, + "grad_norm": 6.359405040740967, + "learning_rate": 1.6914330538926377e-05, + "loss": 1.9579, + "step": 1362 + }, + { + "epoch": 0.8387047150219213, + "grad_norm": 7.1513671875, + "learning_rate": 1.6909528645105908e-05, + "loss": 2.0719, + "step": 1363 + }, + { + "epoch": 0.8393200523036689, + "grad_norm": 6.789982318878174, + "learning_rate": 1.69047237006296e-05, + "loss": 2.0648, + "step": 1364 + }, + { + "epoch": 0.8399353895854165, + "grad_norm": 6.079066753387451, + "learning_rate": 1.6899915707618905e-05, + "loss": 2.0123, + "step": 1365 + }, + { + "epoch": 0.840550726867164, + "grad_norm": 5.259195327758789, + "learning_rate": 1.6895104668196622e-05, + "loss": 2.0033, + "step": 1366 + }, + { + "epoch": 0.8411660641489116, + "grad_norm": 6.658989429473877, + "learning_rate": 1.6890290584486892e-05, + "loss": 2.1922, + "step": 1367 + }, + { + "epoch": 0.8417814014306592, + "grad_norm": 6.631322860717773, + "learning_rate": 1.6885473458615204e-05, + "loss": 2.108, + "step": 1368 + }, + { + "epoch": 0.8423967387124067, + "grad_norm": 5.678969383239746, + "learning_rate": 1.6880653292708383e-05, + "loss": 2.0932, + "step": 1369 + }, + { + "epoch": 0.8430120759941543, + "grad_norm": 6.184486389160156, + "learning_rate": 1.6875830088894604e-05, + "loss": 1.9473, + "step": 1370 + }, + { + "epoch": 0.8436274132759018, + "grad_norm": 6.176492691040039, + "learning_rate": 1.687100384930338e-05, + "loss": 2.0567, + "step": 1371 + }, + { + "epoch": 0.8442427505576494, + "grad_norm": 6.213077068328857, + "learning_rate": 1.686617457606557e-05, + "loss": 1.9835, + "step": 1372 + }, + { + "epoch": 0.844858087839397, + "grad_norm": 5.617856502532959, + "learning_rate": 1.6861342271313363e-05, + "loss": 2.0608, + "step": 1373 + }, + { + "epoch": 0.8454734251211445, + "grad_norm": 8.898804664611816, + "learning_rate": 1.685650693718029e-05, + "loss": 2.0228, + "step": 1374 + }, + { + "epoch": 0.8460887624028921, + "grad_norm": 6.148120403289795, + "learning_rate": 1.685166857580122e-05, + "loss": 1.9325, + "step": 1375 + }, + { + "epoch": 0.8467040996846397, + "grad_norm": 6.523891925811768, + "learning_rate": 1.684682718931236e-05, + "loss": 2.0702, + "step": 1376 + }, + { + "epoch": 0.8473194369663872, + "grad_norm": 6.129950523376465, + "learning_rate": 1.6841982779851254e-05, + "loss": 2.1185, + "step": 1377 + }, + { + "epoch": 0.8479347742481348, + "grad_norm": 6.598451614379883, + "learning_rate": 1.6837135349556773e-05, + "loss": 2.121, + "step": 1378 + }, + { + "epoch": 0.8485501115298824, + "grad_norm": 
5.8871870040893555, + "learning_rate": 1.683228490056913e-05, + "loss": 1.9399, + "step": 1379 + }, + { + "epoch": 0.8491654488116299, + "grad_norm": 8.425426483154297, + "learning_rate": 1.682743143502987e-05, + "loss": 1.7674, + "step": 1380 + }, + { + "epoch": 0.8497807860933775, + "grad_norm": 6.096135139465332, + "learning_rate": 1.6822574955081862e-05, + "loss": 1.9895, + "step": 1381 + }, + { + "epoch": 0.8503961233751249, + "grad_norm": 6.470090866088867, + "learning_rate": 1.6817715462869314e-05, + "loss": 1.8468, + "step": 1382 + }, + { + "epoch": 0.8510114606568725, + "grad_norm": 6.788223743438721, + "learning_rate": 1.6812852960537763e-05, + "loss": 2.1846, + "step": 1383 + }, + { + "epoch": 0.8516267979386201, + "grad_norm": 6.01656436920166, + "learning_rate": 1.680798745023407e-05, + "loss": 1.9922, + "step": 1384 + }, + { + "epoch": 0.8522421352203676, + "grad_norm": 6.60959005355835, + "learning_rate": 1.680311893410643e-05, + "loss": 2.3279, + "step": 1385 + }, + { + "epoch": 0.8528574725021152, + "grad_norm": 7.406412124633789, + "learning_rate": 1.6798247414304363e-05, + "loss": 2.1885, + "step": 1386 + }, + { + "epoch": 0.8534728097838628, + "grad_norm": 6.763721942901611, + "learning_rate": 1.6793372892978716e-05, + "loss": 2.1682, + "step": 1387 + }, + { + "epoch": 0.8540881470656103, + "grad_norm": 6.587805271148682, + "learning_rate": 1.6788495372281656e-05, + "loss": 2.2044, + "step": 1388 + }, + { + "epoch": 0.8547034843473579, + "grad_norm": 5.818113803863525, + "learning_rate": 1.678361485436668e-05, + "loss": 1.8865, + "step": 1389 + }, + { + "epoch": 0.8553188216291054, + "grad_norm": 9.31054401397705, + "learning_rate": 1.6778731341388607e-05, + "loss": 2.0785, + "step": 1390 + }, + { + "epoch": 0.855934158910853, + "grad_norm": 6.281113624572754, + "learning_rate": 1.6773844835503574e-05, + "loss": 2.0165, + "step": 1391 + }, + { + "epoch": 0.8565494961926006, + "grad_norm": 7.213371753692627, + "learning_rate": 1.676895533886905e-05, + "loss": 2.1523, + "step": 1392 + }, + { + "epoch": 0.8571648334743481, + "grad_norm": 5.903090953826904, + "learning_rate": 1.6764062853643813e-05, + "loss": 2.0573, + "step": 1393 + }, + { + "epoch": 0.8577801707560957, + "grad_norm": 6.384945869445801, + "learning_rate": 1.675916738198797e-05, + "loss": 1.8861, + "step": 1394 + }, + { + "epoch": 0.8583955080378433, + "grad_norm": 5.72274112701416, + "learning_rate": 1.6754268926062936e-05, + "loss": 2.2108, + "step": 1395 + }, + { + "epoch": 0.8590108453195908, + "grad_norm": 7.554296493530273, + "learning_rate": 1.6749367488031455e-05, + "loss": 2.1181, + "step": 1396 + }, + { + "epoch": 0.8596261826013384, + "grad_norm": 5.755009174346924, + "learning_rate": 1.6744463070057574e-05, + "loss": 1.9979, + "step": 1397 + }, + { + "epoch": 0.860241519883086, + "grad_norm": 6.300257682800293, + "learning_rate": 1.6739555674306675e-05, + "loss": 2.0148, + "step": 1398 + }, + { + "epoch": 0.8608568571648335, + "grad_norm": 6.618052005767822, + "learning_rate": 1.6734645302945434e-05, + "loss": 2.1605, + "step": 1399 + }, + { + "epoch": 0.8614721944465811, + "grad_norm": 7.629969596862793, + "learning_rate": 1.6729731958141852e-05, + "loss": 2.1432, + "step": 1400 + }, + { + "epoch": 0.8620875317283286, + "grad_norm": 5.95991849899292, + "learning_rate": 1.6724815642065245e-05, + "loss": 2.0678, + "step": 1401 + }, + { + "epoch": 0.8627028690100762, + "grad_norm": 7.331486701965332, + "learning_rate": 1.6719896356886238e-05, + "loss": 2.0336, + "step": 1402 + }, + { + "epoch": 
0.8633182062918237, + "grad_norm": 7.1518168449401855, + "learning_rate": 1.671497410477676e-05, + "loss": 2.0273, + "step": 1403 + }, + { + "epoch": 0.8639335435735712, + "grad_norm": 5.750260353088379, + "learning_rate": 1.6710048887910058e-05, + "loss": 2.0209, + "step": 1404 + }, + { + "epoch": 0.8645488808553188, + "grad_norm": 7.14370584487915, + "learning_rate": 1.6705120708460686e-05, + "loss": 2.0231, + "step": 1405 + }, + { + "epoch": 0.8651642181370663, + "grad_norm": 6.207887649536133, + "learning_rate": 1.6700189568604506e-05, + "loss": 2.2543, + "step": 1406 + }, + { + "epoch": 0.8657795554188139, + "grad_norm": 6.924946308135986, + "learning_rate": 1.6695255470518687e-05, + "loss": 2.0369, + "step": 1407 + }, + { + "epoch": 0.8663948927005615, + "grad_norm": 5.892796516418457, + "learning_rate": 1.6690318416381706e-05, + "loss": 2.125, + "step": 1408 + }, + { + "epoch": 0.867010229982309, + "grad_norm": 7.2154083251953125, + "learning_rate": 1.6685378408373338e-05, + "loss": 2.0442, + "step": 1409 + }, + { + "epoch": 0.8676255672640566, + "grad_norm": 7.458181858062744, + "learning_rate": 1.6680435448674673e-05, + "loss": 1.9721, + "step": 1410 + }, + { + "epoch": 0.8682409045458042, + "grad_norm": 6.082408428192139, + "learning_rate": 1.6675489539468094e-05, + "loss": 2.1301, + "step": 1411 + }, + { + "epoch": 0.8688562418275517, + "grad_norm": 7.870296001434326, + "learning_rate": 1.667054068293729e-05, + "loss": 1.831, + "step": 1412 + }, + { + "epoch": 0.8694715791092993, + "grad_norm": 6.3857831954956055, + "learning_rate": 1.666558888126726e-05, + "loss": 2.0888, + "step": 1413 + }, + { + "epoch": 0.8700869163910468, + "grad_norm": 6.9964470863342285, + "learning_rate": 1.6660634136644284e-05, + "loss": 2.0467, + "step": 1414 + }, + { + "epoch": 0.8707022536727944, + "grad_norm": 7.052083492279053, + "learning_rate": 1.665567645125596e-05, + "loss": 2.2123, + "step": 1415 + }, + { + "epoch": 0.871317590954542, + "grad_norm": 5.875755786895752, + "learning_rate": 1.6650715827291178e-05, + "loss": 2.1669, + "step": 1416 + }, + { + "epoch": 0.8719329282362895, + "grad_norm": 5.907451152801514, + "learning_rate": 1.664575226694012e-05, + "loss": 2.0436, + "step": 1417 + }, + { + "epoch": 0.8725482655180371, + "grad_norm": 7.4824442863464355, + "learning_rate": 1.6640785772394275e-05, + "loss": 1.9105, + "step": 1418 + }, + { + "epoch": 0.8731636027997847, + "grad_norm": 5.841197490692139, + "learning_rate": 1.6635816345846413e-05, + "loss": 2.121, + "step": 1419 + }, + { + "epoch": 0.8737789400815322, + "grad_norm": 6.870905876159668, + "learning_rate": 1.6630843989490612e-05, + "loss": 1.9655, + "step": 1420 + }, + { + "epoch": 0.8743942773632798, + "grad_norm": 7.910282611846924, + "learning_rate": 1.6625868705522238e-05, + "loss": 2.1657, + "step": 1421 + }, + { + "epoch": 0.8750096146450274, + "grad_norm": 5.767560005187988, + "learning_rate": 1.6620890496137953e-05, + "loss": 2.0546, + "step": 1422 + }, + { + "epoch": 0.8756249519267748, + "grad_norm": 6.116088390350342, + "learning_rate": 1.66159093635357e-05, + "loss": 2.277, + "step": 1423 + }, + { + "epoch": 0.8762402892085224, + "grad_norm": 5.435750961303711, + "learning_rate": 1.661092530991473e-05, + "loss": 2.1076, + "step": 1424 + }, + { + "epoch": 0.8768556264902699, + "grad_norm": 5.1367292404174805, + "learning_rate": 1.660593833747557e-05, + "loss": 2.08, + "step": 1425 + }, + { + "epoch": 0.8774709637720175, + "grad_norm": 7.048637866973877, + "learning_rate": 1.6600948448420035e-05, + "loss": 1.9376, + 
"step": 1426 + }, + { + "epoch": 0.8780863010537651, + "grad_norm": 6.785215854644775, + "learning_rate": 1.659595564495124e-05, + "loss": 1.8702, + "step": 1427 + }, + { + "epoch": 0.8787016383355126, + "grad_norm": 6.99608850479126, + "learning_rate": 1.6590959929273574e-05, + "loss": 2.1187, + "step": 1428 + }, + { + "epoch": 0.8793169756172602, + "grad_norm": 6.555530548095703, + "learning_rate": 1.6585961303592716e-05, + "loss": 1.8001, + "step": 1429 + }, + { + "epoch": 0.8799323128990078, + "grad_norm": 6.611457824707031, + "learning_rate": 1.6580959770115637e-05, + "loss": 2.0467, + "step": 1430 + }, + { + "epoch": 0.8805476501807553, + "grad_norm": 5.693901538848877, + "learning_rate": 1.6575955331050585e-05, + "loss": 2.1503, + "step": 1431 + }, + { + "epoch": 0.8811629874625029, + "grad_norm": 6.674139976501465, + "learning_rate": 1.6570947988607084e-05, + "loss": 1.9779, + "step": 1432 + }, + { + "epoch": 0.8817783247442504, + "grad_norm": 6.2410783767700195, + "learning_rate": 1.6565937744995955e-05, + "loss": 1.8959, + "step": 1433 + }, + { + "epoch": 0.882393662025998, + "grad_norm": 6.198887825012207, + "learning_rate": 1.656092460242929e-05, + "loss": 2.0068, + "step": 1434 + }, + { + "epoch": 0.8830089993077456, + "grad_norm": 6.764228343963623, + "learning_rate": 1.6555908563120457e-05, + "loss": 2.0095, + "step": 1435 + }, + { + "epoch": 0.8836243365894931, + "grad_norm": 8.876399040222168, + "learning_rate": 1.655088962928412e-05, + "loss": 1.9972, + "step": 1436 + }, + { + "epoch": 0.8842396738712407, + "grad_norm": 6.65811824798584, + "learning_rate": 1.6545867803136203e-05, + "loss": 2.0022, + "step": 1437 + }, + { + "epoch": 0.8848550111529883, + "grad_norm": 8.390813827514648, + "learning_rate": 1.6540843086893915e-05, + "loss": 2.3547, + "step": 1438 + }, + { + "epoch": 0.8854703484347358, + "grad_norm": 6.150032997131348, + "learning_rate": 1.6535815482775745e-05, + "loss": 2.1469, + "step": 1439 + }, + { + "epoch": 0.8860856857164834, + "grad_norm": 8.349886894226074, + "learning_rate": 1.6530784993001448e-05, + "loss": 2.0087, + "step": 1440 + }, + { + "epoch": 0.886701022998231, + "grad_norm": 6.053825855255127, + "learning_rate": 1.6525751619792056e-05, + "loss": 2.0164, + "step": 1441 + }, + { + "epoch": 0.8873163602799785, + "grad_norm": 6.128987789154053, + "learning_rate": 1.6520715365369878e-05, + "loss": 1.9602, + "step": 1442 + }, + { + "epoch": 0.8879316975617261, + "grad_norm": 6.13970422744751, + "learning_rate": 1.6515676231958488e-05, + "loss": 2.0673, + "step": 1443 + }, + { + "epoch": 0.8885470348434735, + "grad_norm": 7.519302845001221, + "learning_rate": 1.6510634221782743e-05, + "loss": 1.8396, + "step": 1444 + }, + { + "epoch": 0.8891623721252211, + "grad_norm": 6.008177757263184, + "learning_rate": 1.650558933706876e-05, + "loss": 1.9608, + "step": 1445 + }, + { + "epoch": 0.8897777094069687, + "grad_norm": 5.4838433265686035, + "learning_rate": 1.650054158004392e-05, + "loss": 2.1516, + "step": 1446 + }, + { + "epoch": 0.8903930466887162, + "grad_norm": 7.265977382659912, + "learning_rate": 1.6495490952936898e-05, + "loss": 2.0419, + "step": 1447 + }, + { + "epoch": 0.8910083839704638, + "grad_norm": 9.79360580444336, + "learning_rate": 1.64904374579776e-05, + "loss": 2.122, + "step": 1448 + }, + { + "epoch": 0.8916237212522113, + "grad_norm": 8.679040908813477, + "learning_rate": 1.6485381097397223e-05, + "loss": 2.2286, + "step": 1449 + }, + { + "epoch": 0.8922390585339589, + "grad_norm": 6.755616664886475, + "learning_rate": 
1.6480321873428225e-05, + "loss": 1.7934, + "step": 1450 + }, + { + "epoch": 0.8928543958157065, + "grad_norm": 6.273370265960693, + "learning_rate": 1.647525978830432e-05, + "loss": 2.1136, + "step": 1451 + }, + { + "epoch": 0.893469733097454, + "grad_norm": 6.752788543701172, + "learning_rate": 1.6470194844260497e-05, + "loss": 1.9574, + "step": 1452 + }, + { + "epoch": 0.8940850703792016, + "grad_norm": 6.3557329177856445, + "learning_rate": 1.6465127043533e-05, + "loss": 2.0434, + "step": 1453 + }, + { + "epoch": 0.8947004076609492, + "grad_norm": 6.308954238891602, + "learning_rate": 1.6460056388359336e-05, + "loss": 2.0139, + "step": 1454 + }, + { + "epoch": 0.8953157449426967, + "grad_norm": 7.536130905151367, + "learning_rate": 1.645498288097827e-05, + "loss": 1.9925, + "step": 1455 + }, + { + "epoch": 0.8959310822244443, + "grad_norm": 8.58876895904541, + "learning_rate": 1.6449906523629825e-05, + "loss": 2.1402, + "step": 1456 + }, + { + "epoch": 0.8965464195061918, + "grad_norm": 6.842116832733154, + "learning_rate": 1.644482731855529e-05, + "loss": 1.9743, + "step": 1457 + }, + { + "epoch": 0.8971617567879394, + "grad_norm": 5.5195770263671875, + "learning_rate": 1.643974526799721e-05, + "loss": 2.0039, + "step": 1458 + }, + { + "epoch": 0.897777094069687, + "grad_norm": 6.097393035888672, + "learning_rate": 1.6434660374199377e-05, + "loss": 2.1278, + "step": 1459 + }, + { + "epoch": 0.8983924313514345, + "grad_norm": 6.450613021850586, + "learning_rate": 1.6429572639406845e-05, + "loss": 1.966, + "step": 1460 + }, + { + "epoch": 0.8990077686331821, + "grad_norm": 6.053622245788574, + "learning_rate": 1.6424482065865925e-05, + "loss": 1.9581, + "step": 1461 + }, + { + "epoch": 0.8996231059149297, + "grad_norm": 6.611551761627197, + "learning_rate": 1.6419388655824175e-05, + "loss": 1.9821, + "step": 1462 + }, + { + "epoch": 0.9002384431966772, + "grad_norm": 6.6195478439331055, + "learning_rate": 1.641429241153041e-05, + "loss": 2.1825, + "step": 1463 + }, + { + "epoch": 0.9008537804784247, + "grad_norm": 5.963956832885742, + "learning_rate": 1.6409193335234698e-05, + "loss": 2.1259, + "step": 1464 + }, + { + "epoch": 0.9014691177601722, + "grad_norm": 6.794525146484375, + "learning_rate": 1.6404091429188348e-05, + "loss": 2.066, + "step": 1465 + }, + { + "epoch": 0.9020844550419198, + "grad_norm": 6.854122638702393, + "learning_rate": 1.6398986695643926e-05, + "loss": 2.0739, + "step": 1466 + }, + { + "epoch": 0.9026997923236674, + "grad_norm": 6.032322406768799, + "learning_rate": 1.6393879136855247e-05, + "loss": 1.877, + "step": 1467 + }, + { + "epoch": 0.9033151296054149, + "grad_norm": 5.233457088470459, + "learning_rate": 1.6388768755077374e-05, + "loss": 2.1242, + "step": 1468 + }, + { + "epoch": 0.9039304668871625, + "grad_norm": 6.805862903594971, + "learning_rate": 1.638365555256661e-05, + "loss": 2.0549, + "step": 1469 + }, + { + "epoch": 0.9045458041689101, + "grad_norm": 6.649767875671387, + "learning_rate": 1.6378539531580502e-05, + "loss": 2.08, + "step": 1470 + }, + { + "epoch": 0.9051611414506576, + "grad_norm": 5.873214244842529, + "learning_rate": 1.6373420694377857e-05, + "loss": 2.0971, + "step": 1471 + }, + { + "epoch": 0.9057764787324052, + "grad_norm": 7.083460807800293, + "learning_rate": 1.636829904321871e-05, + "loss": 1.8232, + "step": 1472 + }, + { + "epoch": 0.9063918160141528, + "grad_norm": 6.856746673583984, + "learning_rate": 1.636317458036434e-05, + "loss": 2.078, + "step": 1473 + }, + { + "epoch": 0.9070071532959003, + "grad_norm": 
5.17281436920166, + "learning_rate": 1.635804730807728e-05, + "loss": 2.0503, + "step": 1474 + }, + { + "epoch": 0.9076224905776479, + "grad_norm": 7.251319885253906, + "learning_rate": 1.6352917228621284e-05, + "loss": 1.8813, + "step": 1475 + }, + { + "epoch": 0.9082378278593954, + "grad_norm": 5.396692752838135, + "learning_rate": 1.6347784344261365e-05, + "loss": 2.0051, + "step": 1476 + }, + { + "epoch": 0.908853165141143, + "grad_norm": 7.458128929138184, + "learning_rate": 1.634264865726375e-05, + "loss": 2.109, + "step": 1477 + }, + { + "epoch": 0.9094685024228906, + "grad_norm": 6.9574785232543945, + "learning_rate": 1.6337510169895935e-05, + "loss": 2.1573, + "step": 1478 + }, + { + "epoch": 0.9100838397046381, + "grad_norm": 6.285819053649902, + "learning_rate": 1.633236888442663e-05, + "loss": 1.9856, + "step": 1479 + }, + { + "epoch": 0.9106991769863857, + "grad_norm": 7.069048881530762, + "learning_rate": 1.6327224803125782e-05, + "loss": 2.152, + "step": 1480 + }, + { + "epoch": 0.9113145142681333, + "grad_norm": 5.900315284729004, + "learning_rate": 1.632207792826458e-05, + "loss": 2.1782, + "step": 1481 + }, + { + "epoch": 0.9119298515498808, + "grad_norm": 5.959699630737305, + "learning_rate": 1.6316928262115443e-05, + "loss": 2.0236, + "step": 1482 + }, + { + "epoch": 0.9125451888316284, + "grad_norm": 6.634212017059326, + "learning_rate": 1.631177580695202e-05, + "loss": 2.2755, + "step": 1483 + }, + { + "epoch": 0.913160526113376, + "grad_norm": 6.769792079925537, + "learning_rate": 1.6306620565049196e-05, + "loss": 1.994, + "step": 1484 + }, + { + "epoch": 0.9137758633951234, + "grad_norm": 5.980954170227051, + "learning_rate": 1.6301462538683087e-05, + "loss": 2.0797, + "step": 1485 + }, + { + "epoch": 0.914391200676871, + "grad_norm": 5.07311487197876, + "learning_rate": 1.6296301730131028e-05, + "loss": 2.0118, + "step": 1486 + }, + { + "epoch": 0.9150065379586185, + "grad_norm": 6.015927314758301, + "learning_rate": 1.6291138141671598e-05, + "loss": 1.8918, + "step": 1487 + }, + { + "epoch": 0.9156218752403661, + "grad_norm": 6.69703483581543, + "learning_rate": 1.6285971775584587e-05, + "loss": 2.0472, + "step": 1488 + }, + { + "epoch": 0.9162372125221137, + "grad_norm": 5.662362575531006, + "learning_rate": 1.628080263415103e-05, + "loss": 2.0689, + "step": 1489 + }, + { + "epoch": 0.9168525498038612, + "grad_norm": 6.534261226654053, + "learning_rate": 1.6275630719653167e-05, + "loss": 2.1182, + "step": 1490 + }, + { + "epoch": 0.9174678870856088, + "grad_norm": 6.37846565246582, + "learning_rate": 1.6270456034374477e-05, + "loss": 2.0284, + "step": 1491 + }, + { + "epoch": 0.9180832243673563, + "grad_norm": 6.997260570526123, + "learning_rate": 1.6265278580599657e-05, + "loss": 1.9806, + "step": 1492 + }, + { + "epoch": 0.9186985616491039, + "grad_norm": 6.43035888671875, + "learning_rate": 1.6260098360614626e-05, + "loss": 1.9598, + "step": 1493 + }, + { + "epoch": 0.9193138989308515, + "grad_norm": 6.812088966369629, + "learning_rate": 1.6254915376706526e-05, + "loss": 1.9861, + "step": 1494 + }, + { + "epoch": 0.919929236212599, + "grad_norm": 5.523894309997559, + "learning_rate": 1.6249729631163718e-05, + "loss": 1.9501, + "step": 1495 + }, + { + "epoch": 0.9205445734943466, + "grad_norm": 7.995274543762207, + "learning_rate": 1.6244541126275786e-05, + "loss": 2.0896, + "step": 1496 + }, + { + "epoch": 0.9211599107760942, + "grad_norm": 6.21628475189209, + "learning_rate": 1.6239349864333527e-05, + "loss": 2.1092, + "step": 1497 + }, + { + "epoch": 
0.9217752480578417, + "grad_norm": 7.204120635986328, + "learning_rate": 1.6234155847628955e-05, + "loss": 1.9507, + "step": 1498 + }, + { + "epoch": 0.9223905853395893, + "grad_norm": 6.7051472663879395, + "learning_rate": 1.6228959078455306e-05, + "loss": 2.1162, + "step": 1499 + }, + { + "epoch": 0.9230059226213368, + "grad_norm": 5.557857036590576, + "learning_rate": 1.6223759559107028e-05, + "loss": 2.0078, + "step": 1500 + }, + { + "epoch": 0.9236212599030844, + "grad_norm": 6.226946830749512, + "learning_rate": 1.621855729187978e-05, + "loss": 1.8111, + "step": 1501 + }, + { + "epoch": 0.924236597184832, + "grad_norm": 8.026422500610352, + "learning_rate": 1.6213352279070443e-05, + "loss": 2.0516, + "step": 1502 + }, + { + "epoch": 0.9248519344665795, + "grad_norm": 8.86412239074707, + "learning_rate": 1.62081445229771e-05, + "loss": 1.8683, + "step": 1503 + }, + { + "epoch": 0.9254672717483271, + "grad_norm": 7.078073978424072, + "learning_rate": 1.6202934025899052e-05, + "loss": 2.1748, + "step": 1504 + }, + { + "epoch": 0.9260826090300746, + "grad_norm": 6.734961986541748, + "learning_rate": 1.619772079013681e-05, + "loss": 1.982, + "step": 1505 + }, + { + "epoch": 0.9266979463118221, + "grad_norm": 6.3838582038879395, + "learning_rate": 1.6192504817992086e-05, + "loss": 2.1678, + "step": 1506 + }, + { + "epoch": 0.9273132835935697, + "grad_norm": 7.697085857391357, + "learning_rate": 1.6187286111767812e-05, + "loss": 1.986, + "step": 1507 + }, + { + "epoch": 0.9279286208753172, + "grad_norm": 7.354015827178955, + "learning_rate": 1.618206467376812e-05, + "loss": 1.9777, + "step": 1508 + }, + { + "epoch": 0.9285439581570648, + "grad_norm": 7.2398247718811035, + "learning_rate": 1.6176840506298345e-05, + "loss": 1.9816, + "step": 1509 + }, + { + "epoch": 0.9291592954388124, + "grad_norm": 6.529061317443848, + "learning_rate": 1.617161361166504e-05, + "loss": 1.9549, + "step": 1510 + }, + { + "epoch": 0.9297746327205599, + "grad_norm": 6.83713960647583, + "learning_rate": 1.616638399217595e-05, + "loss": 1.9891, + "step": 1511 + }, + { + "epoch": 0.9303899700023075, + "grad_norm": 7.491650581359863, + "learning_rate": 1.6161151650140027e-05, + "loss": 2.1239, + "step": 1512 + }, + { + "epoch": 0.9310053072840551, + "grad_norm": 5.559195518493652, + "learning_rate": 1.6155916587867424e-05, + "loss": 1.9141, + "step": 1513 + }, + { + "epoch": 0.9316206445658026, + "grad_norm": 5.072818756103516, + "learning_rate": 1.6150678807669494e-05, + "loss": 2.1588, + "step": 1514 + }, + { + "epoch": 0.9322359818475502, + "grad_norm": 6.539409637451172, + "learning_rate": 1.61454383118588e-05, + "loss": 1.9079, + "step": 1515 + }, + { + "epoch": 0.9328513191292978, + "grad_norm": 6.188858985900879, + "learning_rate": 1.6140195102749086e-05, + "loss": 1.8927, + "step": 1516 + }, + { + "epoch": 0.9334666564110453, + "grad_norm": 6.471725940704346, + "learning_rate": 1.613494918265531e-05, + "loss": 2.12, + "step": 1517 + }, + { + "epoch": 0.9340819936927929, + "grad_norm": 5.38123083114624, + "learning_rate": 1.612970055389362e-05, + "loss": 2.0941, + "step": 1518 + }, + { + "epoch": 0.9346973309745404, + "grad_norm": 6.3498148918151855, + "learning_rate": 1.6124449218781358e-05, + "loss": 2.155, + "step": 1519 + }, + { + "epoch": 0.935312668256288, + "grad_norm": 6.187236785888672, + "learning_rate": 1.611919517963707e-05, + "loss": 2.1531, + "step": 1520 + }, + { + "epoch": 0.9359280055380356, + "grad_norm": 6.560948848724365, + "learning_rate": 1.611393843878048e-05, + "loss": 2.2585, + 
"step": 1521 + }, + { + "epoch": 0.9365433428197831, + "grad_norm": 6.718316078186035, + "learning_rate": 1.6108678998532522e-05, + "loss": 1.8926, + "step": 1522 + }, + { + "epoch": 0.9371586801015307, + "grad_norm": 6.5279059410095215, + "learning_rate": 1.6103416861215314e-05, + "loss": 2.0501, + "step": 1523 + }, + { + "epoch": 0.9377740173832783, + "grad_norm": 8.248549461364746, + "learning_rate": 1.609815202915216e-05, + "loss": 2.0621, + "step": 1524 + }, + { + "epoch": 0.9383893546650258, + "grad_norm": 5.484820365905762, + "learning_rate": 1.6092884504667562e-05, + "loss": 2.024, + "step": 1525 + }, + { + "epoch": 0.9390046919467733, + "grad_norm": 6.495850086212158, + "learning_rate": 1.608761429008721e-05, + "loss": 1.9882, + "step": 1526 + }, + { + "epoch": 0.9396200292285208, + "grad_norm": 7.03759241104126, + "learning_rate": 1.608234138773797e-05, + "loss": 2.0945, + "step": 1527 + }, + { + "epoch": 0.9402353665102684, + "grad_norm": 6.663005352020264, + "learning_rate": 1.6077065799947914e-05, + "loss": 1.9468, + "step": 1528 + }, + { + "epoch": 0.940850703792016, + "grad_norm": 6.188401699066162, + "learning_rate": 1.6071787529046288e-05, + "loss": 1.977, + "step": 1529 + }, + { + "epoch": 0.9414660410737635, + "grad_norm": 6.0724778175354, + "learning_rate": 1.6066506577363518e-05, + "loss": 2.1945, + "step": 1530 + }, + { + "epoch": 0.9420813783555111, + "grad_norm": 8.906682014465332, + "learning_rate": 1.6061222947231224e-05, + "loss": 1.821, + "step": 1531 + }, + { + "epoch": 0.9426967156372587, + "grad_norm": 10.505105018615723, + "learning_rate": 1.6055936640982207e-05, + "loss": 2.098, + "step": 1532 + }, + { + "epoch": 0.9433120529190062, + "grad_norm": 6.124326229095459, + "learning_rate": 1.605064766095044e-05, + "loss": 2.1589, + "step": 1533 + }, + { + "epoch": 0.9439273902007538, + "grad_norm": 6.911174297332764, + "learning_rate": 1.604535600947109e-05, + "loss": 1.7732, + "step": 1534 + }, + { + "epoch": 0.9445427274825013, + "grad_norm": 6.8780436515808105, + "learning_rate": 1.6040061688880494e-05, + "loss": 2.0083, + "step": 1535 + }, + { + "epoch": 0.9451580647642489, + "grad_norm": 7.705031871795654, + "learning_rate": 1.603476470151617e-05, + "loss": 2.1385, + "step": 1536 + }, + { + "epoch": 0.9457734020459965, + "grad_norm": 5.712606430053711, + "learning_rate": 1.6029465049716813e-05, + "loss": 2.1063, + "step": 1537 + }, + { + "epoch": 0.946388739327744, + "grad_norm": 6.356781959533691, + "learning_rate": 1.6024162735822296e-05, + "loss": 2.1106, + "step": 1538 + }, + { + "epoch": 0.9470040766094916, + "grad_norm": 7.008456707000732, + "learning_rate": 1.6018857762173672e-05, + "loss": 2.0362, + "step": 1539 + }, + { + "epoch": 0.9476194138912392, + "grad_norm": 6.421850681304932, + "learning_rate": 1.601355013111315e-05, + "loss": 2.0738, + "step": 1540 + }, + { + "epoch": 0.9482347511729867, + "grad_norm": 5.714890480041504, + "learning_rate": 1.600823984498414e-05, + "loss": 1.9417, + "step": 1541 + }, + { + "epoch": 0.9488500884547343, + "grad_norm": 6.343738079071045, + "learning_rate": 1.6002926906131202e-05, + "loss": 1.9683, + "step": 1542 + }, + { + "epoch": 0.9494654257364818, + "grad_norm": 6.530696868896484, + "learning_rate": 1.5997611316900075e-05, + "loss": 1.9461, + "step": 1543 + }, + { + "epoch": 0.9500807630182294, + "grad_norm": 6.8803253173828125, + "learning_rate": 1.5992293079637665e-05, + "loss": 2.2284, + "step": 1544 + }, + { + "epoch": 0.950696100299977, + "grad_norm": 5.361053943634033, + "learning_rate": 
1.5986972196692053e-05, + "loss": 1.8913, + "step": 1545 + }, + { + "epoch": 0.9513114375817244, + "grad_norm": 5.652524471282959, + "learning_rate": 1.5981648670412484e-05, + "loss": 1.961, + "step": 1546 + }, + { + "epoch": 0.951926774863472, + "grad_norm": 7.112715721130371, + "learning_rate": 1.5976322503149373e-05, + "loss": 2.1601, + "step": 1547 + }, + { + "epoch": 0.9525421121452196, + "grad_norm": 5.324873447418213, + "learning_rate": 1.5970993697254294e-05, + "loss": 2.1053, + "step": 1548 + }, + { + "epoch": 0.9531574494269671, + "grad_norm": 6.552524089813232, + "learning_rate": 1.5965662255080003e-05, + "loss": 2.0011, + "step": 1549 + }, + { + "epoch": 0.9537727867087147, + "grad_norm": 6.059303283691406, + "learning_rate": 1.5960328178980393e-05, + "loss": 1.9795, + "step": 1550 + }, + { + "epoch": 0.9543881239904622, + "grad_norm": 7.026639461517334, + "learning_rate": 1.5954991471310542e-05, + "loss": 2.0253, + "step": 1551 + }, + { + "epoch": 0.9550034612722098, + "grad_norm": 4.769665718078613, + "learning_rate": 1.5949652134426686e-05, + "loss": 2.0589, + "step": 1552 + }, + { + "epoch": 0.9556187985539574, + "grad_norm": 7.112276077270508, + "learning_rate": 1.5944310170686215e-05, + "loss": 1.9296, + "step": 1553 + }, + { + "epoch": 0.9562341358357049, + "grad_norm": 6.250385284423828, + "learning_rate": 1.5938965582447683e-05, + "loss": 2.0271, + "step": 1554 + }, + { + "epoch": 0.9568494731174525, + "grad_norm": 7.438846588134766, + "learning_rate": 1.5933618372070805e-05, + "loss": 1.8588, + "step": 1555 + }, + { + "epoch": 0.9574648103992001, + "grad_norm": 5.058011054992676, + "learning_rate": 1.5928268541916444e-05, + "loss": 2.186, + "step": 1556 + }, + { + "epoch": 0.9580801476809476, + "grad_norm": 6.507116794586182, + "learning_rate": 1.5922916094346633e-05, + "loss": 2.149, + "step": 1557 + }, + { + "epoch": 0.9586954849626952, + "grad_norm": 5.813977241516113, + "learning_rate": 1.591756103172455e-05, + "loss": 1.8383, + "step": 1558 + }, + { + "epoch": 0.9593108222444428, + "grad_norm": 6.447881698608398, + "learning_rate": 1.5912203356414532e-05, + "loss": 2.0134, + "step": 1559 + }, + { + "epoch": 0.9599261595261903, + "grad_norm": 5.954245567321777, + "learning_rate": 1.5906843070782077e-05, + "loss": 1.9944, + "step": 1560 + }, + { + "epoch": 0.9605414968079379, + "grad_norm": 7.64142370223999, + "learning_rate": 1.5901480177193816e-05, + "loss": 2.1217, + "step": 1561 + }, + { + "epoch": 0.9611568340896854, + "grad_norm": 5.59800910949707, + "learning_rate": 1.5896114678017555e-05, + "loss": 1.9969, + "step": 1562 + }, + { + "epoch": 0.961772171371433, + "grad_norm": 6.338011741638184, + "learning_rate": 1.589074657562223e-05, + "loss": 2.0895, + "step": 1563 + }, + { + "epoch": 0.9623875086531806, + "grad_norm": 5.619010925292969, + "learning_rate": 1.5885375872377942e-05, + "loss": 2.0118, + "step": 1564 + }, + { + "epoch": 0.9630028459349281, + "grad_norm": 7.114977836608887, + "learning_rate": 1.5880002570655925e-05, + "loss": 2.1604, + "step": 1565 + }, + { + "epoch": 0.9636181832166757, + "grad_norm": 7.877235412597656, + "learning_rate": 1.5874626672828573e-05, + "loss": 1.9719, + "step": 1566 + }, + { + "epoch": 0.9642335204984231, + "grad_norm": 6.789510726928711, + "learning_rate": 1.5869248181269427e-05, + "loss": 2.0193, + "step": 1567 + }, + { + "epoch": 0.9648488577801707, + "grad_norm": 5.645259857177734, + "learning_rate": 1.586386709835316e-05, + "loss": 2.2438, + "step": 1568 + }, + { + "epoch": 0.9654641950619183, + "grad_norm": 
5.343993186950684, + "learning_rate": 1.58584834264556e-05, + "loss": 2.1532, + "step": 1569 + }, + { + "epoch": 0.9660795323436658, + "grad_norm": 6.878357887268066, + "learning_rate": 1.585309716795372e-05, + "loss": 2.2362, + "step": 1570 + }, + { + "epoch": 0.9666948696254134, + "grad_norm": 5.385640621185303, + "learning_rate": 1.5847708325225618e-05, + "loss": 2.1995, + "step": 1571 + }, + { + "epoch": 0.967310206907161, + "grad_norm": 8.191226959228516, + "learning_rate": 1.584231690065056e-05, + "loss": 1.9359, + "step": 1572 + }, + { + "epoch": 0.9679255441889085, + "grad_norm": 7.141463279724121, + "learning_rate": 1.5836922896608923e-05, + "loss": 2.2633, + "step": 1573 + }, + { + "epoch": 0.9685408814706561, + "grad_norm": 6.198462963104248, + "learning_rate": 1.583152631548225e-05, + "loss": 1.969, + "step": 1574 + }, + { + "epoch": 0.9691562187524037, + "grad_norm": 7.599320411682129, + "learning_rate": 1.5826127159653203e-05, + "loss": 2.0244, + "step": 1575 + }, + { + "epoch": 0.9697715560341512, + "grad_norm": 6.278257369995117, + "learning_rate": 1.5820725431505584e-05, + "loss": 2.2466, + "step": 1576 + }, + { + "epoch": 0.9703868933158988, + "grad_norm": 7.411245822906494, + "learning_rate": 1.581532113342434e-05, + "loss": 1.9681, + "step": 1577 + }, + { + "epoch": 0.9710022305976463, + "grad_norm": 6.887391090393066, + "learning_rate": 1.5809914267795542e-05, + "loss": 2.3049, + "step": 1578 + }, + { + "epoch": 0.9716175678793939, + "grad_norm": 6.13304328918457, + "learning_rate": 1.5804504837006396e-05, + "loss": 1.9848, + "step": 1579 + }, + { + "epoch": 0.9722329051611415, + "grad_norm": 5.754158973693848, + "learning_rate": 1.579909284344524e-05, + "loss": 1.9517, + "step": 1580 + }, + { + "epoch": 0.972848242442889, + "grad_norm": 6.345254421234131, + "learning_rate": 1.5793678289501564e-05, + "loss": 2.0896, + "step": 1581 + }, + { + "epoch": 0.9734635797246366, + "grad_norm": 6.630475997924805, + "learning_rate": 1.5788261177565952e-05, + "loss": 2.3675, + "step": 1582 + }, + { + "epoch": 0.9740789170063842, + "grad_norm": 5.816492080688477, + "learning_rate": 1.578284151003015e-05, + "loss": 2.1765, + "step": 1583 + }, + { + "epoch": 0.9746942542881317, + "grad_norm": 8.220222473144531, + "learning_rate": 1.5777419289287008e-05, + "loss": 1.8793, + "step": 1584 + }, + { + "epoch": 0.9753095915698793, + "grad_norm": 6.383854866027832, + "learning_rate": 1.577199451773052e-05, + "loss": 2.2262, + "step": 1585 + }, + { + "epoch": 0.9759249288516268, + "grad_norm": 6.38456916809082, + "learning_rate": 1.57665671977558e-05, + "loss": 2.2164, + "step": 1586 + }, + { + "epoch": 0.9765402661333743, + "grad_norm": 6.609987735748291, + "learning_rate": 1.5761137331759084e-05, + "loss": 2.0389, + "step": 1587 + }, + { + "epoch": 0.9771556034151219, + "grad_norm": 6.592121601104736, + "learning_rate": 1.5755704922137742e-05, + "loss": 1.9224, + "step": 1588 + }, + { + "epoch": 0.9777709406968694, + "grad_norm": 6.164999961853027, + "learning_rate": 1.5750269971290254e-05, + "loss": 2.1017, + "step": 1589 + }, + { + "epoch": 0.978386277978617, + "grad_norm": 5.4996466636657715, + "learning_rate": 1.5744832481616235e-05, + "loss": 1.955, + "step": 1590 + }, + { + "epoch": 0.9790016152603646, + "grad_norm": 6.789836406707764, + "learning_rate": 1.5739392455516408e-05, + "loss": 2.2492, + "step": 1591 + }, + { + "epoch": 0.9796169525421121, + "grad_norm": 6.541653633117676, + "learning_rate": 1.5733949895392626e-05, + "loss": 1.9496, + "step": 1592 + }, + { + "epoch": 
0.9802322898238597, + "grad_norm": 6.1147894859313965, + "learning_rate": 1.5728504803647857e-05, + "loss": 2.0058, + "step": 1593 + }, + { + "epoch": 0.9808476271056072, + "grad_norm": 6.139481544494629, + "learning_rate": 1.572305718268619e-05, + "loss": 2.1452, + "step": 1594 + }, + { + "epoch": 0.9814629643873548, + "grad_norm": 6.242997169494629, + "learning_rate": 1.571760703491282e-05, + "loss": 2.0667, + "step": 1595 + }, + { + "epoch": 0.9820783016691024, + "grad_norm": 5.5975141525268555, + "learning_rate": 1.5712154362734073e-05, + "loss": 2.0092, + "step": 1596 + }, + { + "epoch": 0.9826936389508499, + "grad_norm": 5.382175922393799, + "learning_rate": 1.570669916855738e-05, + "loss": 2.0644, + "step": 1597 + }, + { + "epoch": 0.9833089762325975, + "grad_norm": 7.039137363433838, + "learning_rate": 1.570124145479128e-05, + "loss": 1.9354, + "step": 1598 + }, + { + "epoch": 0.9839243135143451, + "grad_norm": 6.8870134353637695, + "learning_rate": 1.5695781223845442e-05, + "loss": 1.9867, + "step": 1599 + }, + { + "epoch": 0.9845396507960926, + "grad_norm": 6.947288513183594, + "learning_rate": 1.5690318478130634e-05, + "loss": 2.0417, + "step": 1600 + }, + { + "epoch": 0.9851549880778402, + "grad_norm": 8.744593620300293, + "learning_rate": 1.5684853220058727e-05, + "loss": 2.0176, + "step": 1601 + }, + { + "epoch": 0.9857703253595878, + "grad_norm": 6.482842445373535, + "learning_rate": 1.5679385452042724e-05, + "loss": 2.2362, + "step": 1602 + }, + { + "epoch": 0.9863856626413353, + "grad_norm": 6.5556440353393555, + "learning_rate": 1.5673915176496716e-05, + "loss": 2.1052, + "step": 1603 + }, + { + "epoch": 0.9870009999230829, + "grad_norm": 6.929126739501953, + "learning_rate": 1.5668442395835907e-05, + "loss": 1.9648, + "step": 1604 + }, + { + "epoch": 0.9876163372048304, + "grad_norm": 5.975175380706787, + "learning_rate": 1.5662967112476613e-05, + "loss": 2.0817, + "step": 1605 + }, + { + "epoch": 0.988231674486578, + "grad_norm": 6.392477989196777, + "learning_rate": 1.5657489328836242e-05, + "loss": 1.9236, + "step": 1606 + }, + { + "epoch": 0.9888470117683256, + "grad_norm": 6.622708797454834, + "learning_rate": 1.5652009047333322e-05, + "loss": 2.0078, + "step": 1607 + }, + { + "epoch": 0.989462349050073, + "grad_norm": 8.373127937316895, + "learning_rate": 1.5646526270387473e-05, + "loss": 1.9998, + "step": 1608 + }, + { + "epoch": 0.9900776863318206, + "grad_norm": 8.009117126464844, + "learning_rate": 1.564104100041942e-05, + "loss": 2.0444, + "step": 1609 + }, + { + "epoch": 0.9906930236135681, + "grad_norm": 6.935191631317139, + "learning_rate": 1.5635553239850987e-05, + "loss": 2.0104, + "step": 1610 + }, + { + "epoch": 0.9913083608953157, + "grad_norm": 6.194570541381836, + "learning_rate": 1.56300629911051e-05, + "loss": 2.018, + "step": 1611 + }, + { + "epoch": 0.9919236981770633, + "grad_norm": 5.91082239151001, + "learning_rate": 1.562457025660578e-05, + "loss": 2.2235, + "step": 1612 + }, + { + "epoch": 0.9925390354588108, + "grad_norm": 6.3568501472473145, + "learning_rate": 1.561907503877815e-05, + "loss": 2.0291, + "step": 1613 + }, + { + "epoch": 0.9931543727405584, + "grad_norm": 6.271023273468018, + "learning_rate": 1.5613577340048423e-05, + "loss": 2.0856, + "step": 1614 + }, + { + "epoch": 0.993769710022306, + "grad_norm": 6.267033576965332, + "learning_rate": 1.560807716284392e-05, + "loss": 2.0721, + "step": 1615 + }, + { + "epoch": 0.9943850473040535, + "grad_norm": 5.563879489898682, + "learning_rate": 1.5602574509593043e-05, + "loss": 1.8154, 
+ "step": 1616 + }, + { + "epoch": 0.9950003845858011, + "grad_norm": 5.823796272277832, + "learning_rate": 1.5597069382725293e-05, + "loss": 2.2059, + "step": 1617 + }, + { + "epoch": 0.9956157218675487, + "grad_norm": 5.896056175231934, + "learning_rate": 1.5591561784671266e-05, + "loss": 1.9953, + "step": 1618 + }, + { + "epoch": 0.9962310591492962, + "grad_norm": 5.803584098815918, + "learning_rate": 1.5586051717862634e-05, + "loss": 2.1828, + "step": 1619 + }, + { + "epoch": 0.9968463964310438, + "grad_norm": 6.01746940612793, + "learning_rate": 1.5580539184732188e-05, + "loss": 1.8024, + "step": 1620 + }, + { + "epoch": 0.9974617337127913, + "grad_norm": 5.557577610015869, + "learning_rate": 1.557502418771377e-05, + "loss": 2.3793, + "step": 1621 + }, + { + "epoch": 0.9980770709945389, + "grad_norm": 6.826696872711182, + "learning_rate": 1.5569506729242347e-05, + "loss": 2.0203, + "step": 1622 + }, + { + "epoch": 0.9986924082762865, + "grad_norm": 7.327406406402588, + "learning_rate": 1.5563986811753948e-05, + "loss": 2.2088, + "step": 1623 + }, + { + "epoch": 0.999307745558034, + "grad_norm": 6.1032867431640625, + "learning_rate": 1.5558464437685697e-05, + "loss": 2.0173, + "step": 1624 + }, + { + "epoch": 0.9999230828397816, + "grad_norm": 6.232659816741943, + "learning_rate": 1.5552939609475803e-05, + "loss": 2.0268, + "step": 1625 + }, + { + "epoch": 1.000538420121529, + "grad_norm": 8.664092063903809, + "learning_rate": 1.554741232956355e-05, + "loss": 1.9351, + "step": 1626 + }, + { + "epoch": 1.0011537574032767, + "grad_norm": 6.723713397979736, + "learning_rate": 1.554188260038932e-05, + "loss": 2.1018, + "step": 1627 + }, + { + "epoch": 1.0017690946850242, + "grad_norm": 5.569162368774414, + "learning_rate": 1.5536350424394565e-05, + "loss": 1.9516, + "step": 1628 + }, + { + "epoch": 1.0023844319667718, + "grad_norm": 6.344458103179932, + "learning_rate": 1.553081580402182e-05, + "loss": 1.9857, + "step": 1629 + }, + { + "epoch": 1.0029997692485193, + "grad_norm": 6.702969551086426, + "learning_rate": 1.5525278741714692e-05, + "loss": 1.9373, + "step": 1630 + }, + { + "epoch": 1.003615106530267, + "grad_norm": 5.949615478515625, + "learning_rate": 1.551973923991788e-05, + "loss": 1.7212, + "step": 1631 + }, + { + "epoch": 1.0042304438120144, + "grad_norm": 6.609753131866455, + "learning_rate": 1.551419730107716e-05, + "loss": 1.8, + "step": 1632 + }, + { + "epoch": 1.004845781093762, + "grad_norm": 6.519473552703857, + "learning_rate": 1.5508652927639362e-05, + "loss": 1.912, + "step": 1633 + }, + { + "epoch": 1.0054611183755096, + "grad_norm": 5.326961040496826, + "learning_rate": 1.5503106122052417e-05, + "loss": 1.8824, + "step": 1634 + }, + { + "epoch": 1.0060764556572572, + "grad_norm": 5.500513076782227, + "learning_rate": 1.5497556886765316e-05, + "loss": 1.9424, + "step": 1635 + }, + { + "epoch": 1.0066917929390047, + "grad_norm": 5.646092414855957, + "learning_rate": 1.5492005224228128e-05, + "loss": 2.1671, + "step": 1636 + }, + { + "epoch": 1.0073071302207524, + "grad_norm": 6.257505893707275, + "learning_rate": 1.548645113689199e-05, + "loss": 1.784, + "step": 1637 + }, + { + "epoch": 1.0079224675024998, + "grad_norm": 5.853558540344238, + "learning_rate": 1.548089462720911e-05, + "loss": 1.8997, + "step": 1638 + }, + { + "epoch": 1.0085378047842473, + "grad_norm": 6.634757995605469, + "learning_rate": 1.5475335697632768e-05, + "loss": 1.8016, + "step": 1639 + }, + { + "epoch": 1.009153142065995, + "grad_norm": 6.0219597816467285, + "learning_rate": 
1.546977435061731e-05, + "loss": 1.9416, + "step": 1640 + }, + { + "epoch": 1.0097684793477424, + "grad_norm": 7.031144142150879, + "learning_rate": 1.5464210588618148e-05, + "loss": 1.7843, + "step": 1641 + }, + { + "epoch": 1.01038381662949, + "grad_norm": 5.704952239990234, + "learning_rate": 1.5458644414091767e-05, + "loss": 1.7585, + "step": 1642 + }, + { + "epoch": 1.0109991539112375, + "grad_norm": 7.677534103393555, + "learning_rate": 1.545307582949571e-05, + "loss": 2.062, + "step": 1643 + }, + { + "epoch": 1.0116144911929852, + "grad_norm": 7.072706699371338, + "learning_rate": 1.544750483728859e-05, + "loss": 1.9398, + "step": 1644 + }, + { + "epoch": 1.0122298284747326, + "grad_norm": 6.638393402099609, + "learning_rate": 1.544193143993008e-05, + "loss": 1.9557, + "step": 1645 + }, + { + "epoch": 1.0128451657564803, + "grad_norm": 5.990571022033691, + "learning_rate": 1.543635563988091e-05, + "loss": 2.0451, + "step": 1646 + }, + { + "epoch": 1.0134605030382278, + "grad_norm": 5.813577651977539, + "learning_rate": 1.5430777439602875e-05, + "loss": 2.05, + "step": 1647 + }, + { + "epoch": 1.0140758403199754, + "grad_norm": 6.569429874420166, + "learning_rate": 1.542519684155884e-05, + "loss": 1.9091, + "step": 1648 + }, + { + "epoch": 1.014691177601723, + "grad_norm": 6.622323036193848, + "learning_rate": 1.5419613848212708e-05, + "loss": 1.7614, + "step": 1649 + }, + { + "epoch": 1.0153065148834706, + "grad_norm": 5.407545566558838, + "learning_rate": 1.5414028462029454e-05, + "loss": 1.9248, + "step": 1650 + }, + { + "epoch": 1.015921852165218, + "grad_norm": 7.105026721954346, + "learning_rate": 1.540844068547511e-05, + "loss": 1.8193, + "step": 1651 + }, + { + "epoch": 1.0165371894469657, + "grad_norm": 6.3961920738220215, + "learning_rate": 1.5402850521016753e-05, + "loss": 2.0441, + "step": 1652 + }, + { + "epoch": 1.0171525267287131, + "grad_norm": 6.689556121826172, + "learning_rate": 1.5397257971122527e-05, + "loss": 1.9299, + "step": 1653 + }, + { + "epoch": 1.0177678640104608, + "grad_norm": 6.172722816467285, + "learning_rate": 1.539166303826162e-05, + "loss": 1.8417, + "step": 1654 + }, + { + "epoch": 1.0183832012922083, + "grad_norm": 5.479420185089111, + "learning_rate": 1.5386065724904273e-05, + "loss": 1.9314, + "step": 1655 + }, + { + "epoch": 1.018998538573956, + "grad_norm": 6.391892433166504, + "learning_rate": 1.5380466033521783e-05, + "loss": 1.9899, + "step": 1656 + }, + { + "epoch": 1.0196138758557034, + "grad_norm": 5.560680866241455, + "learning_rate": 1.5374863966586487e-05, + "loss": 1.904, + "step": 1657 + }, + { + "epoch": 1.020229213137451, + "grad_norm": 5.30397367477417, + "learning_rate": 1.536925952657179e-05, + "loss": 1.8544, + "step": 1658 + }, + { + "epoch": 1.0208445504191985, + "grad_norm": 6.713962554931641, + "learning_rate": 1.536365271595212e-05, + "loss": 1.871, + "step": 1659 + }, + { + "epoch": 1.021459887700946, + "grad_norm": 7.090426921844482, + "learning_rate": 1.5358043537202978e-05, + "loss": 1.8193, + "step": 1660 + }, + { + "epoch": 1.0220752249826937, + "grad_norm": 5.71984338760376, + "learning_rate": 1.5352431992800884e-05, + "loss": 1.9235, + "step": 1661 + }, + { + "epoch": 1.022690562264441, + "grad_norm": 7.204848766326904, + "learning_rate": 1.5346818085223423e-05, + "loss": 1.7052, + "step": 1662 + }, + { + "epoch": 1.0233058995461888, + "grad_norm": 5.551532745361328, + "learning_rate": 1.5341201816949208e-05, + "loss": 2.1023, + "step": 1663 + }, + { + "epoch": 1.0239212368279362, + "grad_norm": 
5.530863285064697, + "learning_rate": 1.533558319045791e-05, + "loss": 1.9918, + "step": 1664 + }, + { + "epoch": 1.024536574109684, + "grad_norm": 5.177396774291992, + "learning_rate": 1.532996220823023e-05, + "loss": 1.6574, + "step": 1665 + }, + { + "epoch": 1.0251519113914314, + "grad_norm": 5.805763244628906, + "learning_rate": 1.5324338872747908e-05, + "loss": 1.7677, + "step": 1666 + }, + { + "epoch": 1.025767248673179, + "grad_norm": 7.396324634552002, + "learning_rate": 1.5318713186493736e-05, + "loss": 2.0289, + "step": 1667 + }, + { + "epoch": 1.0263825859549265, + "grad_norm": 6.758800983428955, + "learning_rate": 1.5313085151951527e-05, + "loss": 1.8913, + "step": 1668 + }, + { + "epoch": 1.0269979232366742, + "grad_norm": 6.433295726776123, + "learning_rate": 1.5307454771606137e-05, + "loss": 1.906, + "step": 1669 + }, + { + "epoch": 1.0276132605184216, + "grad_norm": 5.97529935836792, + "learning_rate": 1.5301822047943474e-05, + "loss": 1.6561, + "step": 1670 + }, + { + "epoch": 1.0282285978001693, + "grad_norm": 7.7520880699157715, + "learning_rate": 1.529618698345045e-05, + "loss": 2.1013, + "step": 1671 + }, + { + "epoch": 1.0288439350819167, + "grad_norm": 5.744178295135498, + "learning_rate": 1.5290549580615033e-05, + "loss": 2.073, + "step": 1672 + }, + { + "epoch": 1.0294592723636644, + "grad_norm": 6.627166748046875, + "learning_rate": 1.5284909841926217e-05, + "loss": 1.9237, + "step": 1673 + }, + { + "epoch": 1.0300746096454119, + "grad_norm": 5.9741950035095215, + "learning_rate": 1.5279267769874034e-05, + "loss": 1.9746, + "step": 1674 + }, + { + "epoch": 1.0306899469271595, + "grad_norm": 6.044044494628906, + "learning_rate": 1.5273623366949525e-05, + "loss": 1.835, + "step": 1675 + }, + { + "epoch": 1.031305284208907, + "grad_norm": 7.5063910484313965, + "learning_rate": 1.5267976635644787e-05, + "loss": 2.001, + "step": 1676 + }, + { + "epoch": 1.0319206214906547, + "grad_norm": 5.3966755867004395, + "learning_rate": 1.5262327578452927e-05, + "loss": 1.8756, + "step": 1677 + }, + { + "epoch": 1.0325359587724021, + "grad_norm": 4.961825370788574, + "learning_rate": 1.5256676197868086e-05, + "loss": 2.0452, + "step": 1678 + }, + { + "epoch": 1.0331512960541498, + "grad_norm": 6.98951530456543, + "learning_rate": 1.5251022496385433e-05, + "loss": 1.89, + "step": 1679 + }, + { + "epoch": 1.0337666333358972, + "grad_norm": 5.646378993988037, + "learning_rate": 1.5245366476501151e-05, + "loss": 1.789, + "step": 1680 + }, + { + "epoch": 1.0343819706176447, + "grad_norm": 5.873441219329834, + "learning_rate": 1.523970814071246e-05, + "loss": 1.8041, + "step": 1681 + }, + { + "epoch": 1.0349973078993924, + "grad_norm": 7.231013774871826, + "learning_rate": 1.5234047491517592e-05, + "loss": 2.0268, + "step": 1682 + }, + { + "epoch": 1.0356126451811398, + "grad_norm": 6.699387073516846, + "learning_rate": 1.5228384531415809e-05, + "loss": 1.7502, + "step": 1683 + }, + { + "epoch": 1.0362279824628875, + "grad_norm": 5.31864595413208, + "learning_rate": 1.5222719262907385e-05, + "loss": 1.9344, + "step": 1684 + }, + { + "epoch": 1.036843319744635, + "grad_norm": 7.3270673751831055, + "learning_rate": 1.5217051688493616e-05, + "loss": 2.0333, + "step": 1685 + }, + { + "epoch": 1.0374586570263826, + "grad_norm": 6.45682430267334, + "learning_rate": 1.5211381810676826e-05, + "loss": 1.8406, + "step": 1686 + }, + { + "epoch": 1.03807399430813, + "grad_norm": 6.3460516929626465, + "learning_rate": 1.5205709631960337e-05, + "loss": 1.7485, + "step": 1687 + }, + { + "epoch": 
1.0386893315898778, + "grad_norm": 5.732699871063232, + "learning_rate": 1.5200035154848505e-05, + "loss": 1.8713, + "step": 1688 + }, + { + "epoch": 1.0393046688716252, + "grad_norm": 7.161590099334717, + "learning_rate": 1.5194358381846686e-05, + "loss": 1.7784, + "step": 1689 + }, + { + "epoch": 1.0399200061533729, + "grad_norm": 5.822014808654785, + "learning_rate": 1.5188679315461258e-05, + "loss": 1.9499, + "step": 1690 + }, + { + "epoch": 1.0405353434351203, + "grad_norm": 5.766219615936279, + "learning_rate": 1.5182997958199617e-05, + "loss": 2.0113, + "step": 1691 + }, + { + "epoch": 1.041150680716868, + "grad_norm": 6.924107074737549, + "learning_rate": 1.5177314312570155e-05, + "loss": 1.9609, + "step": 1692 + }, + { + "epoch": 1.0417660179986155, + "grad_norm": 4.856461048126221, + "learning_rate": 1.517162838108229e-05, + "loss": 1.8093, + "step": 1693 + }, + { + "epoch": 1.0423813552803631, + "grad_norm": 6.353387832641602, + "learning_rate": 1.5165940166246436e-05, + "loss": 1.8027, + "step": 1694 + }, + { + "epoch": 1.0429966925621106, + "grad_norm": 7.646724224090576, + "learning_rate": 1.5160249670574026e-05, + "loss": 2.1023, + "step": 1695 + }, + { + "epoch": 1.0436120298438583, + "grad_norm": 5.807473659515381, + "learning_rate": 1.515455689657749e-05, + "loss": 1.8775, + "step": 1696 + }, + { + "epoch": 1.0442273671256057, + "grad_norm": 6.602875709533691, + "learning_rate": 1.5148861846770275e-05, + "loss": 1.9143, + "step": 1697 + }, + { + "epoch": 1.0448427044073534, + "grad_norm": 6.103081226348877, + "learning_rate": 1.5143164523666824e-05, + "loss": 1.8554, + "step": 1698 + }, + { + "epoch": 1.0454580416891008, + "grad_norm": 6.121869087219238, + "learning_rate": 1.5137464929782586e-05, + "loss": 1.7995, + "step": 1699 + }, + { + "epoch": 1.0460733789708483, + "grad_norm": 6.660911560058594, + "learning_rate": 1.5131763067634017e-05, + "loss": 1.8654, + "step": 1700 + }, + { + "epoch": 1.046688716252596, + "grad_norm": 5.897146224975586, + "learning_rate": 1.5126058939738566e-05, + "loss": 1.9518, + "step": 1701 + }, + { + "epoch": 1.0473040535343434, + "grad_norm": 5.670334339141846, + "learning_rate": 1.5120352548614692e-05, + "loss": 1.8968, + "step": 1702 + }, + { + "epoch": 1.047919390816091, + "grad_norm": 7.226754665374756, + "learning_rate": 1.5114643896781844e-05, + "loss": 1.8111, + "step": 1703 + }, + { + "epoch": 1.0485347280978385, + "grad_norm": 6.423195838928223, + "learning_rate": 1.510893298676048e-05, + "loss": 2.0371, + "step": 1704 + }, + { + "epoch": 1.0491500653795862, + "grad_norm": 6.363276958465576, + "learning_rate": 1.5103219821072038e-05, + "loss": 1.93, + "step": 1705 + }, + { + "epoch": 1.0497654026613337, + "grad_norm": 6.568567752838135, + "learning_rate": 1.5097504402238969e-05, + "loss": 1.9445, + "step": 1706 + }, + { + "epoch": 1.0503807399430813, + "grad_norm": 5.359416961669922, + "learning_rate": 1.5091786732784717e-05, + "loss": 1.9085, + "step": 1707 + }, + { + "epoch": 1.0509960772248288, + "grad_norm": 7.724282264709473, + "learning_rate": 1.5086066815233706e-05, + "loss": 1.6142, + "step": 1708 + }, + { + "epoch": 1.0516114145065765, + "grad_norm": 7.259455680847168, + "learning_rate": 1.5080344652111365e-05, + "loss": 1.9818, + "step": 1709 + }, + { + "epoch": 1.052226751788324, + "grad_norm": 6.059990406036377, + "learning_rate": 1.5074620245944112e-05, + "loss": 1.7947, + "step": 1710 + }, + { + "epoch": 1.0528420890700716, + "grad_norm": 5.179537773132324, + "learning_rate": 1.5068893599259354e-05, + "loss": 
1.9831, + "step": 1711 + }, + { + "epoch": 1.053457426351819, + "grad_norm": 5.691396713256836, + "learning_rate": 1.5063164714585485e-05, + "loss": 1.8924, + "step": 1712 + }, + { + "epoch": 1.0540727636335667, + "grad_norm": 7.857046127319336, + "learning_rate": 1.5057433594451891e-05, + "loss": 1.9712, + "step": 1713 + }, + { + "epoch": 1.0546881009153142, + "grad_norm": 7.2753777503967285, + "learning_rate": 1.5051700241388947e-05, + "loss": 1.9572, + "step": 1714 + }, + { + "epoch": 1.0553034381970618, + "grad_norm": 7.022783279418945, + "learning_rate": 1.5045964657928006e-05, + "loss": 2.0641, + "step": 1715 + }, + { + "epoch": 1.0559187754788093, + "grad_norm": 6.4770050048828125, + "learning_rate": 1.5040226846601413e-05, + "loss": 2.0107, + "step": 1716 + }, + { + "epoch": 1.056534112760557, + "grad_norm": 6.438579559326172, + "learning_rate": 1.5034486809942492e-05, + "loss": 1.9641, + "step": 1717 + }, + { + "epoch": 1.0571494500423044, + "grad_norm": 6.8940887451171875, + "learning_rate": 1.5028744550485553e-05, + "loss": 1.7248, + "step": 1718 + }, + { + "epoch": 1.057764787324052, + "grad_norm": 7.689363479614258, + "learning_rate": 1.5023000070765886e-05, + "loss": 1.9046, + "step": 1719 + }, + { + "epoch": 1.0583801246057996, + "grad_norm": 7.100428581237793, + "learning_rate": 1.5017253373319759e-05, + "loss": 1.8909, + "step": 1720 + }, + { + "epoch": 1.058995461887547, + "grad_norm": 7.288392543792725, + "learning_rate": 1.5011504460684418e-05, + "loss": 1.9591, + "step": 1721 + }, + { + "epoch": 1.0596107991692947, + "grad_norm": 6.9482293128967285, + "learning_rate": 1.50057533353981e-05, + "loss": 1.9561, + "step": 1722 + }, + { + "epoch": 1.0602261364510421, + "grad_norm": 5.307858467102051, + "learning_rate": 1.5000000000000002e-05, + "loss": 1.7832, + "step": 1723 + }, + { + "epoch": 1.0608414737327898, + "grad_norm": 5.73566198348999, + "learning_rate": 1.4994244457030304e-05, + "loss": 1.8343, + "step": 1724 + }, + { + "epoch": 1.0614568110145373, + "grad_norm": 7.304609775543213, + "learning_rate": 1.4988486709030166e-05, + "loss": 1.9274, + "step": 1725 + }, + { + "epoch": 1.062072148296285, + "grad_norm": 6.64449405670166, + "learning_rate": 1.4982726758541708e-05, + "loss": 1.9671, + "step": 1726 + }, + { + "epoch": 1.0626874855780324, + "grad_norm": 5.646178722381592, + "learning_rate": 1.4976964608108038e-05, + "loss": 1.9082, + "step": 1727 + }, + { + "epoch": 1.06330282285978, + "grad_norm": 6.580636501312256, + "learning_rate": 1.4971200260273223e-05, + "loss": 1.7975, + "step": 1728 + }, + { + "epoch": 1.0639181601415275, + "grad_norm": 6.467515468597412, + "learning_rate": 1.4965433717582307e-05, + "loss": 1.9278, + "step": 1729 + }, + { + "epoch": 1.0645334974232752, + "grad_norm": 6.143365859985352, + "learning_rate": 1.4959664982581304e-05, + "loss": 1.6439, + "step": 1730 + }, + { + "epoch": 1.0651488347050226, + "grad_norm": 6.986037254333496, + "learning_rate": 1.495389405781719e-05, + "loss": 1.8404, + "step": 1731 + }, + { + "epoch": 1.0657641719867703, + "grad_norm": 5.054937362670898, + "learning_rate": 1.4948120945837907e-05, + "loss": 2.0675, + "step": 1732 + }, + { + "epoch": 1.0663795092685178, + "grad_norm": 6.424448013305664, + "learning_rate": 1.4942345649192376e-05, + "loss": 1.6943, + "step": 1733 + }, + { + "epoch": 1.0669948465502654, + "grad_norm": 5.030768871307373, + "learning_rate": 1.4936568170430465e-05, + "loss": 1.9674, + "step": 1734 + }, + { + "epoch": 1.067610183832013, + "grad_norm": 6.471560001373291, + 
"learning_rate": 1.4930788512103018e-05, + "loss": 2.0729, + "step": 1735 + }, + { + "epoch": 1.0682255211137606, + "grad_norm": 5.195088863372803, + "learning_rate": 1.4925006676761835e-05, + "loss": 1.8797, + "step": 1736 + }, + { + "epoch": 1.068840858395508, + "grad_norm": 6.176763534545898, + "learning_rate": 1.491922266695968e-05, + "loss": 1.7946, + "step": 1737 + }, + { + "epoch": 1.0694561956772557, + "grad_norm": 6.135370254516602, + "learning_rate": 1.4913436485250275e-05, + "loss": 1.98, + "step": 1738 + }, + { + "epoch": 1.0700715329590031, + "grad_norm": 5.835413455963135, + "learning_rate": 1.4907648134188304e-05, + "loss": 1.9649, + "step": 1739 + }, + { + "epoch": 1.0706868702407508, + "grad_norm": 6.3465094566345215, + "learning_rate": 1.4901857616329405e-05, + "loss": 1.9368, + "step": 1740 + }, + { + "epoch": 1.0713022075224983, + "grad_norm": 5.935573101043701, + "learning_rate": 1.4896064934230174e-05, + "loss": 1.8896, + "step": 1741 + }, + { + "epoch": 1.0719175448042457, + "grad_norm": 5.843479633331299, + "learning_rate": 1.4890270090448167e-05, + "loss": 2.0872, + "step": 1742 + }, + { + "epoch": 1.0725328820859934, + "grad_norm": 6.735114097595215, + "learning_rate": 1.488447308754189e-05, + "loss": 1.8894, + "step": 1743 + }, + { + "epoch": 1.0731482193677409, + "grad_norm": 6.834104537963867, + "learning_rate": 1.4878673928070795e-05, + "loss": 1.8652, + "step": 1744 + }, + { + "epoch": 1.0737635566494885, + "grad_norm": 7.214136600494385, + "learning_rate": 1.4872872614595304e-05, + "loss": 1.9913, + "step": 1745 + }, + { + "epoch": 1.074378893931236, + "grad_norm": 5.922374725341797, + "learning_rate": 1.4867069149676774e-05, + "loss": 2.0033, + "step": 1746 + }, + { + "epoch": 1.0749942312129837, + "grad_norm": 6.582641124725342, + "learning_rate": 1.486126353587752e-05, + "loss": 1.9639, + "step": 1747 + }, + { + "epoch": 1.075609568494731, + "grad_norm": 6.269070148468018, + "learning_rate": 1.4855455775760804e-05, + "loss": 1.8527, + "step": 1748 + }, + { + "epoch": 1.0762249057764788, + "grad_norm": 6.4737043380737305, + "learning_rate": 1.4849645871890832e-05, + "loss": 1.9817, + "step": 1749 + }, + { + "epoch": 1.0768402430582262, + "grad_norm": 5.92973518371582, + "learning_rate": 1.484383382683276e-05, + "loss": 1.8126, + "step": 1750 + }, + { + "epoch": 1.077455580339974, + "grad_norm": 6.711043357849121, + "learning_rate": 1.4838019643152699e-05, + "loss": 1.8491, + "step": 1751 + }, + { + "epoch": 1.0780709176217214, + "grad_norm": 5.3943400382995605, + "learning_rate": 1.4832203323417679e-05, + "loss": 1.9946, + "step": 1752 + }, + { + "epoch": 1.078686254903469, + "grad_norm": 6.983832836151123, + "learning_rate": 1.4826384870195702e-05, + "loss": 1.9792, + "step": 1753 + }, + { + "epoch": 1.0793015921852165, + "grad_norm": 6.3697896003723145, + "learning_rate": 1.482056428605569e-05, + "loss": 1.7191, + "step": 1754 + }, + { + "epoch": 1.0799169294669642, + "grad_norm": 6.985806465148926, + "learning_rate": 1.4814741573567514e-05, + "loss": 1.6062, + "step": 1755 + }, + { + "epoch": 1.0805322667487116, + "grad_norm": 6.472733020782471, + "learning_rate": 1.4808916735301992e-05, + "loss": 1.8132, + "step": 1756 + }, + { + "epoch": 1.0811476040304593, + "grad_norm": 6.68153190612793, + "learning_rate": 1.4803089773830868e-05, + "loss": 1.9459, + "step": 1757 + }, + { + "epoch": 1.0817629413122067, + "grad_norm": 6.503655433654785, + "learning_rate": 1.4797260691726831e-05, + "loss": 1.8968, + "step": 1758 + }, + { + "epoch": 1.0823782785939544, 
+ "grad_norm": 6.253997325897217, + "learning_rate": 1.47914294915635e-05, + "loss": 2.0526, + "step": 1759 + }, + { + "epoch": 1.0829936158757019, + "grad_norm": 6.368047714233398, + "learning_rate": 1.478559617591544e-05, + "loss": 1.8979, + "step": 1760 + }, + { + "epoch": 1.0836089531574493, + "grad_norm": 5.929328441619873, + "learning_rate": 1.4779760747358141e-05, + "loss": 1.8916, + "step": 1761 + }, + { + "epoch": 1.084224290439197, + "grad_norm": 6.761097431182861, + "learning_rate": 1.4773923208468023e-05, + "loss": 1.6724, + "step": 1762 + }, + { + "epoch": 1.0848396277209444, + "grad_norm": 5.988875389099121, + "learning_rate": 1.4768083561822451e-05, + "loss": 1.9067, + "step": 1763 + }, + { + "epoch": 1.0854549650026921, + "grad_norm": 6.104567527770996, + "learning_rate": 1.4762241809999704e-05, + "loss": 1.6103, + "step": 1764 + }, + { + "epoch": 1.0860703022844396, + "grad_norm": 6.391572952270508, + "learning_rate": 1.4756397955579012e-05, + "loss": 2.0055, + "step": 1765 + }, + { + "epoch": 1.0866856395661872, + "grad_norm": 6.283228874206543, + "learning_rate": 1.4750552001140506e-05, + "loss": 2.0342, + "step": 1766 + }, + { + "epoch": 1.0873009768479347, + "grad_norm": 6.59044885635376, + "learning_rate": 1.4744703949265268e-05, + "loss": 1.8712, + "step": 1767 + }, + { + "epoch": 1.0879163141296824, + "grad_norm": 5.068249702453613, + "learning_rate": 1.4738853802535293e-05, + "loss": 1.7581, + "step": 1768 + }, + { + "epoch": 1.0885316514114298, + "grad_norm": 7.030444622039795, + "learning_rate": 1.4733001563533507e-05, + "loss": 1.9548, + "step": 1769 + }, + { + "epoch": 1.0891469886931775, + "grad_norm": 6.573042392730713, + "learning_rate": 1.4727147234843752e-05, + "loss": 2.016, + "step": 1770 + }, + { + "epoch": 1.089762325974925, + "grad_norm": 6.104885101318359, + "learning_rate": 1.4721290819050804e-05, + "loss": 1.8406, + "step": 1771 + }, + { + "epoch": 1.0903776632566726, + "grad_norm": 6.790970325469971, + "learning_rate": 1.4715432318740352e-05, + "loss": 1.94, + "step": 1772 + }, + { + "epoch": 1.09099300053842, + "grad_norm": 5.897993087768555, + "learning_rate": 1.4709571736499008e-05, + "loss": 2.015, + "step": 1773 + }, + { + "epoch": 1.0916083378201678, + "grad_norm": 6.877941608428955, + "learning_rate": 1.47037090749143e-05, + "loss": 1.9085, + "step": 1774 + }, + { + "epoch": 1.0922236751019152, + "grad_norm": 7.974496841430664, + "learning_rate": 1.4697844336574685e-05, + "loss": 1.8676, + "step": 1775 + }, + { + "epoch": 1.0928390123836629, + "grad_norm": 5.594254970550537, + "learning_rate": 1.4691977524069518e-05, + "loss": 2.003, + "step": 1776 + }, + { + "epoch": 1.0934543496654103, + "grad_norm": 7.615223407745361, + "learning_rate": 1.4686108639989091e-05, + "loss": 1.9931, + "step": 1777 + }, + { + "epoch": 1.094069686947158, + "grad_norm": 6.575129508972168, + "learning_rate": 1.4680237686924594e-05, + "loss": 1.9074, + "step": 1778 + }, + { + "epoch": 1.0946850242289055, + "grad_norm": 6.86401891708374, + "learning_rate": 1.467436466746814e-05, + "loss": 1.9273, + "step": 1779 + }, + { + "epoch": 1.095300361510653, + "grad_norm": 8.231244087219238, + "learning_rate": 1.4668489584212746e-05, + "loss": 1.8062, + "step": 1780 + }, + { + "epoch": 1.0959156987924006, + "grad_norm": 6.7447052001953125, + "learning_rate": 1.4662612439752356e-05, + "loss": 2.0316, + "step": 1781 + }, + { + "epoch": 1.0965310360741483, + "grad_norm": 6.680459499359131, + "learning_rate": 1.4656733236681802e-05, + "loss": 1.8196, + "step": 1782 + }, + { + 
"epoch": 1.0971463733558957, + "grad_norm": 6.850892066955566, + "learning_rate": 1.465085197759684e-05, + "loss": 1.9334, + "step": 1783 + }, + { + "epoch": 1.0977617106376432, + "grad_norm": 6.918075084686279, + "learning_rate": 1.4644968665094133e-05, + "loss": 2.0248, + "step": 1784 + }, + { + "epoch": 1.0983770479193908, + "grad_norm": 6.527578830718994, + "learning_rate": 1.4639083301771246e-05, + "loss": 1.7817, + "step": 1785 + }, + { + "epoch": 1.0989923852011383, + "grad_norm": 7.30032205581665, + "learning_rate": 1.4633195890226653e-05, + "loss": 1.9842, + "step": 1786 + }, + { + "epoch": 1.099607722482886, + "grad_norm": 5.076261043548584, + "learning_rate": 1.4627306433059724e-05, + "loss": 1.9962, + "step": 1787 + }, + { + "epoch": 1.1002230597646334, + "grad_norm": 7.844146251678467, + "learning_rate": 1.4621414932870746e-05, + "loss": 1.8508, + "step": 1788 + }, + { + "epoch": 1.100838397046381, + "grad_norm": 5.94285249710083, + "learning_rate": 1.4615521392260899e-05, + "loss": 1.8713, + "step": 1789 + }, + { + "epoch": 1.1014537343281285, + "grad_norm": 5.887419700622559, + "learning_rate": 1.4609625813832263e-05, + "loss": 1.7752, + "step": 1790 + }, + { + "epoch": 1.1020690716098762, + "grad_norm": 6.3427605628967285, + "learning_rate": 1.4603728200187823e-05, + "loss": 1.7983, + "step": 1791 + }, + { + "epoch": 1.1026844088916237, + "grad_norm": 6.128720283508301, + "learning_rate": 1.4597828553931454e-05, + "loss": 1.9876, + "step": 1792 + }, + { + "epoch": 1.1032997461733713, + "grad_norm": 8.027382850646973, + "learning_rate": 1.4591926877667946e-05, + "loss": 1.8829, + "step": 1793 + }, + { + "epoch": 1.1039150834551188, + "grad_norm": 7.918554306030273, + "learning_rate": 1.4586023174002961e-05, + "loss": 1.8201, + "step": 1794 + }, + { + "epoch": 1.1045304207368665, + "grad_norm": 7.42606782913208, + "learning_rate": 1.4580117445543077e-05, + "loss": 2.1002, + "step": 1795 + }, + { + "epoch": 1.105145758018614, + "grad_norm": 5.667446613311768, + "learning_rate": 1.4574209694895755e-05, + "loss": 1.7567, + "step": 1796 + }, + { + "epoch": 1.1057610953003616, + "grad_norm": 6.599480628967285, + "learning_rate": 1.4568299924669351e-05, + "loss": 1.9796, + "step": 1797 + }, + { + "epoch": 1.106376432582109, + "grad_norm": 5.910421848297119, + "learning_rate": 1.4562388137473114e-05, + "loss": 1.9748, + "step": 1798 + }, + { + "epoch": 1.1069917698638567, + "grad_norm": 7.454853534698486, + "learning_rate": 1.4556474335917181e-05, + "loss": 1.819, + "step": 1799 + }, + { + "epoch": 1.1076071071456042, + "grad_norm": 7.198428153991699, + "learning_rate": 1.4550558522612584e-05, + "loss": 1.8259, + "step": 1800 + }, + { + "epoch": 1.1082224444273518, + "grad_norm": 7.260030746459961, + "learning_rate": 1.4544640700171235e-05, + "loss": 1.7133, + "step": 1801 + }, + { + "epoch": 1.1088377817090993, + "grad_norm": 6.137328624725342, + "learning_rate": 1.4538720871205938e-05, + "loss": 1.9117, + "step": 1802 + }, + { + "epoch": 1.1094531189908468, + "grad_norm": 6.844432830810547, + "learning_rate": 1.4532799038330385e-05, + "loss": 1.6997, + "step": 1803 + }, + { + "epoch": 1.1100684562725944, + "grad_norm": 5.602412700653076, + "learning_rate": 1.4526875204159148e-05, + "loss": 1.942, + "step": 1804 + }, + { + "epoch": 1.1106837935543419, + "grad_norm": 7.49669885635376, + "learning_rate": 1.4520949371307686e-05, + "loss": 1.8394, + "step": 1805 + }, + { + "epoch": 1.1112991308360896, + "grad_norm": 5.472622871398926, + "learning_rate": 1.4515021542392335e-05, + 
"loss": 1.8608, + "step": 1806 + }, + { + "epoch": 1.111914468117837, + "grad_norm": 5.71762228012085, + "learning_rate": 1.4509091720030323e-05, + "loss": 1.9862, + "step": 1807 + }, + { + "epoch": 1.1125298053995847, + "grad_norm": 6.574720859527588, + "learning_rate": 1.4503159906839743e-05, + "loss": 2.0326, + "step": 1808 + }, + { + "epoch": 1.1131451426813321, + "grad_norm": 6.004607200622559, + "learning_rate": 1.4497226105439586e-05, + "loss": 1.7834, + "step": 1809 + }, + { + "epoch": 1.1137604799630798, + "grad_norm": 6.107209205627441, + "learning_rate": 1.44912903184497e-05, + "loss": 2.0066, + "step": 1810 + }, + { + "epoch": 1.1143758172448273, + "grad_norm": 6.803170204162598, + "learning_rate": 1.4485352548490827e-05, + "loss": 1.8947, + "step": 1811 + }, + { + "epoch": 1.114991154526575, + "grad_norm": 6.802841663360596, + "learning_rate": 1.4479412798184576e-05, + "loss": 1.9508, + "step": 1812 + }, + { + "epoch": 1.1156064918083224, + "grad_norm": 5.275770664215088, + "learning_rate": 1.4473471070153429e-05, + "loss": 1.9624, + "step": 1813 + }, + { + "epoch": 1.11622182909007, + "grad_norm": 6.633639812469482, + "learning_rate": 1.4467527367020745e-05, + "loss": 1.9172, + "step": 1814 + }, + { + "epoch": 1.1168371663718175, + "grad_norm": 6.254295349121094, + "learning_rate": 1.4461581691410757e-05, + "loss": 1.8518, + "step": 1815 + }, + { + "epoch": 1.1174525036535652, + "grad_norm": 6.143066883087158, + "learning_rate": 1.4455634045948562e-05, + "loss": 1.9197, + "step": 1816 + }, + { + "epoch": 1.1180678409353126, + "grad_norm": 5.186299800872803, + "learning_rate": 1.4449684433260134e-05, + "loss": 1.9343, + "step": 1817 + }, + { + "epoch": 1.1186831782170603, + "grad_norm": 5.977596282958984, + "learning_rate": 1.4443732855972307e-05, + "loss": 1.9778, + "step": 1818 + }, + { + "epoch": 1.1192985154988078, + "grad_norm": 6.826209545135498, + "learning_rate": 1.4437779316712797e-05, + "loss": 2.0038, + "step": 1819 + }, + { + "epoch": 1.1199138527805554, + "grad_norm": 6.399897575378418, + "learning_rate": 1.4431823818110166e-05, + "loss": 1.7532, + "step": 1820 + }, + { + "epoch": 1.120529190062303, + "grad_norm": 7.844462871551514, + "learning_rate": 1.4425866362793863e-05, + "loss": 1.9184, + "step": 1821 + }, + { + "epoch": 1.1211445273440503, + "grad_norm": 6.503533840179443, + "learning_rate": 1.4419906953394179e-05, + "loss": 1.6805, + "step": 1822 + }, + { + "epoch": 1.121759864625798, + "grad_norm": 5.990655422210693, + "learning_rate": 1.4413945592542282e-05, + "loss": 2.0037, + "step": 1823 + }, + { + "epoch": 1.1223752019075455, + "grad_norm": 6.0809783935546875, + "learning_rate": 1.4407982282870203e-05, + "loss": 1.867, + "step": 1824 + }, + { + "epoch": 1.1229905391892931, + "grad_norm": 5.530211925506592, + "learning_rate": 1.4402017027010824e-05, + "loss": 2.0853, + "step": 1825 + }, + { + "epoch": 1.1236058764710406, + "grad_norm": 5.934091567993164, + "learning_rate": 1.4396049827597893e-05, + "loss": 1.9681, + "step": 1826 + }, + { + "epoch": 1.1242212137527883, + "grad_norm": 6.638157367706299, + "learning_rate": 1.4390080687266013e-05, + "loss": 2.0163, + "step": 1827 + }, + { + "epoch": 1.1248365510345357, + "grad_norm": 7.785284996032715, + "learning_rate": 1.4384109608650645e-05, + "loss": 1.9728, + "step": 1828 + }, + { + "epoch": 1.1254518883162834, + "grad_norm": 7.632718086242676, + "learning_rate": 1.4378136594388105e-05, + "loss": 1.7505, + "step": 1829 + }, + { + "epoch": 1.1260672255980309, + "grad_norm": 6.945493698120117, + 
"learning_rate": 1.4372161647115563e-05, + "loss": 1.9158, + "step": 1830 + }, + { + "epoch": 1.1266825628797785, + "grad_norm": 7.118682384490967, + "learning_rate": 1.4366184769471053e-05, + "loss": 1.9868, + "step": 1831 + }, + { + "epoch": 1.127297900161526, + "grad_norm": 6.86276388168335, + "learning_rate": 1.4360205964093436e-05, + "loss": 2.0491, + "step": 1832 + }, + { + "epoch": 1.1279132374432737, + "grad_norm": 6.340496063232422, + "learning_rate": 1.4354225233622457e-05, + "loss": 1.9345, + "step": 1833 + }, + { + "epoch": 1.128528574725021, + "grad_norm": 6.542847633361816, + "learning_rate": 1.434824258069868e-05, + "loss": 1.8752, + "step": 1834 + }, + { + "epoch": 1.1291439120067688, + "grad_norm": 7.444716453552246, + "learning_rate": 1.4342258007963541e-05, + "loss": 1.9876, + "step": 1835 + }, + { + "epoch": 1.1297592492885162, + "grad_norm": 6.454919338226318, + "learning_rate": 1.433627151805931e-05, + "loss": 1.8391, + "step": 1836 + }, + { + "epoch": 1.130374586570264, + "grad_norm": 5.510046005249023, + "learning_rate": 1.4330283113629112e-05, + "loss": 1.8962, + "step": 1837 + }, + { + "epoch": 1.1309899238520114, + "grad_norm": 5.873473167419434, + "learning_rate": 1.4324292797316908e-05, + "loss": 1.9923, + "step": 1838 + }, + { + "epoch": 1.131605261133759, + "grad_norm": 7.814196586608887, + "learning_rate": 1.4318300571767514e-05, + "loss": 2.0343, + "step": 1839 + }, + { + "epoch": 1.1322205984155065, + "grad_norm": 6.525686264038086, + "learning_rate": 1.4312306439626583e-05, + "loss": 2.0894, + "step": 1840 + }, + { + "epoch": 1.132835935697254, + "grad_norm": 6.01018762588501, + "learning_rate": 1.4306310403540602e-05, + "loss": 1.8674, + "step": 1841 + }, + { + "epoch": 1.1334512729790016, + "grad_norm": 6.411753177642822, + "learning_rate": 1.4300312466156917e-05, + "loss": 1.654, + "step": 1842 + }, + { + "epoch": 1.1340666102607493, + "grad_norm": 5.610366344451904, + "learning_rate": 1.4294312630123699e-05, + "loss": 1.9258, + "step": 1843 + }, + { + "epoch": 1.1346819475424967, + "grad_norm": 5.5772600173950195, + "learning_rate": 1.428831089808996e-05, + "loss": 1.767, + "step": 1844 + }, + { + "epoch": 1.1352972848242442, + "grad_norm": 5.488239765167236, + "learning_rate": 1.4282307272705555e-05, + "loss": 1.9093, + "step": 1845 + }, + { + "epoch": 1.1359126221059919, + "grad_norm": 6.809393882751465, + "learning_rate": 1.4276301756621162e-05, + "loss": 1.95, + "step": 1846 + }, + { + "epoch": 1.1365279593877393, + "grad_norm": 6.609917163848877, + "learning_rate": 1.4270294352488316e-05, + "loss": 1.9828, + "step": 1847 + }, + { + "epoch": 1.137143296669487, + "grad_norm": 5.247275352478027, + "learning_rate": 1.4264285062959358e-05, + "loss": 2.0326, + "step": 1848 + }, + { + "epoch": 1.1377586339512344, + "grad_norm": 6.093908786773682, + "learning_rate": 1.4258273890687487e-05, + "loss": 1.7519, + "step": 1849 + }, + { + "epoch": 1.1383739712329821, + "grad_norm": 5.283247470855713, + "learning_rate": 1.425226083832671e-05, + "loss": 1.7404, + "step": 1850 + }, + { + "epoch": 1.1389893085147296, + "grad_norm": 7.709108352661133, + "learning_rate": 1.4246245908531883e-05, + "loss": 1.8611, + "step": 1851 + }, + { + "epoch": 1.1396046457964772, + "grad_norm": 5.860177516937256, + "learning_rate": 1.4240229103958687e-05, + "loss": 2.046, + "step": 1852 + }, + { + "epoch": 1.1402199830782247, + "grad_norm": 6.918152332305908, + "learning_rate": 1.423421042726362e-05, + "loss": 1.832, + "step": 1853 + }, + { + "epoch": 1.1408353203599724, + 
"grad_norm": 6.310130596160889, + "learning_rate": 1.4228189881104016e-05, + "loss": 1.8204, + "step": 1854 + }, + { + "epoch": 1.1414506576417198, + "grad_norm": 6.032684326171875, + "learning_rate": 1.4222167468138034e-05, + "loss": 1.8328, + "step": 1855 + }, + { + "epoch": 1.1420659949234675, + "grad_norm": 5.9068756103515625, + "learning_rate": 1.4216143191024656e-05, + "loss": 2.0494, + "step": 1856 + }, + { + "epoch": 1.142681332205215, + "grad_norm": 5.846731662750244, + "learning_rate": 1.4210117052423683e-05, + "loss": 1.901, + "step": 1857 + }, + { + "epoch": 1.1432966694869626, + "grad_norm": 6.306459903717041, + "learning_rate": 1.4204089054995745e-05, + "loss": 2.1647, + "step": 1858 + }, + { + "epoch": 1.14391200676871, + "grad_norm": 6.922242164611816, + "learning_rate": 1.4198059201402288e-05, + "loss": 1.4747, + "step": 1859 + }, + { + "epoch": 1.1445273440504578, + "grad_norm": 7.462320327758789, + "learning_rate": 1.4192027494305578e-05, + "loss": 1.9579, + "step": 1860 + }, + { + "epoch": 1.1451426813322052, + "grad_norm": 6.445594310760498, + "learning_rate": 1.4185993936368703e-05, + "loss": 1.8852, + "step": 1861 + }, + { + "epoch": 1.1457580186139529, + "grad_norm": 6.628572463989258, + "learning_rate": 1.4179958530255559e-05, + "loss": 1.9654, + "step": 1862 + }, + { + "epoch": 1.1463733558957003, + "grad_norm": 7.737904071807861, + "learning_rate": 1.4173921278630874e-05, + "loss": 1.7002, + "step": 1863 + }, + { + "epoch": 1.1469886931774478, + "grad_norm": 6.8194580078125, + "learning_rate": 1.4167882184160168e-05, + "loss": 1.9222, + "step": 1864 + }, + { + "epoch": 1.1476040304591955, + "grad_norm": 6.484294414520264, + "learning_rate": 1.41618412495098e-05, + "loss": 1.7458, + "step": 1865 + }, + { + "epoch": 1.148219367740943, + "grad_norm": 6.83275842666626, + "learning_rate": 1.4155798477346922e-05, + "loss": 2.0913, + "step": 1866 + }, + { + "epoch": 1.1488347050226906, + "grad_norm": 7.38653564453125, + "learning_rate": 1.4149753870339509e-05, + "loss": 1.799, + "step": 1867 + }, + { + "epoch": 1.149450042304438, + "grad_norm": 6.084892272949219, + "learning_rate": 1.4143707431156337e-05, + "loss": 2.0038, + "step": 1868 + }, + { + "epoch": 1.1500653795861857, + "grad_norm": 5.874941349029541, + "learning_rate": 1.4137659162466999e-05, + "loss": 1.9719, + "step": 1869 + }, + { + "epoch": 1.1506807168679332, + "grad_norm": 6.767702102661133, + "learning_rate": 1.413160906694189e-05, + "loss": 2.0812, + "step": 1870 + }, + { + "epoch": 1.1512960541496808, + "grad_norm": 6.303641319274902, + "learning_rate": 1.4125557147252215e-05, + "loss": 1.9162, + "step": 1871 + }, + { + "epoch": 1.1519113914314283, + "grad_norm": 6.65217399597168, + "learning_rate": 1.411950340606998e-05, + "loss": 2.0469, + "step": 1872 + }, + { + "epoch": 1.152526728713176, + "grad_norm": 6.118605613708496, + "learning_rate": 1.4113447846068007e-05, + "loss": 2.0403, + "step": 1873 + }, + { + "epoch": 1.1531420659949234, + "grad_norm": 6.41951322555542, + "learning_rate": 1.4107390469919899e-05, + "loss": 1.672, + "step": 1874 + }, + { + "epoch": 1.153757403276671, + "grad_norm": 5.9683685302734375, + "learning_rate": 1.410133128030009e-05, + "loss": 1.9462, + "step": 1875 + }, + { + "epoch": 1.1543727405584185, + "grad_norm": 6.242847442626953, + "learning_rate": 1.4095270279883786e-05, + "loss": 1.9273, + "step": 1876 + }, + { + "epoch": 1.1549880778401662, + "grad_norm": 5.844324588775635, + "learning_rate": 1.4089207471347013e-05, + "loss": 1.7901, + "step": 1877 + }, + { + 
"epoch": 1.1556034151219137, + "grad_norm": 6.091996669769287, + "learning_rate": 1.408314285736658e-05, + "loss": 1.651, + "step": 1878 + }, + { + "epoch": 1.1562187524036613, + "grad_norm": 8.375609397888184, + "learning_rate": 1.4077076440620112e-05, + "loss": 1.8805, + "step": 1879 + }, + { + "epoch": 1.1568340896854088, + "grad_norm": 6.080041885375977, + "learning_rate": 1.4071008223786011e-05, + "loss": 1.8606, + "step": 1880 + }, + { + "epoch": 1.1574494269671565, + "grad_norm": 6.624536991119385, + "learning_rate": 1.4064938209543483e-05, + "loss": 1.8521, + "step": 1881 + }, + { + "epoch": 1.158064764248904, + "grad_norm": 6.769179344177246, + "learning_rate": 1.4058866400572528e-05, + "loss": 1.8299, + "step": 1882 + }, + { + "epoch": 1.1586801015306514, + "grad_norm": 6.238314151763916, + "learning_rate": 1.4052792799553935e-05, + "loss": 1.9355, + "step": 1883 + }, + { + "epoch": 1.159295438812399, + "grad_norm": 7.183286190032959, + "learning_rate": 1.4046717409169288e-05, + "loss": 1.6207, + "step": 1884 + }, + { + "epoch": 1.1599107760941467, + "grad_norm": 6.468939304351807, + "learning_rate": 1.4040640232100955e-05, + "loss": 1.7038, + "step": 1885 + }, + { + "epoch": 1.1605261133758942, + "grad_norm": 7.319580078125, + "learning_rate": 1.4034561271032096e-05, + "loss": 1.8691, + "step": 1886 + }, + { + "epoch": 1.1611414506576416, + "grad_norm": 7.0074944496154785, + "learning_rate": 1.4028480528646669e-05, + "loss": 1.8492, + "step": 1887 + }, + { + "epoch": 1.1617567879393893, + "grad_norm": 7.138804912567139, + "learning_rate": 1.4022398007629396e-05, + "loss": 1.8735, + "step": 1888 + }, + { + "epoch": 1.1623721252211368, + "grad_norm": 5.75323486328125, + "learning_rate": 1.401631371066581e-05, + "loss": 1.7911, + "step": 1889 + }, + { + "epoch": 1.1629874625028844, + "grad_norm": 6.762871265411377, + "learning_rate": 1.4010227640442202e-05, + "loss": 1.8655, + "step": 1890 + }, + { + "epoch": 1.1636027997846319, + "grad_norm": 5.211088180541992, + "learning_rate": 1.4004139799645669e-05, + "loss": 1.9527, + "step": 1891 + }, + { + "epoch": 1.1642181370663796, + "grad_norm": 6.648775577545166, + "learning_rate": 1.3998050190964075e-05, + "loss": 1.9847, + "step": 1892 + }, + { + "epoch": 1.164833474348127, + "grad_norm": 5.676955223083496, + "learning_rate": 1.3991958817086068e-05, + "loss": 1.9872, + "step": 1893 + }, + { + "epoch": 1.1654488116298747, + "grad_norm": 6.81825590133667, + "learning_rate": 1.3985865680701084e-05, + "loss": 2.0001, + "step": 1894 + }, + { + "epoch": 1.1660641489116221, + "grad_norm": 5.807706356048584, + "learning_rate": 1.3979770784499325e-05, + "loss": 1.9173, + "step": 1895 + }, + { + "epoch": 1.1666794861933698, + "grad_norm": 7.092994689941406, + "learning_rate": 1.3973674131171775e-05, + "loss": 1.897, + "step": 1896 + }, + { + "epoch": 1.1672948234751173, + "grad_norm": 6.276900291442871, + "learning_rate": 1.3967575723410194e-05, + "loss": 2.0628, + "step": 1897 + }, + { + "epoch": 1.167910160756865, + "grad_norm": 5.939901351928711, + "learning_rate": 1.396147556390712e-05, + "loss": 1.9855, + "step": 1898 + }, + { + "epoch": 1.1685254980386124, + "grad_norm": 5.817834377288818, + "learning_rate": 1.3955373655355852e-05, + "loss": 1.9661, + "step": 1899 + }, + { + "epoch": 1.16914083532036, + "grad_norm": 9.346198081970215, + "learning_rate": 1.3949270000450476e-05, + "loss": 1.847, + "step": 1900 + }, + { + "epoch": 1.1697561726021075, + "grad_norm": 8.53718090057373, + "learning_rate": 1.3943164601885846e-05, + "loss": 
2.0972, + "step": 1901 + }, + { + "epoch": 1.170371509883855, + "grad_norm": 6.237472057342529, + "learning_rate": 1.3937057462357578e-05, + "loss": 1.9312, + "step": 1902 + }, + { + "epoch": 1.1709868471656026, + "grad_norm": 8.493489265441895, + "learning_rate": 1.3930948584562062e-05, + "loss": 1.9677, + "step": 1903 + }, + { + "epoch": 1.1716021844473503, + "grad_norm": 6.053918838500977, + "learning_rate": 1.3924837971196456e-05, + "loss": 2.0517, + "step": 1904 + }, + { + "epoch": 1.1722175217290978, + "grad_norm": 7.852814674377441, + "learning_rate": 1.3918725624958684e-05, + "loss": 1.9017, + "step": 1905 + }, + { + "epoch": 1.1728328590108452, + "grad_norm": 5.591170310974121, + "learning_rate": 1.3912611548547435e-05, + "loss": 1.9912, + "step": 1906 + }, + { + "epoch": 1.173448196292593, + "grad_norm": 7.0988078117370605, + "learning_rate": 1.3906495744662159e-05, + "loss": 1.8175, + "step": 1907 + }, + { + "epoch": 1.1740635335743403, + "grad_norm": 6.294561386108398, + "learning_rate": 1.3900378216003073e-05, + "loss": 1.769, + "step": 1908 + }, + { + "epoch": 1.174678870856088, + "grad_norm": 7.824243545532227, + "learning_rate": 1.389425896527115e-05, + "loss": 2.0002, + "step": 1909 + }, + { + "epoch": 1.1752942081378355, + "grad_norm": 7.104013919830322, + "learning_rate": 1.388813799516813e-05, + "loss": 1.8954, + "step": 1910 + }, + { + "epoch": 1.1759095454195831, + "grad_norm": 6.5818772315979, + "learning_rate": 1.3882015308396508e-05, + "loss": 1.7804, + "step": 1911 + }, + { + "epoch": 1.1765248827013306, + "grad_norm": 9.619221687316895, + "learning_rate": 1.3875890907659539e-05, + "loss": 2.123, + "step": 1912 + }, + { + "epoch": 1.1771402199830783, + "grad_norm": 7.281387805938721, + "learning_rate": 1.3869764795661233e-05, + "loss": 1.9446, + "step": 1913 + }, + { + "epoch": 1.1777555572648257, + "grad_norm": 7.1689324378967285, + "learning_rate": 1.3863636975106354e-05, + "loss": 1.92, + "step": 1914 + }, + { + "epoch": 1.1783708945465734, + "grad_norm": 6.39445686340332, + "learning_rate": 1.3857507448700425e-05, + "loss": 1.8599, + "step": 1915 + }, + { + "epoch": 1.1789862318283209, + "grad_norm": 5.920019626617432, + "learning_rate": 1.3851376219149721e-05, + "loss": 1.9523, + "step": 1916 + }, + { + "epoch": 1.1796015691100685, + "grad_norm": 5.8803486824035645, + "learning_rate": 1.3845243289161263e-05, + "loss": 1.9421, + "step": 1917 + }, + { + "epoch": 1.180216906391816, + "grad_norm": 6.8632659912109375, + "learning_rate": 1.3839108661442832e-05, + "loss": 2.0433, + "step": 1918 + }, + { + "epoch": 1.1808322436735637, + "grad_norm": 5.940195083618164, + "learning_rate": 1.3832972338702954e-05, + "loss": 2.0008, + "step": 1919 + }, + { + "epoch": 1.181447580955311, + "grad_norm": 8.498902320861816, + "learning_rate": 1.3826834323650899e-05, + "loss": 1.9383, + "step": 1920 + }, + { + "epoch": 1.1820629182370588, + "grad_norm": 5.611064910888672, + "learning_rate": 1.3820694618996692e-05, + "loss": 1.9293, + "step": 1921 + }, + { + "epoch": 1.1826782555188062, + "grad_norm": 7.186694145202637, + "learning_rate": 1.3814553227451099e-05, + "loss": 1.9569, + "step": 1922 + }, + { + "epoch": 1.183293592800554, + "grad_norm": 5.808206081390381, + "learning_rate": 1.3808410151725633e-05, + "loss": 1.9532, + "step": 1923 + }, + { + "epoch": 1.1839089300823014, + "grad_norm": 5.80131721496582, + "learning_rate": 1.380226539453255e-05, + "loss": 1.9009, + "step": 1924 + }, + { + "epoch": 1.1845242673640488, + "grad_norm": 8.05959415435791, + "learning_rate": 
1.379611895858485e-05, + "loss": 1.9246, + "step": 1925 + }, + { + "epoch": 1.1851396046457965, + "grad_norm": 5.380119800567627, + "learning_rate": 1.3789970846596268e-05, + "loss": 2.1423, + "step": 1926 + }, + { + "epoch": 1.1857549419275442, + "grad_norm": 6.846277236938477, + "learning_rate": 1.3783821061281285e-05, + "loss": 1.9195, + "step": 1927 + }, + { + "epoch": 1.1863702792092916, + "grad_norm": 7.480844974517822, + "learning_rate": 1.377766960535512e-05, + "loss": 1.9921, + "step": 1928 + }, + { + "epoch": 1.186985616491039, + "grad_norm": 8.89071273803711, + "learning_rate": 1.3771516481533733e-05, + "loss": 1.908, + "step": 1929 + }, + { + "epoch": 1.1876009537727867, + "grad_norm": 6.241833686828613, + "learning_rate": 1.376536169253381e-05, + "loss": 1.8496, + "step": 1930 + }, + { + "epoch": 1.1882162910545342, + "grad_norm": 7.032931327819824, + "learning_rate": 1.3759205241072782e-05, + "loss": 1.8208, + "step": 1931 + }, + { + "epoch": 1.1888316283362819, + "grad_norm": 5.49415397644043, + "learning_rate": 1.3753047129868808e-05, + "loss": 1.8831, + "step": 1932 + }, + { + "epoch": 1.1894469656180293, + "grad_norm": 7.647847652435303, + "learning_rate": 1.3746887361640786e-05, + "loss": 1.9139, + "step": 1933 + }, + { + "epoch": 1.190062302899777, + "grad_norm": 7.017206192016602, + "learning_rate": 1.3740725939108336e-05, + "loss": 2.0446, + "step": 1934 + }, + { + "epoch": 1.1906776401815244, + "grad_norm": 5.584463596343994, + "learning_rate": 1.3734562864991823e-05, + "loss": 1.8408, + "step": 1935 + }, + { + "epoch": 1.1912929774632721, + "grad_norm": 5.82896089553833, + "learning_rate": 1.3728398142012323e-05, + "loss": 2.0983, + "step": 1936 + }, + { + "epoch": 1.1919083147450196, + "grad_norm": 6.193018436431885, + "learning_rate": 1.3722231772891657e-05, + "loss": 1.9592, + "step": 1937 + }, + { + "epoch": 1.1925236520267672, + "grad_norm": 5.322345733642578, + "learning_rate": 1.3716063760352359e-05, + "loss": 1.7433, + "step": 1938 + }, + { + "epoch": 1.1931389893085147, + "grad_norm": 5.754003047943115, + "learning_rate": 1.3709894107117698e-05, + "loss": 1.8946, + "step": 1939 + }, + { + "epoch": 1.1937543265902624, + "grad_norm": 5.109851360321045, + "learning_rate": 1.3703722815911667e-05, + "loss": 1.7875, + "step": 1940 + }, + { + "epoch": 1.1943696638720098, + "grad_norm": 5.390745162963867, + "learning_rate": 1.3697549889458974e-05, + "loss": 1.7723, + "step": 1941 + }, + { + "epoch": 1.1949850011537575, + "grad_norm": 7.532373428344727, + "learning_rate": 1.3691375330485057e-05, + "loss": 1.8363, + "step": 1942 + }, + { + "epoch": 1.195600338435505, + "grad_norm": 5.946829319000244, + "learning_rate": 1.3685199141716074e-05, + "loss": 2.0426, + "step": 1943 + }, + { + "epoch": 1.1962156757172524, + "grad_norm": 6.128093242645264, + "learning_rate": 1.3679021325878901e-05, + "loss": 1.75, + "step": 1944 + }, + { + "epoch": 1.196831012999, + "grad_norm": 5.948440074920654, + "learning_rate": 1.3672841885701127e-05, + "loss": 2.0378, + "step": 1945 + }, + { + "epoch": 1.1974463502807478, + "grad_norm": 6.3420586585998535, + "learning_rate": 1.3666660823911067e-05, + "loss": 1.8851, + "step": 1946 + }, + { + "epoch": 1.1980616875624952, + "grad_norm": 7.074560642242432, + "learning_rate": 1.3660478143237748e-05, + "loss": 1.6619, + "step": 1947 + }, + { + "epoch": 1.1986770248442427, + "grad_norm": 6.364389419555664, + "learning_rate": 1.3654293846410913e-05, + "loss": 1.9275, + "step": 1948 + }, + { + "epoch": 1.1992923621259903, + "grad_norm": 
6.368815898895264, + "learning_rate": 1.3648107936161015e-05, + "loss": 2.1609, + "step": 1949 + }, + { + "epoch": 1.1999076994077378, + "grad_norm": 6.223146915435791, + "learning_rate": 1.3641920415219224e-05, + "loss": 1.8281, + "step": 1950 + }, + { + "epoch": 1.2005230366894855, + "grad_norm": 5.930906295776367, + "learning_rate": 1.3635731286317415e-05, + "loss": 1.9897, + "step": 1951 + }, + { + "epoch": 1.201138373971233, + "grad_norm": 6.237645149230957, + "learning_rate": 1.3629540552188188e-05, + "loss": 2.0181, + "step": 1952 + }, + { + "epoch": 1.2017537112529806, + "grad_norm": 8.184050559997559, + "learning_rate": 1.3623348215564827e-05, + "loss": 1.898, + "step": 1953 + }, + { + "epoch": 1.202369048534728, + "grad_norm": 5.800127983093262, + "learning_rate": 1.3617154279181348e-05, + "loss": 2.0394, + "step": 1954 + }, + { + "epoch": 1.2029843858164757, + "grad_norm": 5.910327434539795, + "learning_rate": 1.3610958745772456e-05, + "loss": 1.9579, + "step": 1955 + }, + { + "epoch": 1.2035997230982232, + "grad_norm": 6.338083267211914, + "learning_rate": 1.3604761618073567e-05, + "loss": 1.8194, + "step": 1956 + }, + { + "epoch": 1.2042150603799708, + "grad_norm": 7.066787242889404, + "learning_rate": 1.359856289882081e-05, + "loss": 1.7148, + "step": 1957 + }, + { + "epoch": 1.2048303976617183, + "grad_norm": 6.1598286628723145, + "learning_rate": 1.3592362590751001e-05, + "loss": 2.08, + "step": 1958 + }, + { + "epoch": 1.205445734943466, + "grad_norm": 6.437409400939941, + "learning_rate": 1.3586160696601667e-05, + "loss": 1.9064, + "step": 1959 + }, + { + "epoch": 1.2060610722252134, + "grad_norm": 7.379507541656494, + "learning_rate": 1.3579957219111031e-05, + "loss": 1.803, + "step": 1960 + }, + { + "epoch": 1.206676409506961, + "grad_norm": 5.961740016937256, + "learning_rate": 1.3573752161018021e-05, + "loss": 1.9543, + "step": 1961 + }, + { + "epoch": 1.2072917467887085, + "grad_norm": 9.158125877380371, + "learning_rate": 1.3567545525062256e-05, + "loss": 2.0042, + "step": 1962 + }, + { + "epoch": 1.207907084070456, + "grad_norm": 5.524449348449707, + "learning_rate": 1.3561337313984053e-05, + "loss": 1.8227, + "step": 1963 + }, + { + "epoch": 1.2085224213522037, + "grad_norm": 7.304518699645996, + "learning_rate": 1.3555127530524432e-05, + "loss": 2.0558, + "step": 1964 + }, + { + "epoch": 1.2091377586339513, + "grad_norm": 6.372654914855957, + "learning_rate": 1.3548916177425095e-05, + "loss": 1.7921, + "step": 1965 + }, + { + "epoch": 1.2097530959156988, + "grad_norm": 9.611068725585938, + "learning_rate": 1.3542703257428454e-05, + "loss": 1.9721, + "step": 1966 + }, + { + "epoch": 1.2103684331974462, + "grad_norm": 6.749753475189209, + "learning_rate": 1.3536488773277586e-05, + "loss": 1.9336, + "step": 1967 + }, + { + "epoch": 1.210983770479194, + "grad_norm": 6.129271507263184, + "learning_rate": 1.3530272727716286e-05, + "loss": 2.0186, + "step": 1968 + }, + { + "epoch": 1.2115991077609414, + "grad_norm": 7.297130584716797, + "learning_rate": 1.3524055123489023e-05, + "loss": 2.1436, + "step": 1969 + }, + { + "epoch": 1.212214445042689, + "grad_norm": 6.835809230804443, + "learning_rate": 1.3517835963340961e-05, + "loss": 2.1028, + "step": 1970 + }, + { + "epoch": 1.2128297823244365, + "grad_norm": 5.582975387573242, + "learning_rate": 1.3511615250017948e-05, + "loss": 1.9136, + "step": 1971 + }, + { + "epoch": 1.2134451196061842, + "grad_norm": 5.187623023986816, + "learning_rate": 1.3505392986266516e-05, + "loss": 1.9389, + "step": 1972 + }, + { + "epoch": 
1.2140604568879316, + "grad_norm": 6.257410526275635, + "learning_rate": 1.3499169174833883e-05, + "loss": 1.8528, + "step": 1973 + }, + { + "epoch": 1.2146757941696793, + "grad_norm": 7.004724502563477, + "learning_rate": 1.3492943818467956e-05, + "loss": 1.7448, + "step": 1974 + }, + { + "epoch": 1.2152911314514268, + "grad_norm": 7.376151084899902, + "learning_rate": 1.3486716919917313e-05, + "loss": 2.0186, + "step": 1975 + }, + { + "epoch": 1.2159064687331744, + "grad_norm": 6.776345252990723, + "learning_rate": 1.348048848193122e-05, + "loss": 2.0711, + "step": 1976 + }, + { + "epoch": 1.2165218060149219, + "grad_norm": 6.360037326812744, + "learning_rate": 1.3474258507259623e-05, + "loss": 1.9435, + "step": 1977 + }, + { + "epoch": 1.2171371432966696, + "grad_norm": 6.893351078033447, + "learning_rate": 1.3468026998653147e-05, + "loss": 1.9831, + "step": 1978 + }, + { + "epoch": 1.217752480578417, + "grad_norm": 6.121695518493652, + "learning_rate": 1.3461793958863087e-05, + "loss": 1.9786, + "step": 1979 + }, + { + "epoch": 1.2183678178601647, + "grad_norm": 5.578811168670654, + "learning_rate": 1.3455559390641426e-05, + "loss": 1.8396, + "step": 1980 + }, + { + "epoch": 1.2189831551419121, + "grad_norm": 6.120358943939209, + "learning_rate": 1.3449323296740808e-05, + "loss": 1.9618, + "step": 1981 + }, + { + "epoch": 1.2195984924236598, + "grad_norm": 6.336721420288086, + "learning_rate": 1.3443085679914566e-05, + "loss": 1.9006, + "step": 1982 + }, + { + "epoch": 1.2202138297054073, + "grad_norm": 6.22429895401001, + "learning_rate": 1.3436846542916686e-05, + "loss": 2.0721, + "step": 1983 + }, + { + "epoch": 1.220829166987155, + "grad_norm": 6.919360160827637, + "learning_rate": 1.3430605888501846e-05, + "loss": 1.8456, + "step": 1984 + }, + { + "epoch": 1.2214445042689024, + "grad_norm": 6.921582221984863, + "learning_rate": 1.3424363719425382e-05, + "loss": 1.9024, + "step": 1985 + }, + { + "epoch": 1.2220598415506498, + "grad_norm": 7.731745719909668, + "learning_rate": 1.34181200384433e-05, + "loss": 1.8, + "step": 1986 + }, + { + "epoch": 1.2226751788323975, + "grad_norm": 5.676345348358154, + "learning_rate": 1.3411874848312274e-05, + "loss": 1.7755, + "step": 1987 + }, + { + "epoch": 1.2232905161141452, + "grad_norm": 6.054127216339111, + "learning_rate": 1.3405628151789645e-05, + "loss": 1.7778, + "step": 1988 + }, + { + "epoch": 1.2239058533958926, + "grad_norm": 5.7627739906311035, + "learning_rate": 1.339937995163342e-05, + "loss": 1.8611, + "step": 1989 + }, + { + "epoch": 1.22452119067764, + "grad_norm": 6.181137561798096, + "learning_rate": 1.3393130250602266e-05, + "loss": 1.9248, + "step": 1990 + }, + { + "epoch": 1.2251365279593878, + "grad_norm": 5.799289226531982, + "learning_rate": 1.338687905145552e-05, + "loss": 2.018, + "step": 1991 + }, + { + "epoch": 1.2257518652411352, + "grad_norm": 5.776966094970703, + "learning_rate": 1.3380626356953175e-05, + "loss": 1.966, + "step": 1992 + }, + { + "epoch": 1.226367202522883, + "grad_norm": 5.70413064956665, + "learning_rate": 1.337437216985588e-05, + "loss": 2.0049, + "step": 1993 + }, + { + "epoch": 1.2269825398046303, + "grad_norm": 6.496288776397705, + "learning_rate": 1.336811649292496e-05, + "loss": 1.746, + "step": 1994 + }, + { + "epoch": 1.227597877086378, + "grad_norm": 6.393544673919678, + "learning_rate": 1.3361859328922368e-05, + "loss": 1.8029, + "step": 1995 + }, + { + "epoch": 1.2282132143681255, + "grad_norm": 8.105537414550781, + "learning_rate": 1.3355600680610748e-05, + "loss": 1.9281, + "step": 
1996 + }, + { + "epoch": 1.2288285516498731, + "grad_norm": 6.7314677238464355, + "learning_rate": 1.3349340550753374e-05, + "loss": 1.7729, + "step": 1997 + }, + { + "epoch": 1.2294438889316206, + "grad_norm": 7.052388668060303, + "learning_rate": 1.3343078942114185e-05, + "loss": 1.9156, + "step": 1998 + }, + { + "epoch": 1.2300592262133683, + "grad_norm": 6.659929275512695, + "learning_rate": 1.3336815857457772e-05, + "loss": 1.9331, + "step": 1999 + }, + { + "epoch": 1.2306745634951157, + "grad_norm": 6.486208915710449, + "learning_rate": 1.3330551299549377e-05, + "loss": 1.7831, + "step": 2000 + }, + { + "epoch": 1.2312899007768634, + "grad_norm": 6.1678466796875, + "learning_rate": 1.3324285271154889e-05, + "loss": 1.9695, + "step": 2001 + }, + { + "epoch": 1.2319052380586109, + "grad_norm": 6.929620742797852, + "learning_rate": 1.3318017775040852e-05, + "loss": 1.7716, + "step": 2002 + }, + { + "epoch": 1.2325205753403585, + "grad_norm": 7.108518600463867, + "learning_rate": 1.3311748813974454e-05, + "loss": 1.8373, + "step": 2003 + }, + { + "epoch": 1.233135912622106, + "grad_norm": 9.260191917419434, + "learning_rate": 1.3305478390723532e-05, + "loss": 1.7067, + "step": 2004 + }, + { + "epoch": 1.2337512499038534, + "grad_norm": 6.445788860321045, + "learning_rate": 1.3299206508056563e-05, + "loss": 1.7877, + "step": 2005 + }, + { + "epoch": 1.234366587185601, + "grad_norm": 6.024454593658447, + "learning_rate": 1.3292933168742685e-05, + "loss": 2.012, + "step": 2006 + }, + { + "epoch": 1.2349819244673488, + "grad_norm": 5.954429626464844, + "learning_rate": 1.3286658375551654e-05, + "loss": 1.6704, + "step": 2007 + }, + { + "epoch": 1.2355972617490962, + "grad_norm": 6.160884857177734, + "learning_rate": 1.3280382131253888e-05, + "loss": 1.8451, + "step": 2008 + }, + { + "epoch": 1.2362125990308437, + "grad_norm": 6.997044563293457, + "learning_rate": 1.3274104438620435e-05, + "loss": 1.6653, + "step": 2009 + }, + { + "epoch": 1.2368279363125914, + "grad_norm": 5.8181586265563965, + "learning_rate": 1.3267825300422991e-05, + "loss": 1.8652, + "step": 2010 + }, + { + "epoch": 1.2374432735943388, + "grad_norm": 5.980932235717773, + "learning_rate": 1.3261544719433878e-05, + "loss": 2.0295, + "step": 2011 + }, + { + "epoch": 1.2380586108760865, + "grad_norm": 6.163631916046143, + "learning_rate": 1.325526269842607e-05, + "loss": 1.834, + "step": 2012 + }, + { + "epoch": 1.238673948157834, + "grad_norm": 6.248097896575928, + "learning_rate": 1.3248979240173167e-05, + "loss": 1.9433, + "step": 2013 + }, + { + "epoch": 1.2392892854395816, + "grad_norm": 7.274768352508545, + "learning_rate": 1.3242694347449405e-05, + "loss": 1.9126, + "step": 2014 + }, + { + "epoch": 1.239904622721329, + "grad_norm": 6.844907760620117, + "learning_rate": 1.3236408023029655e-05, + "loss": 1.8709, + "step": 2015 + }, + { + "epoch": 1.2405199600030767, + "grad_norm": 7.998079299926758, + "learning_rate": 1.3230120269689417e-05, + "loss": 1.7193, + "step": 2016 + }, + { + "epoch": 1.2411352972848242, + "grad_norm": 5.938083648681641, + "learning_rate": 1.3223831090204826e-05, + "loss": 2.0466, + "step": 2017 + }, + { + "epoch": 1.2417506345665719, + "grad_norm": 7.753738880157471, + "learning_rate": 1.3217540487352645e-05, + "loss": 1.8587, + "step": 2018 + }, + { + "epoch": 1.2423659718483193, + "grad_norm": 7.598982810974121, + "learning_rate": 1.3211248463910263e-05, + "loss": 1.9776, + "step": 2019 + }, + { + "epoch": 1.242981309130067, + "grad_norm": 6.096324443817139, + "learning_rate": 
1.3204955022655704e-05, + "loss": 1.753, + "step": 2020 + }, + { + "epoch": 1.2435966464118144, + "grad_norm": 6.442549705505371, + "learning_rate": 1.3198660166367603e-05, + "loss": 1.8744, + "step": 2021 + }, + { + "epoch": 1.2442119836935621, + "grad_norm": 5.608162879943848, + "learning_rate": 1.319236389782524e-05, + "loss": 1.9744, + "step": 2022 + }, + { + "epoch": 1.2448273209753096, + "grad_norm": 5.856525897979736, + "learning_rate": 1.3186066219808498e-05, + "loss": 2.0666, + "step": 2023 + }, + { + "epoch": 1.2454426582570572, + "grad_norm": 6.031161308288574, + "learning_rate": 1.3179767135097901e-05, + "loss": 1.7849, + "step": 2024 + }, + { + "epoch": 1.2460579955388047, + "grad_norm": 5.843185901641846, + "learning_rate": 1.3173466646474575e-05, + "loss": 1.9668, + "step": 2025 + }, + { + "epoch": 1.2466733328205524, + "grad_norm": 5.544034957885742, + "learning_rate": 1.3167164756720285e-05, + "loss": 1.7129, + "step": 2026 + }, + { + "epoch": 1.2472886701022998, + "grad_norm": 6.075882434844971, + "learning_rate": 1.3160861468617402e-05, + "loss": 1.8848, + "step": 2027 + }, + { + "epoch": 1.2479040073840473, + "grad_norm": 6.325423717498779, + "learning_rate": 1.3154556784948918e-05, + "loss": 1.9163, + "step": 2028 + }, + { + "epoch": 1.248519344665795, + "grad_norm": 8.320820808410645, + "learning_rate": 1.3148250708498443e-05, + "loss": 1.7379, + "step": 2029 + }, + { + "epoch": 1.2491346819475424, + "grad_norm": 5.232048511505127, + "learning_rate": 1.3141943242050195e-05, + "loss": 1.7624, + "step": 2030 + }, + { + "epoch": 1.24975001922929, + "grad_norm": 5.543237686157227, + "learning_rate": 1.3135634388389016e-05, + "loss": 1.9503, + "step": 2031 + }, + { + "epoch": 1.2503653565110375, + "grad_norm": 5.278805255889893, + "learning_rate": 1.312932415030035e-05, + "loss": 1.9835, + "step": 2032 + }, + { + "epoch": 1.2509806937927852, + "grad_norm": 8.95687198638916, + "learning_rate": 1.3123012530570263e-05, + "loss": 2.0127, + "step": 2033 + }, + { + "epoch": 1.2515960310745327, + "grad_norm": 6.756926536560059, + "learning_rate": 1.3116699531985426e-05, + "loss": 1.9108, + "step": 2034 + }, + { + "epoch": 1.2522113683562803, + "grad_norm": 5.638125896453857, + "learning_rate": 1.311038515733311e-05, + "loss": 1.8674, + "step": 2035 + }, + { + "epoch": 1.2528267056380278, + "grad_norm": 6.744015693664551, + "learning_rate": 1.3104069409401214e-05, + "loss": 1.7778, + "step": 2036 + }, + { + "epoch": 1.2534420429197755, + "grad_norm": 6.783023357391357, + "learning_rate": 1.3097752290978219e-05, + "loss": 1.8984, + "step": 2037 + }, + { + "epoch": 1.254057380201523, + "grad_norm": 6.752358436584473, + "learning_rate": 1.309143380485323e-05, + "loss": 1.8903, + "step": 2038 + }, + { + "epoch": 1.2546727174832706, + "grad_norm": 7.7219319343566895, + "learning_rate": 1.3085113953815948e-05, + "loss": 2.0621, + "step": 2039 + }, + { + "epoch": 1.255288054765018, + "grad_norm": 7.155499458312988, + "learning_rate": 1.3078792740656679e-05, + "loss": 2.1133, + "step": 2040 + }, + { + "epoch": 1.2559033920467657, + "grad_norm": 6.685373783111572, + "learning_rate": 1.3072470168166325e-05, + "loss": 1.7662, + "step": 2041 + }, + { + "epoch": 1.2565187293285132, + "grad_norm": 6.547412395477295, + "learning_rate": 1.3066146239136398e-05, + "loss": 1.9958, + "step": 2042 + }, + { + "epoch": 1.2571340666102606, + "grad_norm": 6.380745887756348, + "learning_rate": 1.3059820956358998e-05, + "loss": 1.7436, + "step": 2043 + }, + { + "epoch": 1.2577494038920083, + "grad_norm": 
9.6522855758667, + "learning_rate": 1.305349432262683e-05, + "loss": 1.8718, + "step": 2044 + }, + { + "epoch": 1.258364741173756, + "grad_norm": 7.132034778594971, + "learning_rate": 1.3047166340733196e-05, + "loss": 1.9333, + "step": 2045 + }, + { + "epoch": 1.2589800784555034, + "grad_norm": 6.187729835510254, + "learning_rate": 1.3040837013471985e-05, + "loss": 1.6604, + "step": 2046 + }, + { + "epoch": 1.2595954157372509, + "grad_norm": 7.047447681427002, + "learning_rate": 1.3034506343637687e-05, + "loss": 1.981, + "step": 2047 + }, + { + "epoch": 1.2602107530189985, + "grad_norm": 6.45848274230957, + "learning_rate": 1.3028174334025392e-05, + "loss": 2.0467, + "step": 2048 + }, + { + "epoch": 1.2608260903007462, + "grad_norm": 6.9217529296875, + "learning_rate": 1.3021840987430761e-05, + "loss": 1.9247, + "step": 2049 + }, + { + "epoch": 1.2614414275824937, + "grad_norm": 6.7310285568237305, + "learning_rate": 1.3015506306650063e-05, + "loss": 1.8786, + "step": 2050 + }, + { + "epoch": 1.2620567648642411, + "grad_norm": 6.79317045211792, + "learning_rate": 1.3009170294480149e-05, + "loss": 1.8663, + "step": 2051 + }, + { + "epoch": 1.2626721021459888, + "grad_norm": 6.181501865386963, + "learning_rate": 1.3002832953718462e-05, + "loss": 1.7614, + "step": 2052 + }, + { + "epoch": 1.2632874394277362, + "grad_norm": 6.295355319976807, + "learning_rate": 1.2996494287163024e-05, + "loss": 1.9876, + "step": 2053 + }, + { + "epoch": 1.263902776709484, + "grad_norm": 6.824727535247803, + "learning_rate": 1.299015429761245e-05, + "loss": 1.7711, + "step": 2054 + }, + { + "epoch": 1.2645181139912314, + "grad_norm": 5.861077785491943, + "learning_rate": 1.2983812987865936e-05, + "loss": 1.8942, + "step": 2055 + }, + { + "epoch": 1.265133451272979, + "grad_norm": 8.688406944274902, + "learning_rate": 1.2977470360723258e-05, + "loss": 1.7806, + "step": 2056 + }, + { + "epoch": 1.2657487885547265, + "grad_norm": 7.598904132843018, + "learning_rate": 1.2971126418984783e-05, + "loss": 1.9555, + "step": 2057 + }, + { + "epoch": 1.2663641258364742, + "grad_norm": 8.02134895324707, + "learning_rate": 1.2964781165451448e-05, + "loss": 1.9085, + "step": 2058 + }, + { + "epoch": 1.2669794631182216, + "grad_norm": 5.459079265594482, + "learning_rate": 1.2958434602924771e-05, + "loss": 1.8974, + "step": 2059 + }, + { + "epoch": 1.2675948003999693, + "grad_norm": 5.859983921051025, + "learning_rate": 1.2952086734206857e-05, + "loss": 1.9206, + "step": 2060 + }, + { + "epoch": 1.2682101376817168, + "grad_norm": 6.6137566566467285, + "learning_rate": 1.2945737562100373e-05, + "loss": 1.8364, + "step": 2061 + }, + { + "epoch": 1.2688254749634644, + "grad_norm": 6.852616310119629, + "learning_rate": 1.2939387089408578e-05, + "loss": 1.7939, + "step": 2062 + }, + { + "epoch": 1.2694408122452119, + "grad_norm": 6.3088483810424805, + "learning_rate": 1.2933035318935285e-05, + "loss": 1.8857, + "step": 2063 + }, + { + "epoch": 1.2700561495269596, + "grad_norm": 5.520413398742676, + "learning_rate": 1.2926682253484906e-05, + "loss": 1.8274, + "step": 2064 + }, + { + "epoch": 1.270671486808707, + "grad_norm": 6.114968776702881, + "learning_rate": 1.2920327895862397e-05, + "loss": 1.9778, + "step": 2065 + }, + { + "epoch": 1.2712868240904545, + "grad_norm": 6.277491569519043, + "learning_rate": 1.291397224887331e-05, + "loss": 1.92, + "step": 2066 + }, + { + "epoch": 1.2719021613722021, + "grad_norm": 6.967935085296631, + "learning_rate": 1.290761531532374e-05, + "loss": 1.6429, + "step": 2067 + }, + { + "epoch": 
1.2725174986539498, + "grad_norm": 5.308424949645996, + "learning_rate": 1.2901257098020376e-05, + "loss": 1.9502, + "step": 2068 + }, + { + "epoch": 1.2731328359356973, + "grad_norm": 6.5552263259887695, + "learning_rate": 1.289489759977046e-05, + "loss": 2.0777, + "step": 2069 + }, + { + "epoch": 1.2737481732174447, + "grad_norm": 6.51767110824585, + "learning_rate": 1.2888536823381799e-05, + "loss": 1.7462, + "step": 2070 + }, + { + "epoch": 1.2743635104991924, + "grad_norm": 6.398855686187744, + "learning_rate": 1.2882174771662765e-05, + "loss": 1.8971, + "step": 2071 + }, + { + "epoch": 1.27497884778094, + "grad_norm": 6.961341381072998, + "learning_rate": 1.2875811447422301e-05, + "loss": 1.8035, + "step": 2072 + }, + { + "epoch": 1.2755941850626875, + "grad_norm": 6.383691787719727, + "learning_rate": 1.2869446853469898e-05, + "loss": 1.802, + "step": 2073 + }, + { + "epoch": 1.276209522344435, + "grad_norm": 6.159292221069336, + "learning_rate": 1.2863080992615625e-05, + "loss": 1.9595, + "step": 2074 + }, + { + "epoch": 1.2768248596261826, + "grad_norm": 6.988149642944336, + "learning_rate": 1.2856713867670089e-05, + "loss": 1.6949, + "step": 2075 + }, + { + "epoch": 1.27744019690793, + "grad_norm": 6.541243076324463, + "learning_rate": 1.285034548144448e-05, + "loss": 1.7613, + "step": 2076 + }, + { + "epoch": 1.2780555341896778, + "grad_norm": 7.711475849151611, + "learning_rate": 1.2843975836750519e-05, + "loss": 1.9795, + "step": 2077 + }, + { + "epoch": 1.2786708714714252, + "grad_norm": 5.123617649078369, + "learning_rate": 1.2837604936400507e-05, + "loss": 1.9129, + "step": 2078 + }, + { + "epoch": 1.279286208753173, + "grad_norm": 5.680095672607422, + "learning_rate": 1.2831232783207278e-05, + "loss": 1.9483, + "step": 2079 + }, + { + "epoch": 1.2799015460349203, + "grad_norm": 5.698746681213379, + "learning_rate": 1.2824859379984231e-05, + "loss": 1.9041, + "step": 2080 + }, + { + "epoch": 1.280516883316668, + "grad_norm": 6.3706955909729, + "learning_rate": 1.2818484729545319e-05, + "loss": 2.1206, + "step": 2081 + }, + { + "epoch": 1.2811322205984155, + "grad_norm": 6.491360664367676, + "learning_rate": 1.281210883470504e-05, + "loss": 1.7206, + "step": 2082 + }, + { + "epoch": 1.2817475578801631, + "grad_norm": 6.318089008331299, + "learning_rate": 1.2805731698278442e-05, + "loss": 1.9182, + "step": 2083 + }, + { + "epoch": 1.2823628951619106, + "grad_norm": 7.0237298011779785, + "learning_rate": 1.279935332308112e-05, + "loss": 1.6771, + "step": 2084 + }, + { + "epoch": 1.282978232443658, + "grad_norm": 7.087061405181885, + "learning_rate": 1.2792973711929222e-05, + "loss": 1.648, + "step": 2085 + }, + { + "epoch": 1.2835935697254057, + "grad_norm": 9.572964668273926, + "learning_rate": 1.2786592867639436e-05, + "loss": 2.001, + "step": 2086 + }, + { + "epoch": 1.2842089070071534, + "grad_norm": 6.295474052429199, + "learning_rate": 1.2780210793028994e-05, + "loss": 1.8359, + "step": 2087 + }, + { + "epoch": 1.2848242442889009, + "grad_norm": 6.836648464202881, + "learning_rate": 1.2773827490915678e-05, + "loss": 1.8321, + "step": 2088 + }, + { + "epoch": 1.2854395815706483, + "grad_norm": 6.847193717956543, + "learning_rate": 1.27674429641178e-05, + "loss": 2.0145, + "step": 2089 + }, + { + "epoch": 1.286054918852396, + "grad_norm": 6.232311248779297, + "learning_rate": 1.2761057215454229e-05, + "loss": 1.732, + "step": 2090 + }, + { + "epoch": 1.2866702561341437, + "grad_norm": 4.953662395477295, + "learning_rate": 1.2754670247744353e-05, + "loss": 1.8958, + "step": 
2091 + }, + { + "epoch": 1.287285593415891, + "grad_norm": 7.564837455749512, + "learning_rate": 1.2748282063808124e-05, + "loss": 1.867, + "step": 2092 + }, + { + "epoch": 1.2879009306976386, + "grad_norm": 7.094615936279297, + "learning_rate": 1.2741892666466006e-05, + "loss": 1.8882, + "step": 2093 + }, + { + "epoch": 1.2885162679793862, + "grad_norm": 6.5347394943237305, + "learning_rate": 1.2735502058539014e-05, + "loss": 1.8018, + "step": 2094 + }, + { + "epoch": 1.2891316052611337, + "grad_norm": 5.9305925369262695, + "learning_rate": 1.272911024284869e-05, + "loss": 1.8505, + "step": 2095 + }, + { + "epoch": 1.2897469425428814, + "grad_norm": 5.954204082489014, + "learning_rate": 1.2722717222217116e-05, + "loss": 1.9157, + "step": 2096 + }, + { + "epoch": 1.2903622798246288, + "grad_norm": 6.344577789306641, + "learning_rate": 1.27163229994669e-05, + "loss": 1.8982, + "step": 2097 + }, + { + "epoch": 1.2909776171063765, + "grad_norm": 6.827578067779541, + "learning_rate": 1.2709927577421182e-05, + "loss": 1.8881, + "step": 2098 + }, + { + "epoch": 1.291592954388124, + "grad_norm": 5.870503902435303, + "learning_rate": 1.2703530958903631e-05, + "loss": 1.8399, + "step": 2099 + }, + { + "epoch": 1.2922082916698716, + "grad_norm": 6.282092094421387, + "learning_rate": 1.2697133146738452e-05, + "loss": 1.8878, + "step": 2100 + }, + { + "epoch": 1.292823628951619, + "grad_norm": 7.489135265350342, + "learning_rate": 1.2690734143750361e-05, + "loss": 1.7916, + "step": 2101 + }, + { + "epoch": 1.2934389662333667, + "grad_norm": 5.7619452476501465, + "learning_rate": 1.2684333952764621e-05, + "loss": 1.922, + "step": 2102 + }, + { + "epoch": 1.2940543035151142, + "grad_norm": 6.563897132873535, + "learning_rate": 1.2677932576606998e-05, + "loss": 1.7081, + "step": 2103 + }, + { + "epoch": 1.2946696407968616, + "grad_norm": 6.583156585693359, + "learning_rate": 1.2671530018103797e-05, + "loss": 1.8959, + "step": 2104 + }, + { + "epoch": 1.2952849780786093, + "grad_norm": 5.24906063079834, + "learning_rate": 1.2665126280081837e-05, + "loss": 1.9124, + "step": 2105 + }, + { + "epoch": 1.295900315360357, + "grad_norm": 5.599966526031494, + "learning_rate": 1.2658721365368462e-05, + "loss": 1.6137, + "step": 2106 + }, + { + "epoch": 1.2965156526421044, + "grad_norm": 7.030892848968506, + "learning_rate": 1.265231527679153e-05, + "loss": 1.907, + "step": 2107 + }, + { + "epoch": 1.297130989923852, + "grad_norm": 6.394113063812256, + "learning_rate": 1.2645908017179425e-05, + "loss": 1.9027, + "step": 2108 + }, + { + "epoch": 1.2977463272055996, + "grad_norm": 6.403169631958008, + "learning_rate": 1.2639499589361041e-05, + "loss": 1.8386, + "step": 2109 + }, + { + "epoch": 1.2983616644873472, + "grad_norm": 7.9445576667785645, + "learning_rate": 1.2633089996165792e-05, + "loss": 1.8917, + "step": 2110 + }, + { + "epoch": 1.2989770017690947, + "grad_norm": 4.8253350257873535, + "learning_rate": 1.2626679240423605e-05, + "loss": 1.7827, + "step": 2111 + }, + { + "epoch": 1.2995923390508421, + "grad_norm": 6.9490509033203125, + "learning_rate": 1.2620267324964918e-05, + "loss": 2.1513, + "step": 2112 + }, + { + "epoch": 1.3002076763325898, + "grad_norm": 6.142038345336914, + "learning_rate": 1.2613854252620687e-05, + "loss": 1.9089, + "step": 2113 + }, + { + "epoch": 1.3008230136143373, + "grad_norm": 7.085801601409912, + "learning_rate": 1.2607440026222375e-05, + "loss": 1.8676, + "step": 2114 + }, + { + "epoch": 1.301438350896085, + "grad_norm": 6.0693440437316895, + "learning_rate": 
1.260102464860195e-05, + "loss": 1.7669, + "step": 2115 + }, + { + "epoch": 1.3020536881778324, + "grad_norm": 5.53743314743042, + "learning_rate": 1.25946081225919e-05, + "loss": 1.776, + "step": 2116 + }, + { + "epoch": 1.30266902545958, + "grad_norm": 6.472773551940918, + "learning_rate": 1.2588190451025209e-05, + "loss": 1.8411, + "step": 2117 + }, + { + "epoch": 1.3032843627413275, + "grad_norm": 5.2307538986206055, + "learning_rate": 1.258177163673537e-05, + "loss": 1.8812, + "step": 2118 + }, + { + "epoch": 1.3038997000230752, + "grad_norm": 6.74517297744751, + "learning_rate": 1.2575351682556387e-05, + "loss": 1.9905, + "step": 2119 + }, + { + "epoch": 1.3045150373048227, + "grad_norm": 7.657496452331543, + "learning_rate": 1.2568930591322757e-05, + "loss": 2.0003, + "step": 2120 + }, + { + "epoch": 1.3051303745865703, + "grad_norm": 6.8735504150390625, + "learning_rate": 1.2562508365869484e-05, + "loss": 2.0024, + "step": 2121 + }, + { + "epoch": 1.3057457118683178, + "grad_norm": 6.433995246887207, + "learning_rate": 1.2556085009032072e-05, + "loss": 2.0478, + "step": 2122 + }, + { + "epoch": 1.3063610491500655, + "grad_norm": 5.206351280212402, + "learning_rate": 1.2549660523646527e-05, + "loss": 1.7643, + "step": 2123 + }, + { + "epoch": 1.306976386431813, + "grad_norm": 8.849990844726562, + "learning_rate": 1.2543234912549352e-05, + "loss": 1.5846, + "step": 2124 + }, + { + "epoch": 1.3075917237135606, + "grad_norm": 6.543824195861816, + "learning_rate": 1.2536808178577542e-05, + "loss": 1.8779, + "step": 2125 + }, + { + "epoch": 1.308207060995308, + "grad_norm": 7.3105692863464355, + "learning_rate": 1.2530380324568599e-05, + "loss": 2.1621, + "step": 2126 + }, + { + "epoch": 1.3088223982770555, + "grad_norm": 6.387377738952637, + "learning_rate": 1.2523951353360504e-05, + "loss": 1.8982, + "step": 2127 + }, + { + "epoch": 1.3094377355588032, + "grad_norm": 7.859792232513428, + "learning_rate": 1.2517521267791746e-05, + "loss": 1.8868, + "step": 2128 + }, + { + "epoch": 1.3100530728405508, + "grad_norm": 9.245649337768555, + "learning_rate": 1.2511090070701297e-05, + "loss": 1.7528, + "step": 2129 + }, + { + "epoch": 1.3106684101222983, + "grad_norm": 6.4233293533325195, + "learning_rate": 1.2504657764928626e-05, + "loss": 1.8438, + "step": 2130 + }, + { + "epoch": 1.3112837474040457, + "grad_norm": 6.580201148986816, + "learning_rate": 1.2498224353313684e-05, + "loss": 1.9375, + "step": 2131 + }, + { + "epoch": 1.3118990846857934, + "grad_norm": 6.498640060424805, + "learning_rate": 1.2491789838696918e-05, + "loss": 1.8299, + "step": 2132 + }, + { + "epoch": 1.312514421967541, + "grad_norm": 5.804482460021973, + "learning_rate": 1.2485354223919255e-05, + "loss": 1.9033, + "step": 2133 + }, + { + "epoch": 1.3131297592492885, + "grad_norm": 6.321252346038818, + "learning_rate": 1.2478917511822113e-05, + "loss": 1.8635, + "step": 2134 + }, + { + "epoch": 1.313745096531036, + "grad_norm": 7.065130710601807, + "learning_rate": 1.2472479705247393e-05, + "loss": 2.0834, + "step": 2135 + }, + { + "epoch": 1.3143604338127837, + "grad_norm": 6.649792671203613, + "learning_rate": 1.2466040807037478e-05, + "loss": 1.9059, + "step": 2136 + }, + { + "epoch": 1.3149757710945311, + "grad_norm": 6.656622409820557, + "learning_rate": 1.2459600820035234e-05, + "loss": 1.9013, + "step": 2137 + }, + { + "epoch": 1.3155911083762788, + "grad_norm": 6.507527828216553, + "learning_rate": 1.2453159747084007e-05, + "loss": 1.84, + "step": 2138 + }, + { + "epoch": 1.3162064456580262, + "grad_norm": 
6.627927303314209, + "learning_rate": 1.2446717591027624e-05, + "loss": 1.7755, + "step": 2139 + }, + { + "epoch": 1.316821782939774, + "grad_norm": 6.907180309295654, + "learning_rate": 1.2440274354710384e-05, + "loss": 1.8907, + "step": 2140 + }, + { + "epoch": 1.3174371202215214, + "grad_norm": 5.53687047958374, + "learning_rate": 1.2433830040977077e-05, + "loss": 1.9946, + "step": 2141 + }, + { + "epoch": 1.318052457503269, + "grad_norm": 6.023593425750732, + "learning_rate": 1.2427384652672954e-05, + "loss": 1.9425, + "step": 2142 + }, + { + "epoch": 1.3186677947850165, + "grad_norm": 5.579769134521484, + "learning_rate": 1.2420938192643746e-05, + "loss": 2.0234, + "step": 2143 + }, + { + "epoch": 1.3192831320667642, + "grad_norm": 5.611184120178223, + "learning_rate": 1.2414490663735662e-05, + "loss": 1.9603, + "step": 2144 + }, + { + "epoch": 1.3198984693485116, + "grad_norm": 6.961946487426758, + "learning_rate": 1.2408042068795373e-05, + "loss": 1.8391, + "step": 2145 + }, + { + "epoch": 1.320513806630259, + "grad_norm": 6.191616535186768, + "learning_rate": 1.2401592410670026e-05, + "loss": 1.888, + "step": 2146 + }, + { + "epoch": 1.3211291439120068, + "grad_norm": 7.23009729385376, + "learning_rate": 1.2395141692207244e-05, + "loss": 1.9342, + "step": 2147 + }, + { + "epoch": 1.3217444811937544, + "grad_norm": 7.115652084350586, + "learning_rate": 1.2388689916255106e-05, + "loss": 1.9153, + "step": 2148 + }, + { + "epoch": 1.3223598184755019, + "grad_norm": 7.689627170562744, + "learning_rate": 1.2382237085662166e-05, + "loss": 1.9683, + "step": 2149 + }, + { + "epoch": 1.3229751557572493, + "grad_norm": 7.030697345733643, + "learning_rate": 1.2375783203277436e-05, + "loss": 2.0392, + "step": 2150 + }, + { + "epoch": 1.323590493038997, + "grad_norm": 6.643804550170898, + "learning_rate": 1.2369328271950404e-05, + "loss": 1.843, + "step": 2151 + }, + { + "epoch": 1.3242058303207447, + "grad_norm": 6.604363441467285, + "learning_rate": 1.236287229453101e-05, + "loss": 1.952, + "step": 2152 + }, + { + "epoch": 1.3248211676024921, + "grad_norm": 5.393376350402832, + "learning_rate": 1.2356415273869664e-05, + "loss": 2.0786, + "step": 2153 + }, + { + "epoch": 1.3254365048842396, + "grad_norm": 6.444981098175049, + "learning_rate": 1.2349957212817229e-05, + "loss": 1.9295, + "step": 2154 + }, + { + "epoch": 1.3260518421659873, + "grad_norm": 5.942962646484375, + "learning_rate": 1.2343498114225038e-05, + "loss": 1.9081, + "step": 2155 + }, + { + "epoch": 1.3266671794477347, + "grad_norm": 8.519148826599121, + "learning_rate": 1.2337037980944869e-05, + "loss": 1.8864, + "step": 2156 + }, + { + "epoch": 1.3272825167294824, + "grad_norm": 6.24669885635376, + "learning_rate": 1.2330576815828964e-05, + "loss": 1.8349, + "step": 2157 + }, + { + "epoch": 1.3278978540112298, + "grad_norm": 6.7482590675354, + "learning_rate": 1.2324114621730026e-05, + "loss": 1.7553, + "step": 2158 + }, + { + "epoch": 1.3285131912929775, + "grad_norm": 6.470263481140137, + "learning_rate": 1.2317651401501201e-05, + "loss": 1.9096, + "step": 2159 + }, + { + "epoch": 1.329128528574725, + "grad_norm": 6.477339744567871, + "learning_rate": 1.2311187157996091e-05, + "loss": 2.0025, + "step": 2160 + }, + { + "epoch": 1.3297438658564726, + "grad_norm": 4.998039245605469, + "learning_rate": 1.2304721894068758e-05, + "loss": 1.9969, + "step": 2161 + }, + { + "epoch": 1.33035920313822, + "grad_norm": 6.886364936828613, + "learning_rate": 1.2298255612573706e-05, + "loss": 1.9336, + "step": 2162 + }, + { + "epoch": 
1.3309745404199678, + "grad_norm": 6.979865550994873, + "learning_rate": 1.229178831636589e-05, + "loss": 2.0262, + "step": 2163 + }, + { + "epoch": 1.3315898777017152, + "grad_norm": 6.798940181732178, + "learning_rate": 1.2285320008300712e-05, + "loss": 1.7136, + "step": 2164 + }, + { + "epoch": 1.332205214983463, + "grad_norm": 6.835152626037598, + "learning_rate": 1.227885069123403e-05, + "loss": 1.7409, + "step": 2165 + }, + { + "epoch": 1.3328205522652103, + "grad_norm": 6.742611885070801, + "learning_rate": 1.2272380368022133e-05, + "loss": 1.8152, + "step": 2166 + }, + { + "epoch": 1.333435889546958, + "grad_norm": 6.631821632385254, + "learning_rate": 1.2265909041521764e-05, + "loss": 1.8632, + "step": 2167 + }, + { + "epoch": 1.3340512268287055, + "grad_norm": 6.229750633239746, + "learning_rate": 1.2259436714590105e-05, + "loss": 1.7941, + "step": 2168 + }, + { + "epoch": 1.334666564110453, + "grad_norm": 5.594045639038086, + "learning_rate": 1.2252963390084784e-05, + "loss": 1.9744, + "step": 2169 + }, + { + "epoch": 1.3352819013922006, + "grad_norm": 7.633639812469482, + "learning_rate": 1.2246489070863863e-05, + "loss": 1.9199, + "step": 2170 + }, + { + "epoch": 1.3358972386739483, + "grad_norm": 6.776426792144775, + "learning_rate": 1.2240013759785849e-05, + "loss": 2.0322, + "step": 2171 + }, + { + "epoch": 1.3365125759556957, + "grad_norm": 7.592427730560303, + "learning_rate": 1.2233537459709683e-05, + "loss": 1.9474, + "step": 2172 + }, + { + "epoch": 1.3371279132374432, + "grad_norm": 6.224386692047119, + "learning_rate": 1.2227060173494747e-05, + "loss": 1.9375, + "step": 2173 + }, + { + "epoch": 1.3377432505191909, + "grad_norm": 6.454556465148926, + "learning_rate": 1.2220581904000852e-05, + "loss": 1.9865, + "step": 2174 + }, + { + "epoch": 1.3383585878009385, + "grad_norm": 6.327072620391846, + "learning_rate": 1.2214102654088248e-05, + "loss": 2.0021, + "step": 2175 + }, + { + "epoch": 1.338973925082686, + "grad_norm": 8.767364501953125, + "learning_rate": 1.2207622426617619e-05, + "loss": 2.0447, + "step": 2176 + }, + { + "epoch": 1.3395892623644334, + "grad_norm": 5.996358871459961, + "learning_rate": 1.2201141224450073e-05, + "loss": 1.7859, + "step": 2177 + }, + { + "epoch": 1.340204599646181, + "grad_norm": 6.286624431610107, + "learning_rate": 1.2194659050447158e-05, + "loss": 1.7314, + "step": 2178 + }, + { + "epoch": 1.3408199369279286, + "grad_norm": 5.967553615570068, + "learning_rate": 1.2188175907470847e-05, + "loss": 1.873, + "step": 2179 + }, + { + "epoch": 1.3414352742096762, + "grad_norm": 6.480489730834961, + "learning_rate": 1.2181691798383536e-05, + "loss": 1.956, + "step": 2180 + }, + { + "epoch": 1.3420506114914237, + "grad_norm": 7.923617839813232, + "learning_rate": 1.2175206726048058e-05, + "loss": 1.7887, + "step": 2181 + }, + { + "epoch": 1.3426659487731714, + "grad_norm": 7.730405807495117, + "learning_rate": 1.216872069332766e-05, + "loss": 1.9244, + "step": 2182 + }, + { + "epoch": 1.3432812860549188, + "grad_norm": 6.6858134269714355, + "learning_rate": 1.2162233703086024e-05, + "loss": 1.7554, + "step": 2183 + }, + { + "epoch": 1.3438966233366665, + "grad_norm": 6.233524322509766, + "learning_rate": 1.215574575818724e-05, + "loss": 1.822, + "step": 2184 + }, + { + "epoch": 1.344511960618414, + "grad_norm": 5.1546311378479, + "learning_rate": 1.2149256861495837e-05, + "loss": 2.0123, + "step": 2185 + }, + { + "epoch": 1.3451272979001616, + "grad_norm": 6.400115489959717, + "learning_rate": 1.2142767015876755e-05, + "loss": 1.9773, + 
"step": 2186 + }, + { + "epoch": 1.345742635181909, + "grad_norm": 7.123510837554932, + "learning_rate": 1.2136276224195349e-05, + "loss": 1.7781, + "step": 2187 + }, + { + "epoch": 1.3463579724636565, + "grad_norm": 6.164758205413818, + "learning_rate": 1.21297844893174e-05, + "loss": 1.9062, + "step": 2188 + }, + { + "epoch": 1.3469733097454042, + "grad_norm": 8.82740306854248, + "learning_rate": 1.2123291814109102e-05, + "loss": 1.7173, + "step": 2189 + }, + { + "epoch": 1.3475886470271519, + "grad_norm": 6.688774585723877, + "learning_rate": 1.2116798201437061e-05, + "loss": 2.0423, + "step": 2190 + }, + { + "epoch": 1.3482039843088993, + "grad_norm": 6.676208019256592, + "learning_rate": 1.2110303654168305e-05, + "loss": 1.8486, + "step": 2191 + }, + { + "epoch": 1.3488193215906468, + "grad_norm": 6.992137432098389, + "learning_rate": 1.2103808175170264e-05, + "loss": 1.8703, + "step": 2192 + }, + { + "epoch": 1.3494346588723944, + "grad_norm": 6.354970932006836, + "learning_rate": 1.2097311767310791e-05, + "loss": 1.9068, + "step": 2193 + }, + { + "epoch": 1.3500499961541421, + "grad_norm": 6.434171676635742, + "learning_rate": 1.2090814433458136e-05, + "loss": 1.8572, + "step": 2194 + }, + { + "epoch": 1.3506653334358896, + "grad_norm": 6.184635639190674, + "learning_rate": 1.2084316176480972e-05, + "loss": 2.0011, + "step": 2195 + }, + { + "epoch": 1.351280670717637, + "grad_norm": 5.981064796447754, + "learning_rate": 1.2077816999248366e-05, + "loss": 1.8881, + "step": 2196 + }, + { + "epoch": 1.3518960079993847, + "grad_norm": 7.955066680908203, + "learning_rate": 1.2071316904629807e-05, + "loss": 1.5658, + "step": 2197 + }, + { + "epoch": 1.3525113452811321, + "grad_norm": 7.363027572631836, + "learning_rate": 1.2064815895495168e-05, + "loss": 2.0318, + "step": 2198 + }, + { + "epoch": 1.3531266825628798, + "grad_norm": 6.6910881996154785, + "learning_rate": 1.2058313974714746e-05, + "loss": 2.1111, + "step": 2199 + }, + { + "epoch": 1.3537420198446273, + "grad_norm": 5.546640396118164, + "learning_rate": 1.2051811145159232e-05, + "loss": 1.7631, + "step": 2200 + }, + { + "epoch": 1.354357357126375, + "grad_norm": 6.900455474853516, + "learning_rate": 1.2045307409699713e-05, + "loss": 1.7435, + "step": 2201 + }, + { + "epoch": 1.3549726944081224, + "grad_norm": 6.761716365814209, + "learning_rate": 1.2038802771207686e-05, + "loss": 1.8186, + "step": 2202 + }, + { + "epoch": 1.35558803168987, + "grad_norm": 5.86665153503418, + "learning_rate": 1.2032297232555039e-05, + "loss": 1.9378, + "step": 2203 + }, + { + "epoch": 1.3562033689716175, + "grad_norm": 5.9629011154174805, + "learning_rate": 1.2025790796614064e-05, + "loss": 1.8351, + "step": 2204 + }, + { + "epoch": 1.3568187062533652, + "grad_norm": 6.959474086761475, + "learning_rate": 1.2019283466257441e-05, + "loss": 1.8138, + "step": 2205 + }, + { + "epoch": 1.3574340435351127, + "grad_norm": 7.036727428436279, + "learning_rate": 1.2012775244358253e-05, + "loss": 1.859, + "step": 2206 + }, + { + "epoch": 1.35804938081686, + "grad_norm": 5.649549961090088, + "learning_rate": 1.2006266133789975e-05, + "loss": 1.8539, + "step": 2207 + }, + { + "epoch": 1.3586647180986078, + "grad_norm": 5.418045997619629, + "learning_rate": 1.1999756137426465e-05, + "loss": 1.8451, + "step": 2208 + }, + { + "epoch": 1.3592800553803555, + "grad_norm": 6.54177188873291, + "learning_rate": 1.199324525814199e-05, + "loss": 2.1551, + "step": 2209 + }, + { + "epoch": 1.359895392662103, + "grad_norm": 5.349142074584961, + "learning_rate": 
1.1986733498811186e-05, + "loss": 1.8479, + "step": 2210 + }, + { + "epoch": 1.3605107299438504, + "grad_norm": 6.6822285652160645, + "learning_rate": 1.1980220862309097e-05, + "loss": 2.1383, + "step": 2211 + }, + { + "epoch": 1.361126067225598, + "grad_norm": 6.210731506347656, + "learning_rate": 1.1973707351511137e-05, + "loss": 1.681, + "step": 2212 + }, + { + "epoch": 1.3617414045073457, + "grad_norm": 5.979877471923828, + "learning_rate": 1.196719296929312e-05, + "loss": 2.0135, + "step": 2213 + }, + { + "epoch": 1.3623567417890932, + "grad_norm": 6.762173652648926, + "learning_rate": 1.196067771853124e-05, + "loss": 1.9046, + "step": 2214 + }, + { + "epoch": 1.3629720790708406, + "grad_norm": 7.346059322357178, + "learning_rate": 1.1954161602102068e-05, + "loss": 1.9503, + "step": 2215 + }, + { + "epoch": 1.3635874163525883, + "grad_norm": 6.201185703277588, + "learning_rate": 1.1947644622882569e-05, + "loss": 1.7718, + "step": 2216 + }, + { + "epoch": 1.3642027536343357, + "grad_norm": 5.635894775390625, + "learning_rate": 1.1941126783750075e-05, + "loss": 1.9289, + "step": 2217 + }, + { + "epoch": 1.3648180909160834, + "grad_norm": 8.331083297729492, + "learning_rate": 1.1934608087582312e-05, + "loss": 1.8938, + "step": 2218 + }, + { + "epoch": 1.3654334281978309, + "grad_norm": 7.50520133972168, + "learning_rate": 1.1928088537257376e-05, + "loss": 1.9186, + "step": 2219 + }, + { + "epoch": 1.3660487654795785, + "grad_norm": 6.272292613983154, + "learning_rate": 1.1921568135653738e-05, + "loss": 1.7453, + "step": 2220 + }, + { + "epoch": 1.366664102761326, + "grad_norm": 6.3411455154418945, + "learning_rate": 1.1915046885650256e-05, + "loss": 1.9557, + "step": 2221 + }, + { + "epoch": 1.3672794400430737, + "grad_norm": 6.455621719360352, + "learning_rate": 1.1908524790126149e-05, + "loss": 1.7998, + "step": 2222 + }, + { + "epoch": 1.3678947773248211, + "grad_norm": 6.86384916305542, + "learning_rate": 1.1902001851961019e-05, + "loss": 1.8525, + "step": 2223 + }, + { + "epoch": 1.3685101146065688, + "grad_norm": 8.478190422058105, + "learning_rate": 1.1895478074034831e-05, + "loss": 1.6269, + "step": 2224 + }, + { + "epoch": 1.3691254518883162, + "grad_norm": 6.758494853973389, + "learning_rate": 1.1888953459227937e-05, + "loss": 1.9777, + "step": 2225 + }, + { + "epoch": 1.369740789170064, + "grad_norm": 6.048208713531494, + "learning_rate": 1.1882428010421036e-05, + "loss": 1.7524, + "step": 2226 + }, + { + "epoch": 1.3703561264518114, + "grad_norm": 5.756191253662109, + "learning_rate": 1.1875901730495215e-05, + "loss": 1.888, + "step": 2227 + }, + { + "epoch": 1.370971463733559, + "grad_norm": 6.099569797515869, + "learning_rate": 1.1869374622331919e-05, + "loss": 1.8502, + "step": 2228 + }, + { + "epoch": 1.3715868010153065, + "grad_norm": 7.859236717224121, + "learning_rate": 1.1862846688812956e-05, + "loss": 1.947, + "step": 2229 + }, + { + "epoch": 1.372202138297054, + "grad_norm": 6.862178802490234, + "learning_rate": 1.1856317932820507e-05, + "loss": 2.0027, + "step": 2230 + }, + { + "epoch": 1.3728174755788016, + "grad_norm": 7.3927717208862305, + "learning_rate": 1.1849788357237108e-05, + "loss": 1.7579, + "step": 2231 + }, + { + "epoch": 1.3734328128605493, + "grad_norm": 6.828779220581055, + "learning_rate": 1.184325796494566e-05, + "loss": 1.945, + "step": 2232 + }, + { + "epoch": 1.3740481501422968, + "grad_norm": 6.328741550445557, + "learning_rate": 1.1836726758829427e-05, + "loss": 1.8509, + "step": 2233 + }, + { + "epoch": 1.3746634874240442, + "grad_norm": 
5.58297061920166, + "learning_rate": 1.1830194741772025e-05, + "loss": 1.9606, + "step": 2234 + }, + { + "epoch": 1.3752788247057919, + "grad_norm": 5.203289031982422, + "learning_rate": 1.1823661916657441e-05, + "loss": 1.7226, + "step": 2235 + }, + { + "epoch": 1.3758941619875396, + "grad_norm": 6.080706596374512, + "learning_rate": 1.1817128286370004e-05, + "loss": 1.9741, + "step": 2236 + }, + { + "epoch": 1.376509499269287, + "grad_norm": 7.550291061401367, + "learning_rate": 1.1810593853794409e-05, + "loss": 1.6637, + "step": 2237 + }, + { + "epoch": 1.3771248365510345, + "grad_norm": 6.749922752380371, + "learning_rate": 1.1804058621815699e-05, + "loss": 1.8855, + "step": 2238 + }, + { + "epoch": 1.3777401738327821, + "grad_norm": 7.871387004852295, + "learning_rate": 1.1797522593319277e-05, + "loss": 1.7782, + "step": 2239 + }, + { + "epoch": 1.3783555111145296, + "grad_norm": 5.66064453125, + "learning_rate": 1.1790985771190886e-05, + "loss": 1.9743, + "step": 2240 + }, + { + "epoch": 1.3789708483962773, + "grad_norm": 5.007917881011963, + "learning_rate": 1.1784448158316633e-05, + "loss": 1.8305, + "step": 2241 + }, + { + "epoch": 1.3795861856780247, + "grad_norm": 7.7048258781433105, + "learning_rate": 1.1777909757582965e-05, + "loss": 1.8259, + "step": 2242 + }, + { + "epoch": 1.3802015229597724, + "grad_norm": 6.340480804443359, + "learning_rate": 1.1771370571876681e-05, + "loss": 1.7235, + "step": 2243 + }, + { + "epoch": 1.3808168602415198, + "grad_norm": 6.273292541503906, + "learning_rate": 1.1764830604084923e-05, + "loss": 1.8991, + "step": 2244 + }, + { + "epoch": 1.3814321975232675, + "grad_norm": 6.216590881347656, + "learning_rate": 1.1758289857095182e-05, + "loss": 1.8925, + "step": 2245 + }, + { + "epoch": 1.382047534805015, + "grad_norm": 7.156593322753906, + "learning_rate": 1.1751748333795292e-05, + "loss": 1.9535, + "step": 2246 + }, + { + "epoch": 1.3826628720867626, + "grad_norm": 6.291649341583252, + "learning_rate": 1.1745206037073426e-05, + "loss": 1.8175, + "step": 2247 + }, + { + "epoch": 1.38327820936851, + "grad_norm": 5.985632419586182, + "learning_rate": 1.1738662969818102e-05, + "loss": 1.9181, + "step": 2248 + }, + { + "epoch": 1.3838935466502575, + "grad_norm": 5.892245769500732, + "learning_rate": 1.1732119134918185e-05, + "loss": 2.0705, + "step": 2249 + }, + { + "epoch": 1.3845088839320052, + "grad_norm": 5.83536434173584, + "learning_rate": 1.1725574535262863e-05, + "loss": 1.8288, + "step": 2250 + }, + { + "epoch": 1.385124221213753, + "grad_norm": 7.2269487380981445, + "learning_rate": 1.1719029173741675e-05, + "loss": 1.9272, + "step": 2251 + }, + { + "epoch": 1.3857395584955003, + "grad_norm": 7.1051530838012695, + "learning_rate": 1.171248305324449e-05, + "loss": 1.9291, + "step": 2252 + }, + { + "epoch": 1.3863548957772478, + "grad_norm": 7.145440578460693, + "learning_rate": 1.1705936176661516e-05, + "loss": 1.8668, + "step": 2253 + }, + { + "epoch": 1.3869702330589955, + "grad_norm": 7.432530879974365, + "learning_rate": 1.1699388546883291e-05, + "loss": 1.8955, + "step": 2254 + }, + { + "epoch": 1.3875855703407431, + "grad_norm": 7.0112762451171875, + "learning_rate": 1.1692840166800688e-05, + "loss": 1.8395, + "step": 2255 + }, + { + "epoch": 1.3882009076224906, + "grad_norm": 6.851318359375, + "learning_rate": 1.168629103930491e-05, + "loss": 1.8933, + "step": 2256 + }, + { + "epoch": 1.388816244904238, + "grad_norm": 6.61049222946167, + "learning_rate": 1.167974116728749e-05, + "loss": 1.9449, + "step": 2257 + }, + { + "epoch": 
1.3894315821859857, + "grad_norm": 5.308497428894043, + "learning_rate": 1.1673190553640295e-05, + "loss": 1.8495, + "step": 2258 + }, + { + "epoch": 1.3900469194677332, + "grad_norm": 6.613244533538818, + "learning_rate": 1.1666639201255507e-05, + "loss": 1.8188, + "step": 2259 + }, + { + "epoch": 1.3906622567494809, + "grad_norm": 6.320141792297363, + "learning_rate": 1.1660087113025647e-05, + "loss": 1.9283, + "step": 2260 + }, + { + "epoch": 1.3912775940312283, + "grad_norm": 8.440532684326172, + "learning_rate": 1.1653534291843556e-05, + "loss": 1.7837, + "step": 2261 + }, + { + "epoch": 1.391892931312976, + "grad_norm": 6.399055004119873, + "learning_rate": 1.1646980740602394e-05, + "loss": 1.7329, + "step": 2262 + }, + { + "epoch": 1.3925082685947234, + "grad_norm": 6.389463424682617, + "learning_rate": 1.1640426462195654e-05, + "loss": 1.7302, + "step": 2263 + }, + { + "epoch": 1.393123605876471, + "grad_norm": 6.416095733642578, + "learning_rate": 1.1633871459517139e-05, + "loss": 1.8769, + "step": 2264 + }, + { + "epoch": 1.3937389431582186, + "grad_norm": 6.721067428588867, + "learning_rate": 1.162731573546098e-05, + "loss": 2.0009, + "step": 2265 + }, + { + "epoch": 1.3943542804399662, + "grad_norm": 5.912996292114258, + "learning_rate": 1.1620759292921617e-05, + "loss": 1.8021, + "step": 2266 + }, + { + "epoch": 1.3949696177217137, + "grad_norm": 5.729987144470215, + "learning_rate": 1.1614202134793823e-05, + "loss": 1.803, + "step": 2267 + }, + { + "epoch": 1.3955849550034611, + "grad_norm": 7.36344575881958, + "learning_rate": 1.1607644263972669e-05, + "loss": 1.8207, + "step": 2268 + }, + { + "epoch": 1.3962002922852088, + "grad_norm": 5.044934272766113, + "learning_rate": 1.160108568335355e-05, + "loss": 1.9526, + "step": 2269 + }, + { + "epoch": 1.3968156295669565, + "grad_norm": 6.113140106201172, + "learning_rate": 1.1594526395832176e-05, + "loss": 1.9709, + "step": 2270 + }, + { + "epoch": 1.397430966848704, + "grad_norm": 6.08885383605957, + "learning_rate": 1.1587966404304565e-05, + "loss": 2.0143, + "step": 2271 + }, + { + "epoch": 1.3980463041304514, + "grad_norm": 6.574419975280762, + "learning_rate": 1.158140571166705e-05, + "loss": 1.8099, + "step": 2272 + }, + { + "epoch": 1.398661641412199, + "grad_norm": 5.466810703277588, + "learning_rate": 1.1574844320816263e-05, + "loss": 2.0662, + "step": 2273 + }, + { + "epoch": 1.3992769786939467, + "grad_norm": 6.716195106506348, + "learning_rate": 1.156828223464916e-05, + "loss": 2.0498, + "step": 2274 + }, + { + "epoch": 1.3998923159756942, + "grad_norm": 7.047515869140625, + "learning_rate": 1.1561719456062995e-05, + "loss": 1.9584, + "step": 2275 + }, + { + "epoch": 1.4005076532574416, + "grad_norm": 5.370654106140137, + "learning_rate": 1.1555155987955322e-05, + "loss": 1.7725, + "step": 2276 + }, + { + "epoch": 1.4011229905391893, + "grad_norm": 5.7213969230651855, + "learning_rate": 1.1548591833224015e-05, + "loss": 2.0449, + "step": 2277 + }, + { + "epoch": 1.4017383278209368, + "grad_norm": 6.119516849517822, + "learning_rate": 1.1542026994767235e-05, + "loss": 1.9643, + "step": 2278 + }, + { + "epoch": 1.4023536651026844, + "grad_norm": 5.4841461181640625, + "learning_rate": 1.1535461475483461e-05, + "loss": 1.821, + "step": 2279 + }, + { + "epoch": 1.402969002384432, + "grad_norm": 6.255189418792725, + "learning_rate": 1.152889527827145e-05, + "loss": 2.0709, + "step": 2280 + }, + { + "epoch": 1.4035843396661796, + "grad_norm": 7.939350605010986, + "learning_rate": 1.1522328406030285e-05, + "loss": 2.0255, + 
"step": 2281 + }, + { + "epoch": 1.404199676947927, + "grad_norm": 6.363982677459717, + "learning_rate": 1.151576086165933e-05, + "loss": 1.7803, + "step": 2282 + }, + { + "epoch": 1.4048150142296747, + "grad_norm": 6.102575302124023, + "learning_rate": 1.150919264805825e-05, + "loss": 1.8119, + "step": 2283 + }, + { + "epoch": 1.4054303515114221, + "grad_norm": 5.805746078491211, + "learning_rate": 1.1502623768127004e-05, + "loss": 2.0284, + "step": 2284 + }, + { + "epoch": 1.4060456887931698, + "grad_norm": 8.096291542053223, + "learning_rate": 1.1496054224765846e-05, + "loss": 1.8043, + "step": 2285 + }, + { + "epoch": 1.4066610260749173, + "grad_norm": 5.874636650085449, + "learning_rate": 1.1489484020875325e-05, + "loss": 1.663, + "step": 2286 + }, + { + "epoch": 1.407276363356665, + "grad_norm": 6.198155403137207, + "learning_rate": 1.1482913159356281e-05, + "loss": 2.0943, + "step": 2287 + }, + { + "epoch": 1.4078917006384124, + "grad_norm": 6.561847686767578, + "learning_rate": 1.1476341643109842e-05, + "loss": 1.916, + "step": 2288 + }, + { + "epoch": 1.40850703792016, + "grad_norm": 6.519033432006836, + "learning_rate": 1.1469769475037427e-05, + "loss": 2.0004, + "step": 2289 + }, + { + "epoch": 1.4091223752019075, + "grad_norm": 7.368416786193848, + "learning_rate": 1.1463196658040738e-05, + "loss": 1.803, + "step": 2290 + }, + { + "epoch": 1.409737712483655, + "grad_norm": 5.362823009490967, + "learning_rate": 1.1456623195021778e-05, + "loss": 1.8274, + "step": 2291 + }, + { + "epoch": 1.4103530497654027, + "grad_norm": 6.224695205688477, + "learning_rate": 1.1450049088882816e-05, + "loss": 1.9873, + "step": 2292 + }, + { + "epoch": 1.4109683870471503, + "grad_norm": 5.657955169677734, + "learning_rate": 1.1443474342526418e-05, + "loss": 1.8273, + "step": 2293 + }, + { + "epoch": 1.4115837243288978, + "grad_norm": 6.50985050201416, + "learning_rate": 1.1436898958855427e-05, + "loss": 2.0522, + "step": 2294 + }, + { + "epoch": 1.4121990616106452, + "grad_norm": 6.729806900024414, + "learning_rate": 1.143032294077297e-05, + "loss": 1.9185, + "step": 2295 + }, + { + "epoch": 1.412814398892393, + "grad_norm": 6.0130839347839355, + "learning_rate": 1.1423746291182454e-05, + "loss": 2.0464, + "step": 2296 + }, + { + "epoch": 1.4134297361741406, + "grad_norm": 6.299532890319824, + "learning_rate": 1.1417169012987561e-05, + "loss": 1.95, + "step": 2297 + }, + { + "epoch": 1.414045073455888, + "grad_norm": 6.74490213394165, + "learning_rate": 1.1410591109092257e-05, + "loss": 1.8404, + "step": 2298 + }, + { + "epoch": 1.4146604107376355, + "grad_norm": 6.4402947425842285, + "learning_rate": 1.1404012582400778e-05, + "loss": 1.8087, + "step": 2299 + }, + { + "epoch": 1.4152757480193832, + "grad_norm": 6.104351043701172, + "learning_rate": 1.1397433435817639e-05, + "loss": 1.8287, + "step": 2300 + }, + { + "epoch": 1.4158910853011306, + "grad_norm": 6.515002727508545, + "learning_rate": 1.1390853672247629e-05, + "loss": 1.8657, + "step": 2301 + }, + { + "epoch": 1.4165064225828783, + "grad_norm": 6.061358451843262, + "learning_rate": 1.1384273294595802e-05, + "loss": 2.0729, + "step": 2302 + }, + { + "epoch": 1.4171217598646257, + "grad_norm": 6.329962253570557, + "learning_rate": 1.1377692305767496e-05, + "loss": 1.8822, + "step": 2303 + }, + { + "epoch": 1.4177370971463734, + "grad_norm": 7.848086833953857, + "learning_rate": 1.1371110708668307e-05, + "loss": 1.8896, + "step": 2304 + }, + { + "epoch": 1.4183524344281209, + "grad_norm": 6.627632141113281, + "learning_rate": 
1.1364528506204109e-05, + "loss": 1.8025, + "step": 2305 + }, + { + "epoch": 1.4189677717098685, + "grad_norm": 6.373751163482666, + "learning_rate": 1.1357945701281035e-05, + "loss": 1.8711, + "step": 2306 + }, + { + "epoch": 1.419583108991616, + "grad_norm": 6.432198524475098, + "learning_rate": 1.1351362296805487e-05, + "loss": 1.9319, + "step": 2307 + }, + { + "epoch": 1.4201984462733637, + "grad_norm": 5.823531150817871, + "learning_rate": 1.1344778295684135e-05, + "loss": 1.8626, + "step": 2308 + }, + { + "epoch": 1.4208137835551111, + "grad_norm": 7.4285502433776855, + "learning_rate": 1.133819370082391e-05, + "loss": 1.9753, + "step": 2309 + }, + { + "epoch": 1.4214291208368586, + "grad_norm": 7.219712734222412, + "learning_rate": 1.1331608515132001e-05, + "loss": 1.7538, + "step": 2310 + }, + { + "epoch": 1.4220444581186062, + "grad_norm": 5.423766613006592, + "learning_rate": 1.1325022741515865e-05, + "loss": 1.7485, + "step": 2311 + }, + { + "epoch": 1.422659795400354, + "grad_norm": 6.4992475509643555, + "learning_rate": 1.1318436382883216e-05, + "loss": 1.916, + "step": 2312 + }, + { + "epoch": 1.4232751326821014, + "grad_norm": 7.029430389404297, + "learning_rate": 1.1311849442142025e-05, + "loss": 1.7696, + "step": 2313 + }, + { + "epoch": 1.4238904699638488, + "grad_norm": 6.025760650634766, + "learning_rate": 1.130526192220052e-05, + "loss": 1.857, + "step": 2314 + }, + { + "epoch": 1.4245058072455965, + "grad_norm": 7.342560768127441, + "learning_rate": 1.1298673825967184e-05, + "loss": 1.8766, + "step": 2315 + }, + { + "epoch": 1.4251211445273442, + "grad_norm": 5.143774509429932, + "learning_rate": 1.1292085156350756e-05, + "loss": 1.8811, + "step": 2316 + }, + { + "epoch": 1.4257364818090916, + "grad_norm": 6.048152446746826, + "learning_rate": 1.128549591626023e-05, + "loss": 1.8205, + "step": 2317 + }, + { + "epoch": 1.426351819090839, + "grad_norm": 6.477710723876953, + "learning_rate": 1.1278906108604848e-05, + "loss": 1.9174, + "step": 2318 + }, + { + "epoch": 1.4269671563725868, + "grad_norm": 5.9795379638671875, + "learning_rate": 1.1272315736294108e-05, + "loss": 1.7331, + "step": 2319 + }, + { + "epoch": 1.4275824936543342, + "grad_norm": 6.416720867156982, + "learning_rate": 1.1265724802237749e-05, + "loss": 1.9671, + "step": 2320 + }, + { + "epoch": 1.4281978309360819, + "grad_norm": 5.477086067199707, + "learning_rate": 1.1259133309345769e-05, + "loss": 1.8066, + "step": 2321 + }, + { + "epoch": 1.4288131682178293, + "grad_norm": 6.784786701202393, + "learning_rate": 1.1252541260528398e-05, + "loss": 1.9609, + "step": 2322 + }, + { + "epoch": 1.429428505499577, + "grad_norm": 6.02249002456665, + "learning_rate": 1.1245948658696127e-05, + "loss": 1.7565, + "step": 2323 + }, + { + "epoch": 1.4300438427813245, + "grad_norm": 6.219879627227783, + "learning_rate": 1.1239355506759681e-05, + "loss": 2.0607, + "step": 2324 + }, + { + "epoch": 1.4306591800630721, + "grad_norm": 6.227684020996094, + "learning_rate": 1.1232761807630032e-05, + "loss": 1.9147, + "step": 2325 + }, + { + "epoch": 1.4312745173448196, + "grad_norm": 6.309422492980957, + "learning_rate": 1.1226167564218393e-05, + "loss": 2.1874, + "step": 2326 + }, + { + "epoch": 1.4318898546265673, + "grad_norm": 9.383749961853027, + "learning_rate": 1.1219572779436215e-05, + "loss": 2.1156, + "step": 2327 + }, + { + "epoch": 1.4325051919083147, + "grad_norm": 6.64164924621582, + "learning_rate": 1.121297745619519e-05, + "loss": 1.8646, + "step": 2328 + }, + { + "epoch": 1.4331205291900622, + "grad_norm": 
6.672055244445801, + "learning_rate": 1.1206381597407248e-05, + "loss": 1.9096, + "step": 2329 + }, + { + "epoch": 1.4337358664718098, + "grad_norm": 5.819342613220215, + "learning_rate": 1.1199785205984558e-05, + "loss": 2.0823, + "step": 2330 + }, + { + "epoch": 1.4343512037535575, + "grad_norm": 5.875044822692871, + "learning_rate": 1.1193188284839518e-05, + "loss": 1.6808, + "step": 2331 + }, + { + "epoch": 1.434966541035305, + "grad_norm": 5.9393463134765625, + "learning_rate": 1.1186590836884766e-05, + "loss": 1.9199, + "step": 2332 + }, + { + "epoch": 1.4355818783170524, + "grad_norm": 8.262505531311035, + "learning_rate": 1.1179992865033164e-05, + "loss": 1.6823, + "step": 2333 + }, + { + "epoch": 1.4361972155988, + "grad_norm": 7.331518173217773, + "learning_rate": 1.117339437219782e-05, + "loss": 1.8937, + "step": 2334 + }, + { + "epoch": 1.4368125528805478, + "grad_norm": 6.3165974617004395, + "learning_rate": 1.1166795361292055e-05, + "loss": 1.9772, + "step": 2335 + }, + { + "epoch": 1.4374278901622952, + "grad_norm": 5.99014949798584, + "learning_rate": 1.116019583522943e-05, + "loss": 1.86, + "step": 2336 + }, + { + "epoch": 1.4380432274440427, + "grad_norm": 8.626955032348633, + "learning_rate": 1.115359579692373e-05, + "loss": 2.0016, + "step": 2337 + }, + { + "epoch": 1.4386585647257903, + "grad_norm": 6.55992317199707, + "learning_rate": 1.1146995249288967e-05, + "loss": 1.9861, + "step": 2338 + }, + { + "epoch": 1.439273902007538, + "grad_norm": 8.169011116027832, + "learning_rate": 1.1140394195239376e-05, + "loss": 1.8785, + "step": 2339 + }, + { + "epoch": 1.4398892392892855, + "grad_norm": 6.432872772216797, + "learning_rate": 1.113379263768942e-05, + "loss": 1.8774, + "step": 2340 + }, + { + "epoch": 1.440504576571033, + "grad_norm": 7.136188983917236, + "learning_rate": 1.1127190579553775e-05, + "loss": 2.1545, + "step": 2341 + }, + { + "epoch": 1.4411199138527806, + "grad_norm": 7.584582328796387, + "learning_rate": 1.1120588023747352e-05, + "loss": 1.7953, + "step": 2342 + }, + { + "epoch": 1.441735251134528, + "grad_norm": 5.540750980377197, + "learning_rate": 1.1113984973185268e-05, + "loss": 1.7951, + "step": 2343 + }, + { + "epoch": 1.4423505884162757, + "grad_norm": 6.556468486785889, + "learning_rate": 1.1107381430782869e-05, + "loss": 2.0028, + "step": 2344 + }, + { + "epoch": 1.4429659256980232, + "grad_norm": 6.627414703369141, + "learning_rate": 1.1100777399455707e-05, + "loss": 1.8623, + "step": 2345 + }, + { + "epoch": 1.4435812629797709, + "grad_norm": 6.032796859741211, + "learning_rate": 1.1094172882119562e-05, + "loss": 1.8575, + "step": 2346 + }, + { + "epoch": 1.4441966002615183, + "grad_norm": 6.868031024932861, + "learning_rate": 1.1087567881690422e-05, + "loss": 1.9723, + "step": 2347 + }, + { + "epoch": 1.444811937543266, + "grad_norm": 8.526298522949219, + "learning_rate": 1.1080962401084487e-05, + "loss": 1.9254, + "step": 2348 + }, + { + "epoch": 1.4454272748250134, + "grad_norm": 8.089234352111816, + "learning_rate": 1.1074356443218175e-05, + "loss": 1.8922, + "step": 2349 + }, + { + "epoch": 1.446042612106761, + "grad_norm": 5.600485801696777, + "learning_rate": 1.1067750011008107e-05, + "loss": 2.0996, + "step": 2350 + }, + { + "epoch": 1.4466579493885086, + "grad_norm": 5.474144458770752, + "learning_rate": 1.1061143107371122e-05, + "loss": 1.9648, + "step": 2351 + }, + { + "epoch": 1.447273286670256, + "grad_norm": 7.588264465332031, + "learning_rate": 1.1054535735224257e-05, + "loss": 2.0592, + "step": 2352 + }, + { + "epoch": 
1.4478886239520037, + "grad_norm": 6.375146389007568, + "learning_rate": 1.1047927897484764e-05, + "loss": 1.9795, + "step": 2353 + }, + { + "epoch": 1.4485039612337514, + "grad_norm": 6.155760288238525, + "learning_rate": 1.1041319597070102e-05, + "loss": 1.8803, + "step": 2354 + }, + { + "epoch": 1.4491192985154988, + "grad_norm": 5.631165981292725, + "learning_rate": 1.1034710836897922e-05, + "loss": 2.0403, + "step": 2355 + }, + { + "epoch": 1.4497346357972463, + "grad_norm": 6.545266628265381, + "learning_rate": 1.1028101619886095e-05, + "loss": 1.84, + "step": 2356 + }, + { + "epoch": 1.450349973078994, + "grad_norm": 6.879766941070557, + "learning_rate": 1.1021491948952676e-05, + "loss": 1.6819, + "step": 2357 + }, + { + "epoch": 1.4509653103607416, + "grad_norm": 7.077706813812256, + "learning_rate": 1.1014881827015939e-05, + "loss": 2.0858, + "step": 2358 + }, + { + "epoch": 1.451580647642489, + "grad_norm": 6.282973766326904, + "learning_rate": 1.1008271256994338e-05, + "loss": 1.9167, + "step": 2359 + }, + { + "epoch": 1.4521959849242365, + "grad_norm": 5.978004455566406, + "learning_rate": 1.1001660241806542e-05, + "loss": 1.9873, + "step": 2360 + }, + { + "epoch": 1.4528113222059842, + "grad_norm": 7.079991340637207, + "learning_rate": 1.0995048784371405e-05, + "loss": 2.1031, + "step": 2361 + }, + { + "epoch": 1.4534266594877316, + "grad_norm": 5.172896385192871, + "learning_rate": 1.0988436887607984e-05, + "loss": 1.8145, + "step": 2362 + }, + { + "epoch": 1.4540419967694793, + "grad_norm": 6.356195449829102, + "learning_rate": 1.0981824554435518e-05, + "loss": 1.7879, + "step": 2363 + }, + { + "epoch": 1.4546573340512268, + "grad_norm": 6.690424919128418, + "learning_rate": 1.0975211787773455e-05, + "loss": 1.8523, + "step": 2364 + }, + { + "epoch": 1.4552726713329744, + "grad_norm": 6.830735683441162, + "learning_rate": 1.096859859054142e-05, + "loss": 1.7875, + "step": 2365 + }, + { + "epoch": 1.455888008614722, + "grad_norm": 6.520412445068359, + "learning_rate": 1.096198496565924e-05, + "loss": 1.9416, + "step": 2366 + }, + { + "epoch": 1.4565033458964696, + "grad_norm": 6.638843059539795, + "learning_rate": 1.095537091604692e-05, + "loss": 1.8385, + "step": 2367 + }, + { + "epoch": 1.457118683178217, + "grad_norm": 6.998364448547363, + "learning_rate": 1.0948756444624664e-05, + "loss": 1.9666, + "step": 2368 + }, + { + "epoch": 1.4577340204599647, + "grad_norm": 5.994791507720947, + "learning_rate": 1.0942141554312847e-05, + "loss": 1.9586, + "step": 2369 + }, + { + "epoch": 1.4583493577417121, + "grad_norm": 5.253006935119629, + "learning_rate": 1.0935526248032047e-05, + "loss": 1.8673, + "step": 2370 + }, + { + "epoch": 1.4589646950234596, + "grad_norm": 7.259736061096191, + "learning_rate": 1.0928910528703007e-05, + "loss": 1.7796, + "step": 2371 + }, + { + "epoch": 1.4595800323052073, + "grad_norm": 5.429383754730225, + "learning_rate": 1.0922294399246671e-05, + "loss": 1.9549, + "step": 2372 + }, + { + "epoch": 1.460195369586955, + "grad_norm": 6.636396408081055, + "learning_rate": 1.0915677862584149e-05, + "loss": 1.7924, + "step": 2373 + }, + { + "epoch": 1.4608107068687024, + "grad_norm": 6.2201032638549805, + "learning_rate": 1.0909060921636739e-05, + "loss": 1.775, + "step": 2374 + }, + { + "epoch": 1.4614260441504499, + "grad_norm": 6.070355415344238, + "learning_rate": 1.0902443579325914e-05, + "loss": 1.7546, + "step": 2375 + }, + { + "epoch": 1.4620413814321975, + "grad_norm": 6.176407814025879, + "learning_rate": 1.0895825838573326e-05, + "loss": 1.6541, 
+ "step": 2376 + }, + { + "epoch": 1.4626567187139452, + "grad_norm": 7.2055206298828125, + "learning_rate": 1.0889207702300803e-05, + "loss": 2.0533, + "step": 2377 + }, + { + "epoch": 1.4632720559956927, + "grad_norm": 7.779618263244629, + "learning_rate": 1.0882589173430346e-05, + "loss": 2.0351, + "step": 2378 + }, + { + "epoch": 1.46388739327744, + "grad_norm": 6.022552967071533, + "learning_rate": 1.087597025488413e-05, + "loss": 1.8634, + "step": 2379 + }, + { + "epoch": 1.4645027305591878, + "grad_norm": 6.562661170959473, + "learning_rate": 1.0869350949584505e-05, + "loss": 1.7158, + "step": 2380 + }, + { + "epoch": 1.4651180678409352, + "grad_norm": 7.625369548797607, + "learning_rate": 1.0862731260453985e-05, + "loss": 1.858, + "step": 2381 + }, + { + "epoch": 1.465733405122683, + "grad_norm": 6.51851224899292, + "learning_rate": 1.0856111190415262e-05, + "loss": 2.1247, + "step": 2382 + }, + { + "epoch": 1.4663487424044304, + "grad_norm": 5.961126804351807, + "learning_rate": 1.0849490742391184e-05, + "loss": 1.8121, + "step": 2383 + }, + { + "epoch": 1.466964079686178, + "grad_norm": 8.133834838867188, + "learning_rate": 1.0842869919304784e-05, + "loss": 1.7851, + "step": 2384 + }, + { + "epoch": 1.4675794169679255, + "grad_norm": 9.60153579711914, + "learning_rate": 1.0836248724079241e-05, + "loss": 2.0655, + "step": 2385 + }, + { + "epoch": 1.4681947542496732, + "grad_norm": 5.977877140045166, + "learning_rate": 1.0829627159637914e-05, + "loss": 2.0621, + "step": 2386 + }, + { + "epoch": 1.4688100915314206, + "grad_norm": 6.465794086456299, + "learning_rate": 1.0823005228904315e-05, + "loss": 1.6934, + "step": 2387 + }, + { + "epoch": 1.4694254288131683, + "grad_norm": 6.1588921546936035, + "learning_rate": 1.0816382934802123e-05, + "loss": 1.953, + "step": 2388 + }, + { + "epoch": 1.4700407660949157, + "grad_norm": 5.5303168296813965, + "learning_rate": 1.0809760280255176e-05, + "loss": 1.8767, + "step": 2389 + }, + { + "epoch": 1.4706561033766634, + "grad_norm": 6.125265121459961, + "learning_rate": 1.080313726818747e-05, + "loss": 1.9865, + "step": 2390 + }, + { + "epoch": 1.4712714406584109, + "grad_norm": 6.738412380218506, + "learning_rate": 1.0796513901523156e-05, + "loss": 1.8203, + "step": 2391 + }, + { + "epoch": 1.4718867779401585, + "grad_norm": 5.981904983520508, + "learning_rate": 1.078989018318655e-05, + "loss": 2.0517, + "step": 2392 + }, + { + "epoch": 1.472502115221906, + "grad_norm": 6.106054306030273, + "learning_rate": 1.0783266116102116e-05, + "loss": 1.9025, + "step": 2393 + }, + { + "epoch": 1.4731174525036534, + "grad_norm": 7.000411510467529, + "learning_rate": 1.077664170319448e-05, + "loss": 2.0043, + "step": 2394 + }, + { + "epoch": 1.4737327897854011, + "grad_norm": 6.386091709136963, + "learning_rate": 1.0770016947388407e-05, + "loss": 1.9847, + "step": 2395 + }, + { + "epoch": 1.4743481270671488, + "grad_norm": 7.239955902099609, + "learning_rate": 1.0763391851608827e-05, + "loss": 1.9716, + "step": 2396 + }, + { + "epoch": 1.4749634643488962, + "grad_norm": 5.924992084503174, + "learning_rate": 1.0756766418780811e-05, + "loss": 1.9542, + "step": 2397 + }, + { + "epoch": 1.4755788016306437, + "grad_norm": 8.570306777954102, + "learning_rate": 1.0750140651829588e-05, + "loss": 1.8338, + "step": 2398 + }, + { + "epoch": 1.4761941389123914, + "grad_norm": 7.462289333343506, + "learning_rate": 1.0743514553680521e-05, + "loss": 1.8101, + "step": 2399 + }, + { + "epoch": 1.476809476194139, + "grad_norm": 6.401105880737305, + "learning_rate": 
1.0736888127259132e-05, + "loss": 1.6952, + "step": 2400 + }, + { + "epoch": 1.4774248134758865, + "grad_norm": 6.55561637878418, + "learning_rate": 1.0730261375491087e-05, + "loss": 1.8107, + "step": 2401 + }, + { + "epoch": 1.478040150757634, + "grad_norm": 6.595202922821045, + "learning_rate": 1.0723634301302186e-05, + "loss": 1.8851, + "step": 2402 + }, + { + "epoch": 1.4786554880393816, + "grad_norm": 6.675348281860352, + "learning_rate": 1.0717006907618377e-05, + "loss": 1.9615, + "step": 2403 + }, + { + "epoch": 1.479270825321129, + "grad_norm": 7.709946155548096, + "learning_rate": 1.0710379197365752e-05, + "loss": 1.9166, + "step": 2404 + }, + { + "epoch": 1.4798861626028768, + "grad_norm": 7.247996807098389, + "learning_rate": 1.070375117347054e-05, + "loss": 1.7993, + "step": 2405 + }, + { + "epoch": 1.4805014998846242, + "grad_norm": 6.138127326965332, + "learning_rate": 1.0697122838859106e-05, + "loss": 1.9718, + "step": 2406 + }, + { + "epoch": 1.4811168371663719, + "grad_norm": 6.073063850402832, + "learning_rate": 1.0690494196457954e-05, + "loss": 1.9948, + "step": 2407 + }, + { + "epoch": 1.4817321744481193, + "grad_norm": 5.920560359954834, + "learning_rate": 1.0683865249193732e-05, + "loss": 1.9089, + "step": 2408 + }, + { + "epoch": 1.482347511729867, + "grad_norm": 6.367339611053467, + "learning_rate": 1.0677235999993205e-05, + "loss": 1.853, + "step": 2409 + }, + { + "epoch": 1.4829628490116145, + "grad_norm": 8.302556037902832, + "learning_rate": 1.067060645178329e-05, + "loss": 1.8834, + "step": 2410 + }, + { + "epoch": 1.4835781862933621, + "grad_norm": 6.789721965789795, + "learning_rate": 1.066397660749102e-05, + "loss": 1.7668, + "step": 2411 + }, + { + "epoch": 1.4841935235751096, + "grad_norm": 5.820046901702881, + "learning_rate": 1.0657346470043575e-05, + "loss": 1.8925, + "step": 2412 + }, + { + "epoch": 1.484808860856857, + "grad_norm": 5.268479824066162, + "learning_rate": 1.0650716042368242e-05, + "loss": 1.7465, + "step": 2413 + }, + { + "epoch": 1.4854241981386047, + "grad_norm": 6.908467769622803, + "learning_rate": 1.0644085327392465e-05, + "loss": 1.8129, + "step": 2414 + }, + { + "epoch": 1.4860395354203524, + "grad_norm": 8.396808624267578, + "learning_rate": 1.0637454328043792e-05, + "loss": 1.9929, + "step": 2415 + }, + { + "epoch": 1.4866548727020998, + "grad_norm": 7.9382853507995605, + "learning_rate": 1.0630823047249903e-05, + "loss": 1.8736, + "step": 2416 + }, + { + "epoch": 1.4872702099838473, + "grad_norm": 7.732401371002197, + "learning_rate": 1.0624191487938607e-05, + "loss": 1.9774, + "step": 2417 + }, + { + "epoch": 1.487885547265595, + "grad_norm": 6.7714009284973145, + "learning_rate": 1.0617559653037833e-05, + "loss": 1.8573, + "step": 2418 + }, + { + "epoch": 1.4885008845473426, + "grad_norm": 6.831276893615723, + "learning_rate": 1.0610927545475624e-05, + "loss": 1.8966, + "step": 2419 + }, + { + "epoch": 1.48911622182909, + "grad_norm": 5.344137668609619, + "learning_rate": 1.060429516818016e-05, + "loss": 1.8585, + "step": 2420 + }, + { + "epoch": 1.4897315591108375, + "grad_norm": 5.783152103424072, + "learning_rate": 1.0597662524079721e-05, + "loss": 1.8495, + "step": 2421 + }, + { + "epoch": 1.4903468963925852, + "grad_norm": 6.151708126068115, + "learning_rate": 1.0591029616102725e-05, + "loss": 1.9466, + "step": 2422 + }, + { + "epoch": 1.4909622336743327, + "grad_norm": 8.654248237609863, + "learning_rate": 1.0584396447177682e-05, + "loss": 1.8211, + "step": 2423 + }, + { + "epoch": 1.4915775709560803, + "grad_norm": 
5.754706859588623, + "learning_rate": 1.057776302023325e-05, + "loss": 2.0279, + "step": 2424 + }, + { + "epoch": 1.4921929082378278, + "grad_norm": 5.884255886077881, + "learning_rate": 1.0571129338198162e-05, + "loss": 1.9234, + "step": 2425 + }, + { + "epoch": 1.4928082455195755, + "grad_norm": 6.571177959442139, + "learning_rate": 1.0564495404001299e-05, + "loss": 1.9162, + "step": 2426 + }, + { + "epoch": 1.493423582801323, + "grad_norm": 6.515172481536865, + "learning_rate": 1.0557861220571626e-05, + "loss": 1.8928, + "step": 2427 + }, + { + "epoch": 1.4940389200830706, + "grad_norm": 6.334596157073975, + "learning_rate": 1.055122679083824e-05, + "loss": 1.8675, + "step": 2428 + }, + { + "epoch": 1.494654257364818, + "grad_norm": 7.4317731857299805, + "learning_rate": 1.054459211773033e-05, + "loss": 1.7907, + "step": 2429 + }, + { + "epoch": 1.4952695946465657, + "grad_norm": 5.681263446807861, + "learning_rate": 1.0537957204177204e-05, + "loss": 1.9739, + "step": 2430 + }, + { + "epoch": 1.4958849319283132, + "grad_norm": 6.129361152648926, + "learning_rate": 1.0531322053108268e-05, + "loss": 1.9787, + "step": 2431 + }, + { + "epoch": 1.4965002692100606, + "grad_norm": 7.0703325271606445, + "learning_rate": 1.0524686667453038e-05, + "loss": 1.7773, + "step": 2432 + }, + { + "epoch": 1.4971156064918083, + "grad_norm": 6.813838958740234, + "learning_rate": 1.051805105014113e-05, + "loss": 2.0806, + "step": 2433 + }, + { + "epoch": 1.497730943773556, + "grad_norm": 6.824643135070801, + "learning_rate": 1.0511415204102264e-05, + "loss": 1.9443, + "step": 2434 + }, + { + "epoch": 1.4983462810553034, + "grad_norm": 6.338407516479492, + "learning_rate": 1.050477913226626e-05, + "loss": 1.946, + "step": 2435 + }, + { + "epoch": 1.4989616183370509, + "grad_norm": 5.436490535736084, + "learning_rate": 1.0498142837563048e-05, + "loss": 2.0116, + "step": 2436 + }, + { + "epoch": 1.4995769556187986, + "grad_norm": 6.733730316162109, + "learning_rate": 1.0491506322922633e-05, + "loss": 1.91, + "step": 2437 + }, + { + "epoch": 1.5001922929005462, + "grad_norm": 5.370570182800293, + "learning_rate": 1.0484869591275144e-05, + "loss": 1.8294, + "step": 2438 + }, + { + "epoch": 1.5008076301822937, + "grad_norm": 7.020693778991699, + "learning_rate": 1.0478232645550784e-05, + "loss": 2.005, + "step": 2439 + }, + { + "epoch": 1.5014229674640411, + "grad_norm": 6.010800361633301, + "learning_rate": 1.0471595488679865e-05, + "loss": 1.9168, + "step": 2440 + }, + { + "epoch": 1.5020383047457888, + "grad_norm": 7.783041000366211, + "learning_rate": 1.0464958123592779e-05, + "loss": 1.8311, + "step": 2441 + }, + { + "epoch": 1.5026536420275365, + "grad_norm": 5.321151256561279, + "learning_rate": 1.045832055322003e-05, + "loss": 1.9629, + "step": 2442 + }, + { + "epoch": 1.503268979309284, + "grad_norm": 6.089056491851807, + "learning_rate": 1.045168278049219e-05, + "loss": 1.9814, + "step": 2443 + }, + { + "epoch": 1.5038843165910314, + "grad_norm": 6.681804656982422, + "learning_rate": 1.0445044808339935e-05, + "loss": 2.0396, + "step": 2444 + }, + { + "epoch": 1.504499653872779, + "grad_norm": 6.238109111785889, + "learning_rate": 1.0438406639694025e-05, + "loss": 1.7838, + "step": 2445 + }, + { + "epoch": 1.5051149911545267, + "grad_norm": 7.142671585083008, + "learning_rate": 1.0431768277485306e-05, + "loss": 1.8188, + "step": 2446 + }, + { + "epoch": 1.505730328436274, + "grad_norm": 7.847067832946777, + "learning_rate": 1.042512972464471e-05, + "loss": 2.0053, + "step": 2447 + }, + { + "epoch": 
1.5063456657180216, + "grad_norm": 5.534270286560059, + "learning_rate": 1.041849098410325e-05, + "loss": 2.0684, + "step": 2448 + }, + { + "epoch": 1.5069610029997693, + "grad_norm": 5.672049522399902, + "learning_rate": 1.041185205879203e-05, + "loss": 1.8637, + "step": 2449 + }, + { + "epoch": 1.5075763402815168, + "grad_norm": 6.516597270965576, + "learning_rate": 1.0405212951642229e-05, + "loss": 2.0183, + "step": 2450 + }, + { + "epoch": 1.5081916775632642, + "grad_norm": 6.04902982711792, + "learning_rate": 1.0398573665585105e-05, + "loss": 2.0363, + "step": 2451 + }, + { + "epoch": 1.508807014845012, + "grad_norm": 6.221118450164795, + "learning_rate": 1.0391934203552003e-05, + "loss": 1.8881, + "step": 2452 + }, + { + "epoch": 1.5094223521267596, + "grad_norm": 5.834832191467285, + "learning_rate": 1.0385294568474335e-05, + "loss": 2.0108, + "step": 2453 + }, + { + "epoch": 1.510037689408507, + "grad_norm": 5.9003753662109375, + "learning_rate": 1.0378654763283602e-05, + "loss": 2.0925, + "step": 2454 + }, + { + "epoch": 1.5106530266902545, + "grad_norm": 6.178776741027832, + "learning_rate": 1.0372014790911365e-05, + "loss": 1.7982, + "step": 2455 + }, + { + "epoch": 1.5112683639720021, + "grad_norm": 5.96808385848999, + "learning_rate": 1.0365374654289272e-05, + "loss": 2.065, + "step": 2456 + }, + { + "epoch": 1.5118837012537498, + "grad_norm": 6.580560207366943, + "learning_rate": 1.0358734356349035e-05, + "loss": 1.986, + "step": 2457 + }, + { + "epoch": 1.5124990385354973, + "grad_norm": 5.981705665588379, + "learning_rate": 1.0352093900022447e-05, + "loss": 1.9449, + "step": 2458 + }, + { + "epoch": 1.5131143758172447, + "grad_norm": 6.058340072631836, + "learning_rate": 1.0345453288241356e-05, + "loss": 1.9618, + "step": 2459 + }, + { + "epoch": 1.5137297130989924, + "grad_norm": 5.558003902435303, + "learning_rate": 1.0338812523937694e-05, + "loss": 1.8423, + "step": 2460 + }, + { + "epoch": 1.51434505038074, + "grad_norm": 7.460606575012207, + "learning_rate": 1.033217161004345e-05, + "loss": 1.9325, + "step": 2461 + }, + { + "epoch": 1.5149603876624875, + "grad_norm": 6.978290557861328, + "learning_rate": 1.0325530549490685e-05, + "loss": 1.981, + "step": 2462 + }, + { + "epoch": 1.515575724944235, + "grad_norm": 6.708978176116943, + "learning_rate": 1.0318889345211519e-05, + "loss": 1.9037, + "step": 2463 + }, + { + "epoch": 1.5161910622259827, + "grad_norm": 5.651205539703369, + "learning_rate": 1.0312248000138143e-05, + "loss": 1.9913, + "step": 2464 + }, + { + "epoch": 1.5168063995077303, + "grad_norm": 8.24433708190918, + "learning_rate": 1.03056065172028e-05, + "loss": 2.0262, + "step": 2465 + }, + { + "epoch": 1.5174217367894778, + "grad_norm": 8.062834739685059, + "learning_rate": 1.0298964899337807e-05, + "loss": 1.972, + "step": 2466 + }, + { + "epoch": 1.5180370740712252, + "grad_norm": 6.9814019203186035, + "learning_rate": 1.0292323149475527e-05, + "loss": 1.9636, + "step": 2467 + }, + { + "epoch": 1.518652411352973, + "grad_norm": 5.376134872436523, + "learning_rate": 1.0285681270548393e-05, + "loss": 1.9766, + "step": 2468 + }, + { + "epoch": 1.5192677486347204, + "grad_norm": 13.70329475402832, + "learning_rate": 1.0279039265488885e-05, + "loss": 2.1457, + "step": 2469 + }, + { + "epoch": 1.5198830859164678, + "grad_norm": 5.797250270843506, + "learning_rate": 1.0272397137229548e-05, + "loss": 1.8813, + "step": 2470 + }, + { + "epoch": 1.5204984231982155, + "grad_norm": 6.871057510375977, + "learning_rate": 1.0265754888702972e-05, + "loss": 1.8064, + 
"step": 2471 + }, + { + "epoch": 1.5211137604799632, + "grad_norm": 6.197854995727539, + "learning_rate": 1.0259112522841808e-05, + "loss": 1.8151, + "step": 2472 + }, + { + "epoch": 1.5217290977617106, + "grad_norm": 6.562155246734619, + "learning_rate": 1.0252470042578756e-05, + "loss": 1.8839, + "step": 2473 + }, + { + "epoch": 1.522344435043458, + "grad_norm": 6.575608253479004, + "learning_rate": 1.0245827450846564e-05, + "loss": 1.8223, + "step": 2474 + }, + { + "epoch": 1.5229597723252057, + "grad_norm": 7.373517990112305, + "learning_rate": 1.023918475057803e-05, + "loss": 1.8281, + "step": 2475 + }, + { + "epoch": 1.5235751096069534, + "grad_norm": 6.621358394622803, + "learning_rate": 1.0232541944706007e-05, + "loss": 1.8881, + "step": 2476 + }, + { + "epoch": 1.5241904468887009, + "grad_norm": 7.0206618309021, + "learning_rate": 1.0225899036163383e-05, + "loss": 1.727, + "step": 2477 + }, + { + "epoch": 1.5248057841704483, + "grad_norm": 6.479127883911133, + "learning_rate": 1.0219256027883102e-05, + "loss": 1.9358, + "step": 2478 + }, + { + "epoch": 1.525421121452196, + "grad_norm": 6.922554016113281, + "learning_rate": 1.0212612922798143e-05, + "loss": 1.8279, + "step": 2479 + }, + { + "epoch": 1.5260364587339437, + "grad_norm": 6.147972106933594, + "learning_rate": 1.0205969723841536e-05, + "loss": 2.0615, + "step": 2480 + }, + { + "epoch": 1.5266517960156911, + "grad_norm": 6.554501056671143, + "learning_rate": 1.0199326433946345e-05, + "loss": 1.9072, + "step": 2481 + }, + { + "epoch": 1.5272671332974386, + "grad_norm": 6.475035190582275, + "learning_rate": 1.0192683056045678e-05, + "loss": 2.185, + "step": 2482 + }, + { + "epoch": 1.5278824705791862, + "grad_norm": 6.549026012420654, + "learning_rate": 1.0186039593072685e-05, + "loss": 1.865, + "step": 2483 + }, + { + "epoch": 1.528497807860934, + "grad_norm": 5.579683780670166, + "learning_rate": 1.0179396047960546e-05, + "loss": 1.9066, + "step": 2484 + }, + { + "epoch": 1.5291131451426814, + "grad_norm": 5.498599052429199, + "learning_rate": 1.0172752423642484e-05, + "loss": 2.0432, + "step": 2485 + }, + { + "epoch": 1.5297284824244288, + "grad_norm": 5.46852970123291, + "learning_rate": 1.0166108723051753e-05, + "loss": 1.9777, + "step": 2486 + }, + { + "epoch": 1.5303438197061765, + "grad_norm": 7.145344257354736, + "learning_rate": 1.0159464949121642e-05, + "loss": 1.904, + "step": 2487 + }, + { + "epoch": 1.530959156987924, + "grad_norm": 6.430215358734131, + "learning_rate": 1.0152821104785473e-05, + "loss": 1.8658, + "step": 2488 + }, + { + "epoch": 1.5315744942696714, + "grad_norm": 5.703446865081787, + "learning_rate": 1.0146177192976599e-05, + "loss": 2.1446, + "step": 2489 + }, + { + "epoch": 1.532189831551419, + "grad_norm": 7.842698097229004, + "learning_rate": 1.0139533216628399e-05, + "loss": 1.9122, + "step": 2490 + }, + { + "epoch": 1.5328051688331668, + "grad_norm": 6.559938907623291, + "learning_rate": 1.0132889178674283e-05, + "loss": 1.776, + "step": 2491 + }, + { + "epoch": 1.5334205061149142, + "grad_norm": 6.836427211761475, + "learning_rate": 1.0126245082047699e-05, + "loss": 1.6082, + "step": 2492 + }, + { + "epoch": 1.5340358433966617, + "grad_norm": 6.948553562164307, + "learning_rate": 1.0119600929682096e-05, + "loss": 1.7837, + "step": 2493 + }, + { + "epoch": 1.5346511806784093, + "grad_norm": 6.79789924621582, + "learning_rate": 1.0112956724510974e-05, + "loss": 2.0412, + "step": 2494 + }, + { + "epoch": 1.535266517960157, + "grad_norm": 6.9521942138671875, + "learning_rate": 
1.0106312469467841e-05, + "loss": 1.7168, + "step": 2495 + }, + { + "epoch": 1.5358818552419045, + "grad_norm": 7.1461405754089355, + "learning_rate": 1.0099668167486227e-05, + "loss": 1.7917, + "step": 2496 + }, + { + "epoch": 1.536497192523652, + "grad_norm": 7.881579875946045, + "learning_rate": 1.0093023821499692e-05, + "loss": 1.732, + "step": 2497 + }, + { + "epoch": 1.5371125298053996, + "grad_norm": 5.080859184265137, + "learning_rate": 1.008637943444181e-05, + "loss": 1.9192, + "step": 2498 + }, + { + "epoch": 1.5377278670871473, + "grad_norm": 7.368236541748047, + "learning_rate": 1.0079735009246168e-05, + "loss": 1.9833, + "step": 2499 + }, + { + "epoch": 1.5383432043688947, + "grad_norm": 5.747280597686768, + "learning_rate": 1.0073090548846378e-05, + "loss": 1.7641, + "step": 2500 + }, + { + "epoch": 1.5389585416506422, + "grad_norm": 7.980464458465576, + "learning_rate": 1.0066446056176065e-05, + "loss": 2.1232, + "step": 2501 + }, + { + "epoch": 1.5395738789323898, + "grad_norm": 6.96491003036499, + "learning_rate": 1.0059801534168868e-05, + "loss": 1.9235, + "step": 2502 + }, + { + "epoch": 1.5401892162141375, + "grad_norm": 5.851550102233887, + "learning_rate": 1.0053156985758435e-05, + "loss": 1.7861, + "step": 2503 + }, + { + "epoch": 1.540804553495885, + "grad_norm": 5.478586196899414, + "learning_rate": 1.004651241387843e-05, + "loss": 1.9103, + "step": 2504 + }, + { + "epoch": 1.5414198907776324, + "grad_norm": 7.540194988250732, + "learning_rate": 1.0039867821462529e-05, + "loss": 2.0311, + "step": 2505 + }, + { + "epoch": 1.54203522805938, + "grad_norm": 6.04556131362915, + "learning_rate": 1.0033223211444417e-05, + "loss": 1.8374, + "step": 2506 + }, + { + "epoch": 1.5426505653411278, + "grad_norm": 6.615495681762695, + "learning_rate": 1.0026578586757778e-05, + "loss": 1.9241, + "step": 2507 + }, + { + "epoch": 1.5432659026228752, + "grad_norm": 5.392002582550049, + "learning_rate": 1.0019933950336314e-05, + "loss": 1.9646, + "step": 2508 + }, + { + "epoch": 1.5438812399046227, + "grad_norm": 6.150516510009766, + "learning_rate": 1.0013289305113725e-05, + "loss": 1.9163, + "step": 2509 + }, + { + "epoch": 1.5444965771863703, + "grad_norm": 5.763930797576904, + "learning_rate": 1.0006644654023719e-05, + "loss": 1.8274, + "step": 2510 + }, + { + "epoch": 1.5451119144681178, + "grad_norm": 5.2970757484436035, + "learning_rate": 1e-05, + "loss": 2.0029, + "step": 2511 + }, + { + "epoch": 1.5457272517498652, + "grad_norm": 6.098113059997559, + "learning_rate": 9.993355345976284e-06, + "loss": 1.875, + "step": 2512 + }, + { + "epoch": 1.546342589031613, + "grad_norm": 7.427017688751221, + "learning_rate": 9.986710694886276e-06, + "loss": 1.8554, + "step": 2513 + }, + { + "epoch": 1.5469579263133606, + "grad_norm": 6.138082504272461, + "learning_rate": 9.98006604966369e-06, + "loss": 2.0963, + "step": 2514 + }, + { + "epoch": 1.547573263595108, + "grad_norm": 6.993271350860596, + "learning_rate": 9.973421413242224e-06, + "loss": 1.7577, + "step": 2515 + }, + { + "epoch": 1.5481886008768555, + "grad_norm": 5.9030680656433105, + "learning_rate": 9.966776788555588e-06, + "loss": 1.977, + "step": 2516 + }, + { + "epoch": 1.5488039381586032, + "grad_norm": 7.404929161071777, + "learning_rate": 9.960132178537471e-06, + "loss": 1.7973, + "step": 2517 + }, + { + "epoch": 1.5494192754403509, + "grad_norm": 6.093820571899414, + "learning_rate": 9.953487586121571e-06, + "loss": 1.8137, + "step": 2518 + }, + { + "epoch": 1.5500346127220983, + "grad_norm": 6.417913436889648, + 
"learning_rate": 9.946843014241572e-06, + "loss": 1.7991, + "step": 2519 + }, + { + "epoch": 1.5506499500038458, + "grad_norm": 8.36543083190918, + "learning_rate": 9.940198465831137e-06, + "loss": 1.9473, + "step": 2520 + }, + { + "epoch": 1.5512652872855934, + "grad_norm": 6.475955963134766, + "learning_rate": 9.933553943823938e-06, + "loss": 1.9541, + "step": 2521 + }, + { + "epoch": 1.551880624567341, + "grad_norm": 7.5311689376831055, + "learning_rate": 9.926909451153625e-06, + "loss": 1.8479, + "step": 2522 + }, + { + "epoch": 1.5524959618490886, + "grad_norm": 6.725881576538086, + "learning_rate": 9.920264990753837e-06, + "loss": 2.1821, + "step": 2523 + }, + { + "epoch": 1.553111299130836, + "grad_norm": 6.054800033569336, + "learning_rate": 9.913620565558194e-06, + "loss": 1.829, + "step": 2524 + }, + { + "epoch": 1.5537266364125837, + "grad_norm": 9.438462257385254, + "learning_rate": 9.90697617850031e-06, + "loss": 1.9502, + "step": 2525 + }, + { + "epoch": 1.5543419736943314, + "grad_norm": 6.3000359535217285, + "learning_rate": 9.900331832513775e-06, + "loss": 2.0117, + "step": 2526 + }, + { + "epoch": 1.5549573109760788, + "grad_norm": 6.53546667098999, + "learning_rate": 9.89368753053216e-06, + "loss": 2.0197, + "step": 2527 + }, + { + "epoch": 1.5555726482578263, + "grad_norm": 5.365586757659912, + "learning_rate": 9.88704327548903e-06, + "loss": 1.8637, + "step": 2528 + }, + { + "epoch": 1.556187985539574, + "grad_norm": 6.20352029800415, + "learning_rate": 9.880399070317907e-06, + "loss": 1.8784, + "step": 2529 + }, + { + "epoch": 1.5568033228213214, + "grad_norm": 5.8321638107299805, + "learning_rate": 9.873754917952307e-06, + "loss": 1.9493, + "step": 2530 + }, + { + "epoch": 1.5574186601030688, + "grad_norm": 7.119935035705566, + "learning_rate": 9.867110821325717e-06, + "loss": 1.8203, + "step": 2531 + }, + { + "epoch": 1.5580339973848165, + "grad_norm": 6.78074836730957, + "learning_rate": 9.860466783371605e-06, + "loss": 1.8162, + "step": 2532 + }, + { + "epoch": 1.5586493346665642, + "grad_norm": 6.567858695983887, + "learning_rate": 9.853822807023408e-06, + "loss": 1.9488, + "step": 2533 + }, + { + "epoch": 1.5592646719483116, + "grad_norm": 5.29601526260376, + "learning_rate": 9.84717889521453e-06, + "loss": 1.8956, + "step": 2534 + }, + { + "epoch": 1.559880009230059, + "grad_norm": 6.8540120124816895, + "learning_rate": 9.84053505087836e-06, + "loss": 2.1027, + "step": 2535 + }, + { + "epoch": 1.5604953465118068, + "grad_norm": 5.665838241577148, + "learning_rate": 9.83389127694825e-06, + "loss": 1.8531, + "step": 2536 + }, + { + "epoch": 1.5611106837935544, + "grad_norm": 6.535799503326416, + "learning_rate": 9.827247576357521e-06, + "loss": 1.8366, + "step": 2537 + }, + { + "epoch": 1.561726021075302, + "grad_norm": 5.437575817108154, + "learning_rate": 9.820603952039456e-06, + "loss": 1.9178, + "step": 2538 + }, + { + "epoch": 1.5623413583570493, + "grad_norm": 6.785678386688232, + "learning_rate": 9.813960406927318e-06, + "loss": 2.0763, + "step": 2539 + }, + { + "epoch": 1.562956695638797, + "grad_norm": 7.950549602508545, + "learning_rate": 9.807316943954325e-06, + "loss": 1.8075, + "step": 2540 + }, + { + "epoch": 1.5635720329205447, + "grad_norm": 7.965771675109863, + "learning_rate": 9.800673566053657e-06, + "loss": 1.9015, + "step": 2541 + }, + { + "epoch": 1.5641873702022921, + "grad_norm": 6.680383682250977, + "learning_rate": 9.794030276158465e-06, + "loss": 1.8564, + "step": 2542 + }, + { + "epoch": 1.5648027074840396, + "grad_norm": 
5.517396450042725, + "learning_rate": 9.78738707720186e-06, + "loss": 1.7751, + "step": 2543 + }, + { + "epoch": 1.5654180447657873, + "grad_norm": 6.456830024719238, + "learning_rate": 9.780743972116903e-06, + "loss": 1.823, + "step": 2544 + }, + { + "epoch": 1.566033382047535, + "grad_norm": 5.043920040130615, + "learning_rate": 9.774100963836617e-06, + "loss": 1.9657, + "step": 2545 + }, + { + "epoch": 1.5666487193292824, + "grad_norm": 6.528972625732422, + "learning_rate": 9.767458055293995e-06, + "loss": 1.7277, + "step": 2546 + }, + { + "epoch": 1.5672640566110299, + "grad_norm": 5.803744316101074, + "learning_rate": 9.760815249421973e-06, + "loss": 1.9019, + "step": 2547 + }, + { + "epoch": 1.5678793938927775, + "grad_norm": 8.11334228515625, + "learning_rate": 9.754172549153439e-06, + "loss": 1.6999, + "step": 2548 + }, + { + "epoch": 1.5684947311745252, + "grad_norm": 7.959278106689453, + "learning_rate": 9.747529957421246e-06, + "loss": 1.9309, + "step": 2549 + }, + { + "epoch": 1.5691100684562724, + "grad_norm": 7.308795928955078, + "learning_rate": 9.740887477158194e-06, + "loss": 2.0512, + "step": 2550 + }, + { + "epoch": 1.56972540573802, + "grad_norm": 7.896944522857666, + "learning_rate": 9.734245111297033e-06, + "loss": 2.0114, + "step": 2551 + }, + { + "epoch": 1.5703407430197678, + "grad_norm": 5.974488258361816, + "learning_rate": 9.727602862770456e-06, + "loss": 1.7189, + "step": 2552 + }, + { + "epoch": 1.5709560803015152, + "grad_norm": 7.161620140075684, + "learning_rate": 9.720960734511118e-06, + "loss": 1.7962, + "step": 2553 + }, + { + "epoch": 1.5715714175832627, + "grad_norm": 6.797051429748535, + "learning_rate": 9.71431872945161e-06, + "loss": 1.58, + "step": 2554 + }, + { + "epoch": 1.5721867548650104, + "grad_norm": 6.172833442687988, + "learning_rate": 9.707676850524473e-06, + "loss": 1.8026, + "step": 2555 + }, + { + "epoch": 1.572802092146758, + "grad_norm": 7.593555927276611, + "learning_rate": 9.701035100662195e-06, + "loss": 2.1541, + "step": 2556 + }, + { + "epoch": 1.5734174294285055, + "grad_norm": 7.657301902770996, + "learning_rate": 9.694393482797203e-06, + "loss": 1.927, + "step": 2557 + }, + { + "epoch": 1.574032766710253, + "grad_norm": 6.201522350311279, + "learning_rate": 9.687751999861864e-06, + "loss": 2.0148, + "step": 2558 + }, + { + "epoch": 1.5746481039920006, + "grad_norm": 6.342685699462891, + "learning_rate": 9.681110654788483e-06, + "loss": 1.5937, + "step": 2559 + }, + { + "epoch": 1.5752634412737483, + "grad_norm": 5.910861015319824, + "learning_rate": 9.674469450509319e-06, + "loss": 1.9163, + "step": 2560 + }, + { + "epoch": 1.5758787785554957, + "grad_norm": 6.749059677124023, + "learning_rate": 9.667828389956555e-06, + "loss": 1.8046, + "step": 2561 + }, + { + "epoch": 1.5764941158372432, + "grad_norm": 6.541704177856445, + "learning_rate": 9.661187476062308e-06, + "loss": 1.9248, + "step": 2562 + }, + { + "epoch": 1.5771094531189909, + "grad_norm": 5.8368940353393555, + "learning_rate": 9.654546711758646e-06, + "loss": 1.8504, + "step": 2563 + }, + { + "epoch": 1.5777247904007385, + "grad_norm": 5.672591209411621, + "learning_rate": 9.647906099977556e-06, + "loss": 1.8069, + "step": 2564 + }, + { + "epoch": 1.578340127682486, + "grad_norm": 6.328923225402832, + "learning_rate": 9.641265643650968e-06, + "loss": 1.9237, + "step": 2565 + }, + { + "epoch": 1.5789554649642334, + "grad_norm": 5.597191333770752, + "learning_rate": 9.63462534571073e-06, + "loss": 1.8325, + "step": 2566 + }, + { + "epoch": 1.5795708022459811, + 
"grad_norm": 6.042911529541016, + "learning_rate": 9.627985209088639e-06, + "loss": 1.772, + "step": 2567 + }, + { + "epoch": 1.5801861395277288, + "grad_norm": 6.542628765106201, + "learning_rate": 9.621345236716403e-06, + "loss": 1.7761, + "step": 2568 + }, + { + "epoch": 1.5808014768094762, + "grad_norm": 5.985723495483398, + "learning_rate": 9.614705431525665e-06, + "loss": 1.8907, + "step": 2569 + }, + { + "epoch": 1.5814168140912237, + "grad_norm": 6.8549652099609375, + "learning_rate": 9.608065796447999e-06, + "loss": 1.7959, + "step": 2570 + }, + { + "epoch": 1.5820321513729714, + "grad_norm": 6.127254962921143, + "learning_rate": 9.601426334414898e-06, + "loss": 1.8661, + "step": 2571 + }, + { + "epoch": 1.5826474886547188, + "grad_norm": 6.482248783111572, + "learning_rate": 9.594787048357776e-06, + "loss": 1.8727, + "step": 2572 + }, + { + "epoch": 1.5832628259364663, + "grad_norm": 5.490617752075195, + "learning_rate": 9.588147941207972e-06, + "loss": 1.9272, + "step": 2573 + }, + { + "epoch": 1.583878163218214, + "grad_norm": 9.131315231323242, + "learning_rate": 9.581509015896751e-06, + "loss": 2.064, + "step": 2574 + }, + { + "epoch": 1.5844935004999616, + "grad_norm": 5.9634904861450195, + "learning_rate": 9.574870275355295e-06, + "loss": 1.855, + "step": 2575 + }, + { + "epoch": 1.585108837781709, + "grad_norm": 7.0555500984191895, + "learning_rate": 9.568231722514697e-06, + "loss": 1.9703, + "step": 2576 + }, + { + "epoch": 1.5857241750634565, + "grad_norm": 8.088744163513184, + "learning_rate": 9.561593360305977e-06, + "loss": 1.9747, + "step": 2577 + }, + { + "epoch": 1.5863395123452042, + "grad_norm": 6.581573963165283, + "learning_rate": 9.554955191660069e-06, + "loss": 1.6392, + "step": 2578 + }, + { + "epoch": 1.5869548496269519, + "grad_norm": 9.640170097351074, + "learning_rate": 9.548317219507815e-06, + "loss": 1.8848, + "step": 2579 + }, + { + "epoch": 1.5875701869086993, + "grad_norm": 5.450416088104248, + "learning_rate": 9.541679446779972e-06, + "loss": 1.6966, + "step": 2580 + }, + { + "epoch": 1.5881855241904468, + "grad_norm": 6.114874839782715, + "learning_rate": 9.535041876407223e-06, + "loss": 1.8604, + "step": 2581 + }, + { + "epoch": 1.5888008614721945, + "grad_norm": 6.431033611297607, + "learning_rate": 9.52840451132014e-06, + "loss": 1.8605, + "step": 2582 + }, + { + "epoch": 1.5894161987539421, + "grad_norm": 6.65113639831543, + "learning_rate": 9.521767354449218e-06, + "loss": 1.8883, + "step": 2583 + }, + { + "epoch": 1.5900315360356896, + "grad_norm": 6.859419345855713, + "learning_rate": 9.51513040872486e-06, + "loss": 1.6733, + "step": 2584 + }, + { + "epoch": 1.590646873317437, + "grad_norm": 5.769123554229736, + "learning_rate": 9.50849367707737e-06, + "loss": 2.0758, + "step": 2585 + }, + { + "epoch": 1.5912622105991847, + "grad_norm": 6.488439083099365, + "learning_rate": 9.501857162436957e-06, + "loss": 1.9358, + "step": 2586 + }, + { + "epoch": 1.5918775478809324, + "grad_norm": 6.257470607757568, + "learning_rate": 9.49522086773374e-06, + "loss": 1.8477, + "step": 2587 + }, + { + "epoch": 1.5924928851626798, + "grad_norm": 6.5684967041015625, + "learning_rate": 9.488584795897738e-06, + "loss": 2.2391, + "step": 2588 + }, + { + "epoch": 1.5931082224444273, + "grad_norm": 6.3192267417907715, + "learning_rate": 9.481948949858876e-06, + "loss": 1.9736, + "step": 2589 + }, + { + "epoch": 1.593723559726175, + "grad_norm": 7.261989593505859, + "learning_rate": 9.475313332546964e-06, + "loss": 1.8771, + "step": 2590 + }, + { + "epoch": 
1.5943388970079224, + "grad_norm": 6.299973487854004, + "learning_rate": 9.468677946891735e-06, + "loss": 2.157, + "step": 2591 + }, + { + "epoch": 1.5949542342896699, + "grad_norm": 6.15043306350708, + "learning_rate": 9.462042795822799e-06, + "loss": 2.0305, + "step": 2592 + }, + { + "epoch": 1.5955695715714175, + "grad_norm": 5.616262912750244, + "learning_rate": 9.455407882269674e-06, + "loss": 1.5919, + "step": 2593 + }, + { + "epoch": 1.5961849088531652, + "grad_norm": 5.627361297607422, + "learning_rate": 9.448773209161762e-06, + "loss": 2.0208, + "step": 2594 + }, + { + "epoch": 1.5968002461349127, + "grad_norm": 7.316168308258057, + "learning_rate": 9.442138779428376e-06, + "loss": 2.042, + "step": 2595 + }, + { + "epoch": 1.5974155834166601, + "grad_norm": 9.444948196411133, + "learning_rate": 9.435504595998706e-06, + "loss": 1.9163, + "step": 2596 + }, + { + "epoch": 1.5980309206984078, + "grad_norm": 6.547945976257324, + "learning_rate": 9.42887066180184e-06, + "loss": 1.9451, + "step": 2597 + }, + { + "epoch": 1.5986462579801555, + "grad_norm": 6.979162693023682, + "learning_rate": 9.422236979766755e-06, + "loss": 1.8515, + "step": 2598 + }, + { + "epoch": 1.599261595261903, + "grad_norm": 6.108771324157715, + "learning_rate": 9.41560355282232e-06, + "loss": 1.9115, + "step": 2599 + }, + { + "epoch": 1.5998769325436504, + "grad_norm": 6.277493953704834, + "learning_rate": 9.40897038389728e-06, + "loss": 1.7535, + "step": 2600 + }, + { + "epoch": 1.600492269825398, + "grad_norm": 6.287598133087158, + "learning_rate": 9.40233747592028e-06, + "loss": 1.7451, + "step": 2601 + }, + { + "epoch": 1.6011076071071457, + "grad_norm": 6.413321495056152, + "learning_rate": 9.395704831819844e-06, + "loss": 1.7185, + "step": 2602 + }, + { + "epoch": 1.6017229443888932, + "grad_norm": 5.861776351928711, + "learning_rate": 9.389072454524381e-06, + "loss": 1.9024, + "step": 2603 + }, + { + "epoch": 1.6023382816706406, + "grad_norm": 8.122701644897461, + "learning_rate": 9.382440346962169e-06, + "loss": 2.0436, + "step": 2604 + }, + { + "epoch": 1.6029536189523883, + "grad_norm": 7.303281307220459, + "learning_rate": 9.375808512061394e-06, + "loss": 1.9218, + "step": 2605 + }, + { + "epoch": 1.603568956234136, + "grad_norm": 6.871929168701172, + "learning_rate": 9.369176952750099e-06, + "loss": 1.8463, + "step": 2606 + }, + { + "epoch": 1.6041842935158834, + "grad_norm": 7.492008686065674, + "learning_rate": 9.362545671956213e-06, + "loss": 1.8979, + "step": 2607 + }, + { + "epoch": 1.6047996307976309, + "grad_norm": 6.909112453460693, + "learning_rate": 9.355914672607536e-06, + "loss": 1.9884, + "step": 2608 + }, + { + "epoch": 1.6054149680793786, + "grad_norm": 6.701021671295166, + "learning_rate": 9.34928395763176e-06, + "loss": 1.8383, + "step": 2609 + }, + { + "epoch": 1.6060303053611262, + "grad_norm": 6.865297794342041, + "learning_rate": 9.342653529956432e-06, + "loss": 2.0438, + "step": 2610 + }, + { + "epoch": 1.6066456426428735, + "grad_norm": 8.775309562683105, + "learning_rate": 9.33602339250898e-06, + "loss": 2.1071, + "step": 2611 + }, + { + "epoch": 1.6072609799246211, + "grad_norm": 6.115533828735352, + "learning_rate": 9.329393548216712e-06, + "loss": 1.6731, + "step": 2612 + }, + { + "epoch": 1.6078763172063688, + "grad_norm": 5.699953556060791, + "learning_rate": 9.322764000006798e-06, + "loss": 1.8297, + "step": 2613 + }, + { + "epoch": 1.6084916544881163, + "grad_norm": 7.2675981521606445, + "learning_rate": 9.316134750806273e-06, + "loss": 1.6013, + "step": 2614 + }, + { 
+ "epoch": 1.6091069917698637, + "grad_norm": 6.648097515106201, + "learning_rate": 9.309505803542046e-06, + "loss": 1.7351, + "step": 2615 + }, + { + "epoch": 1.6097223290516114, + "grad_norm": 7.056661605834961, + "learning_rate": 9.302877161140897e-06, + "loss": 1.6371, + "step": 2616 + }, + { + "epoch": 1.610337666333359, + "grad_norm": 6.2276997566223145, + "learning_rate": 9.296248826529467e-06, + "loss": 1.9189, + "step": 2617 + }, + { + "epoch": 1.6109530036151065, + "grad_norm": 6.8423919677734375, + "learning_rate": 9.289620802634248e-06, + "loss": 1.9557, + "step": 2618 + }, + { + "epoch": 1.611568340896854, + "grad_norm": 6.470641136169434, + "learning_rate": 9.282993092381626e-06, + "loss": 1.9205, + "step": 2619 + }, + { + "epoch": 1.6121836781786016, + "grad_norm": 7.131633758544922, + "learning_rate": 9.276365698697818e-06, + "loss": 1.9601, + "step": 2620 + }, + { + "epoch": 1.6127990154603493, + "grad_norm": 6.526155471801758, + "learning_rate": 9.269738624508918e-06, + "loss": 1.813, + "step": 2621 + }, + { + "epoch": 1.6134143527420968, + "grad_norm": 6.20374870300293, + "learning_rate": 9.263111872740866e-06, + "loss": 1.6614, + "step": 2622 + }, + { + "epoch": 1.6140296900238442, + "grad_norm": 6.025674343109131, + "learning_rate": 9.256485446319482e-06, + "loss": 1.8624, + "step": 2623 + }, + { + "epoch": 1.614645027305592, + "grad_norm": 7.366125106811523, + "learning_rate": 9.249859348170417e-06, + "loss": 1.8528, + "step": 2624 + }, + { + "epoch": 1.6152603645873396, + "grad_norm": 7.40745210647583, + "learning_rate": 9.24323358121919e-06, + "loss": 2.1257, + "step": 2625 + }, + { + "epoch": 1.615875701869087, + "grad_norm": 5.493039131164551, + "learning_rate": 9.236608148391174e-06, + "loss": 1.788, + "step": 2626 + }, + { + "epoch": 1.6164910391508345, + "grad_norm": 6.840283393859863, + "learning_rate": 9.229983052611598e-06, + "loss": 1.9847, + "step": 2627 + }, + { + "epoch": 1.6171063764325821, + "grad_norm": 7.370938777923584, + "learning_rate": 9.223358296805526e-06, + "loss": 1.8422, + "step": 2628 + }, + { + "epoch": 1.6177217137143298, + "grad_norm": 6.002562999725342, + "learning_rate": 9.216733883897884e-06, + "loss": 1.8337, + "step": 2629 + }, + { + "epoch": 1.6183370509960773, + "grad_norm": 7.043920993804932, + "learning_rate": 9.210109816813452e-06, + "loss": 1.9091, + "step": 2630 + }, + { + "epoch": 1.6189523882778247, + "grad_norm": 6.803098201751709, + "learning_rate": 9.20348609847685e-06, + "loss": 1.8075, + "step": 2631 + }, + { + "epoch": 1.6195677255595724, + "grad_norm": 7.544402599334717, + "learning_rate": 9.196862731812532e-06, + "loss": 1.851, + "step": 2632 + }, + { + "epoch": 1.6201830628413199, + "grad_norm": 5.814980506896973, + "learning_rate": 9.190239719744828e-06, + "loss": 2.1029, + "step": 2633 + }, + { + "epoch": 1.6207984001230673, + "grad_norm": 6.54205322265625, + "learning_rate": 9.183617065197879e-06, + "loss": 2.0035, + "step": 2634 + }, + { + "epoch": 1.621413737404815, + "grad_norm": 6.108112335205078, + "learning_rate": 9.176994771095687e-06, + "loss": 1.8876, + "step": 2635 + }, + { + "epoch": 1.6220290746865627, + "grad_norm": 6.591145038604736, + "learning_rate": 9.170372840362086e-06, + "loss": 1.743, + "step": 2636 + }, + { + "epoch": 1.62264441196831, + "grad_norm": 7.4654645919799805, + "learning_rate": 9.163751275920762e-06, + "loss": 1.7045, + "step": 2637 + }, + { + "epoch": 1.6232597492500576, + "grad_norm": 6.360475540161133, + "learning_rate": 9.15713008069522e-06, + "loss": 1.971, + "step": 2638 + 
}, + { + "epoch": 1.6238750865318052, + "grad_norm": 7.677999019622803, + "learning_rate": 9.150509257608816e-06, + "loss": 1.9649, + "step": 2639 + }, + { + "epoch": 1.624490423813553, + "grad_norm": 7.23573637008667, + "learning_rate": 9.143888809584741e-06, + "loss": 1.8727, + "step": 2640 + }, + { + "epoch": 1.6251057610953004, + "grad_norm": 6.725593090057373, + "learning_rate": 9.137268739546019e-06, + "loss": 2.0012, + "step": 2641 + }, + { + "epoch": 1.6257210983770478, + "grad_norm": 6.816574573516846, + "learning_rate": 9.130649050415499e-06, + "loss": 2.0077, + "step": 2642 + }, + { + "epoch": 1.6263364356587955, + "grad_norm": 7.563459873199463, + "learning_rate": 9.124029745115872e-06, + "loss": 1.9243, + "step": 2643 + }, + { + "epoch": 1.6269517729405432, + "grad_norm": 6.6698737144470215, + "learning_rate": 9.117410826569655e-06, + "loss": 1.9183, + "step": 2644 + }, + { + "epoch": 1.6275671102222906, + "grad_norm": 5.749674320220947, + "learning_rate": 9.110792297699202e-06, + "loss": 1.8517, + "step": 2645 + }, + { + "epoch": 1.628182447504038, + "grad_norm": 7.938089847564697, + "learning_rate": 9.104174161426674e-06, + "loss": 1.8791, + "step": 2646 + }, + { + "epoch": 1.6287977847857857, + "grad_norm": 6.111490249633789, + "learning_rate": 9.097556420674089e-06, + "loss": 1.9523, + "step": 2647 + }, + { + "epoch": 1.6294131220675334, + "grad_norm": 7.948398113250732, + "learning_rate": 9.090939078363265e-06, + "loss": 1.92, + "step": 2648 + }, + { + "epoch": 1.6300284593492809, + "grad_norm": 11.017815589904785, + "learning_rate": 9.084322137415855e-06, + "loss": 1.957, + "step": 2649 + }, + { + "epoch": 1.6306437966310283, + "grad_norm": 5.675527572631836, + "learning_rate": 9.07770560075333e-06, + "loss": 1.8934, + "step": 2650 + }, + { + "epoch": 1.631259133912776, + "grad_norm": 6.380974292755127, + "learning_rate": 9.071089471296996e-06, + "loss": 1.5716, + "step": 2651 + }, + { + "epoch": 1.6318744711945234, + "grad_norm": 6.3483781814575195, + "learning_rate": 9.064473751967958e-06, + "loss": 1.9966, + "step": 2652 + }, + { + "epoch": 1.632489808476271, + "grad_norm": 6.650658130645752, + "learning_rate": 9.057858445687155e-06, + "loss": 1.7704, + "step": 2653 + }, + { + "epoch": 1.6331051457580186, + "grad_norm": 6.744790554046631, + "learning_rate": 9.05124355537534e-06, + "loss": 1.9219, + "step": 2654 + }, + { + "epoch": 1.6337204830397662, + "grad_norm": 6.176998138427734, + "learning_rate": 9.044629083953082e-06, + "loss": 2.0366, + "step": 2655 + }, + { + "epoch": 1.6343358203215137, + "grad_norm": 6.814183235168457, + "learning_rate": 9.038015034340765e-06, + "loss": 1.9722, + "step": 2656 + }, + { + "epoch": 1.6349511576032612, + "grad_norm": 6.375484466552734, + "learning_rate": 9.031401409458582e-06, + "loss": 1.8644, + "step": 2657 + }, + { + "epoch": 1.6355664948850088, + "grad_norm": 8.287281036376953, + "learning_rate": 9.024788212226549e-06, + "loss": 2.0235, + "step": 2658 + }, + { + "epoch": 1.6361818321667565, + "grad_norm": 7.028730869293213, + "learning_rate": 9.018175445564485e-06, + "loss": 1.7135, + "step": 2659 + }, + { + "epoch": 1.636797169448504, + "grad_norm": 6.242979526519775, + "learning_rate": 9.011563112392018e-06, + "loss": 1.8564, + "step": 2660 + }, + { + "epoch": 1.6374125067302514, + "grad_norm": 5.876221179962158, + "learning_rate": 9.004951215628597e-06, + "loss": 2.0112, + "step": 2661 + }, + { + "epoch": 1.638027844011999, + "grad_norm": 8.010984420776367, + "learning_rate": 8.99833975819346e-06, + "loss": 1.7027, + 
"step": 2662 + }, + { + "epoch": 1.6386431812937468, + "grad_norm": 7.109739780426025, + "learning_rate": 8.991728743005664e-06, + "loss": 1.8984, + "step": 2663 + }, + { + "epoch": 1.6392585185754942, + "grad_norm": 7.401561737060547, + "learning_rate": 8.985118172984063e-06, + "loss": 1.5914, + "step": 2664 + }, + { + "epoch": 1.6398738558572417, + "grad_norm": 7.014801502227783, + "learning_rate": 8.978508051047326e-06, + "loss": 1.6529, + "step": 2665 + }, + { + "epoch": 1.6404891931389893, + "grad_norm": 7.087568283081055, + "learning_rate": 8.97189838011391e-06, + "loss": 1.7335, + "step": 2666 + }, + { + "epoch": 1.641104530420737, + "grad_norm": 6.254213809967041, + "learning_rate": 8.96528916310208e-06, + "loss": 1.8085, + "step": 2667 + }, + { + "epoch": 1.6417198677024845, + "grad_norm": 6.34023904800415, + "learning_rate": 8.958680402929902e-06, + "loss": 1.783, + "step": 2668 + }, + { + "epoch": 1.642335204984232, + "grad_norm": 6.155745506286621, + "learning_rate": 8.95207210251524e-06, + "loss": 1.8919, + "step": 2669 + }, + { + "epoch": 1.6429505422659796, + "grad_norm": 6.8014817237854, + "learning_rate": 8.945464264775748e-06, + "loss": 1.9199, + "step": 2670 + }, + { + "epoch": 1.6435658795477273, + "grad_norm": 6.742271900177002, + "learning_rate": 8.938856892628881e-06, + "loss": 1.8562, + "step": 2671 + }, + { + "epoch": 1.6441812168294745, + "grad_norm": 6.063194751739502, + "learning_rate": 8.932249988991894e-06, + "loss": 1.7178, + "step": 2672 + }, + { + "epoch": 1.6447965541112222, + "grad_norm": 9.047929763793945, + "learning_rate": 8.925643556781828e-06, + "loss": 1.8916, + "step": 2673 + }, + { + "epoch": 1.6454118913929698, + "grad_norm": 5.689531326293945, + "learning_rate": 8.919037598915513e-06, + "loss": 2.015, + "step": 2674 + }, + { + "epoch": 1.6460272286747173, + "grad_norm": 7.006716251373291, + "learning_rate": 8.912432118309581e-06, + "loss": 1.8887, + "step": 2675 + }, + { + "epoch": 1.6466425659564647, + "grad_norm": 6.062211036682129, + "learning_rate": 8.905827117880441e-06, + "loss": 1.822, + "step": 2676 + }, + { + "epoch": 1.6472579032382124, + "grad_norm": 5.727951526641846, + "learning_rate": 8.899222600544298e-06, + "loss": 1.8905, + "step": 2677 + }, + { + "epoch": 1.64787324051996, + "grad_norm": 5.599997520446777, + "learning_rate": 8.892618569217134e-06, + "loss": 1.9056, + "step": 2678 + }, + { + "epoch": 1.6484885778017075, + "grad_norm": 8.758955001831055, + "learning_rate": 8.886015026814736e-06, + "loss": 1.8148, + "step": 2679 + }, + { + "epoch": 1.649103915083455, + "grad_norm": 6.266496658325195, + "learning_rate": 8.879411976252652e-06, + "loss": 1.9318, + "step": 2680 + }, + { + "epoch": 1.6497192523652027, + "grad_norm": 6.8166117668151855, + "learning_rate": 8.872809420446225e-06, + "loss": 1.9019, + "step": 2681 + }, + { + "epoch": 1.6503345896469503, + "grad_norm": 8.354315757751465, + "learning_rate": 8.866207362310583e-06, + "loss": 1.8375, + "step": 2682 + }, + { + "epoch": 1.6509499269286978, + "grad_norm": 6.9503302574157715, + "learning_rate": 8.859605804760626e-06, + "loss": 1.9898, + "step": 2683 + }, + { + "epoch": 1.6515652642104452, + "grad_norm": 6.418771743774414, + "learning_rate": 8.853004750711038e-06, + "loss": 1.907, + "step": 2684 + }, + { + "epoch": 1.652180601492193, + "grad_norm": 6.463764190673828, + "learning_rate": 8.846404203076273e-06, + "loss": 1.8103, + "step": 2685 + }, + { + "epoch": 1.6527959387739406, + "grad_norm": 7.4729180335998535, + "learning_rate": 8.839804164770574e-06, + "loss": 
1.8188, + "step": 2686 + }, + { + "epoch": 1.653411276055688, + "grad_norm": 7.870051860809326, + "learning_rate": 8.833204638707948e-06, + "loss": 1.6781, + "step": 2687 + }, + { + "epoch": 1.6540266133374355, + "grad_norm": 7.16571569442749, + "learning_rate": 8.826605627802182e-06, + "loss": 2.0536, + "step": 2688 + }, + { + "epoch": 1.6546419506191832, + "grad_norm": 6.996913909912109, + "learning_rate": 8.820007134966837e-06, + "loss": 1.8205, + "step": 2689 + }, + { + "epoch": 1.6552572879009309, + "grad_norm": 7.064311981201172, + "learning_rate": 8.81340916311524e-06, + "loss": 1.8558, + "step": 2690 + }, + { + "epoch": 1.6558726251826783, + "grad_norm": 5.641462802886963, + "learning_rate": 8.806811715160485e-06, + "loss": 1.8133, + "step": 2691 + }, + { + "epoch": 1.6564879624644258, + "grad_norm": 6.525477886199951, + "learning_rate": 8.800214794015443e-06, + "loss": 1.7597, + "step": 2692 + }, + { + "epoch": 1.6571032997461734, + "grad_norm": 7.830978870391846, + "learning_rate": 8.793618402592753e-06, + "loss": 1.8506, + "step": 2693 + }, + { + "epoch": 1.6577186370279209, + "grad_norm": 5.91981315612793, + "learning_rate": 8.787022543804814e-06, + "loss": 1.9864, + "step": 2694 + }, + { + "epoch": 1.6583339743096683, + "grad_norm": 8.174846649169922, + "learning_rate": 8.780427220563788e-06, + "loss": 2.0442, + "step": 2695 + }, + { + "epoch": 1.658949311591416, + "grad_norm": 6.759416103363037, + "learning_rate": 8.77383243578161e-06, + "loss": 1.8764, + "step": 2696 + }, + { + "epoch": 1.6595646488731637, + "grad_norm": 8.166670799255371, + "learning_rate": 8.76723819236997e-06, + "loss": 1.841, + "step": 2697 + }, + { + "epoch": 1.6601799861549111, + "grad_norm": 4.820034503936768, + "learning_rate": 8.760644493240324e-06, + "loss": 1.9043, + "step": 2698 + }, + { + "epoch": 1.6607953234366586, + "grad_norm": 6.093724727630615, + "learning_rate": 8.754051341303875e-06, + "loss": 1.8951, + "step": 2699 + }, + { + "epoch": 1.6614106607184063, + "grad_norm": 6.528242111206055, + "learning_rate": 8.747458739471603e-06, + "loss": 1.5262, + "step": 2700 + }, + { + "epoch": 1.662025998000154, + "grad_norm": 6.6639862060546875, + "learning_rate": 8.740866690654234e-06, + "loss": 1.9772, + "step": 2701 + }, + { + "epoch": 1.6626413352819014, + "grad_norm": 6.44828987121582, + "learning_rate": 8.734275197762251e-06, + "loss": 1.891, + "step": 2702 + }, + { + "epoch": 1.6632566725636488, + "grad_norm": 8.291632652282715, + "learning_rate": 8.727684263705896e-06, + "loss": 1.9321, + "step": 2703 + }, + { + "epoch": 1.6638720098453965, + "grad_norm": 5.9389166831970215, + "learning_rate": 8.721093891395155e-06, + "loss": 1.9165, + "step": 2704 + }, + { + "epoch": 1.6644873471271442, + "grad_norm": 6.688567638397217, + "learning_rate": 8.714504083739775e-06, + "loss": 1.9735, + "step": 2705 + }, + { + "epoch": 1.6651026844088916, + "grad_norm": 6.5633344650268555, + "learning_rate": 8.707914843649247e-06, + "loss": 1.8069, + "step": 2706 + }, + { + "epoch": 1.665718021690639, + "grad_norm": 6.844727039337158, + "learning_rate": 8.701326174032821e-06, + "loss": 1.5115, + "step": 2707 + }, + { + "epoch": 1.6663333589723868, + "grad_norm": 7.403736114501953, + "learning_rate": 8.694738077799487e-06, + "loss": 1.9303, + "step": 2708 + }, + { + "epoch": 1.6669486962541344, + "grad_norm": 6.6052470207214355, + "learning_rate": 8.688150557857979e-06, + "loss": 1.8289, + "step": 2709 + }, + { + "epoch": 1.667564033535882, + "grad_norm": 8.163689613342285, + "learning_rate": 
8.681563617116786e-06, + "loss": 1.6961, + "step": 2710 + }, + { + "epoch": 1.6681793708176293, + "grad_norm": 6.2227253913879395, + "learning_rate": 8.674977258484136e-06, + "loss": 1.9146, + "step": 2711 + }, + { + "epoch": 1.668794708099377, + "grad_norm": 5.757627487182617, + "learning_rate": 8.668391484868004e-06, + "loss": 1.8343, + "step": 2712 + }, + { + "epoch": 1.6694100453811245, + "grad_norm": 6.966018199920654, + "learning_rate": 8.661806299176095e-06, + "loss": 1.9441, + "step": 2713 + }, + { + "epoch": 1.670025382662872, + "grad_norm": 6.186436176300049, + "learning_rate": 8.655221704315868e-06, + "loss": 2.0233, + "step": 2714 + }, + { + "epoch": 1.6706407199446196, + "grad_norm": 6.021646976470947, + "learning_rate": 8.648637703194515e-06, + "loss": 1.9571, + "step": 2715 + }, + { + "epoch": 1.6712560572263673, + "grad_norm": 5.674489498138428, + "learning_rate": 8.642054298718966e-06, + "loss": 1.8121, + "step": 2716 + }, + { + "epoch": 1.6718713945081147, + "grad_norm": 6.70950984954834, + "learning_rate": 8.635471493795895e-06, + "loss": 2.0382, + "step": 2717 + }, + { + "epoch": 1.6724867317898622, + "grad_norm": 6.380222320556641, + "learning_rate": 8.628889291331696e-06, + "loss": 2.0779, + "step": 2718 + }, + { + "epoch": 1.6731020690716099, + "grad_norm": 5.892037868499756, + "learning_rate": 8.622307694232508e-06, + "loss": 2.047, + "step": 2719 + }, + { + "epoch": 1.6737174063533575, + "grad_norm": 7.049192428588867, + "learning_rate": 8.6157267054042e-06, + "loss": 1.9908, + "step": 2720 + }, + { + "epoch": 1.674332743635105, + "grad_norm": 6.032291889190674, + "learning_rate": 8.609146327752376e-06, + "loss": 1.9058, + "step": 2721 + }, + { + "epoch": 1.6749480809168524, + "grad_norm": 7.571635723114014, + "learning_rate": 8.602566564182366e-06, + "loss": 1.7216, + "step": 2722 + }, + { + "epoch": 1.6755634181986, + "grad_norm": 6.187950134277344, + "learning_rate": 8.595987417599225e-06, + "loss": 1.8228, + "step": 2723 + }, + { + "epoch": 1.6761787554803478, + "grad_norm": 6.269375324249268, + "learning_rate": 8.589408890907746e-06, + "loss": 1.8475, + "step": 2724 + }, + { + "epoch": 1.6767940927620952, + "grad_norm": 6.1907877922058105, + "learning_rate": 8.582830987012442e-06, + "loss": 1.704, + "step": 2725 + }, + { + "epoch": 1.6774094300438427, + "grad_norm": 6.879931449890137, + "learning_rate": 8.576253708817553e-06, + "loss": 1.8076, + "step": 2726 + }, + { + "epoch": 1.6780247673255904, + "grad_norm": 7.735037803649902, + "learning_rate": 8.569677059227033e-06, + "loss": 1.8923, + "step": 2727 + }, + { + "epoch": 1.678640104607338, + "grad_norm": 7.424560070037842, + "learning_rate": 8.563101041144575e-06, + "loss": 1.6331, + "step": 2728 + }, + { + "epoch": 1.6792554418890855, + "grad_norm": 7.998291015625, + "learning_rate": 8.556525657473586e-06, + "loss": 1.6869, + "step": 2729 + }, + { + "epoch": 1.679870779170833, + "grad_norm": 6.198606491088867, + "learning_rate": 8.549950911117186e-06, + "loss": 2.0878, + "step": 2730 + }, + { + "epoch": 1.6804861164525806, + "grad_norm": 6.016180515289307, + "learning_rate": 8.543376804978223e-06, + "loss": 1.725, + "step": 2731 + }, + { + "epoch": 1.6811014537343283, + "grad_norm": 7.663485527038574, + "learning_rate": 8.536803341959263e-06, + "loss": 1.9553, + "step": 2732 + }, + { + "epoch": 1.6817167910160757, + "grad_norm": 6.18256139755249, + "learning_rate": 8.530230524962578e-06, + "loss": 2.094, + "step": 2733 + }, + { + "epoch": 1.6823321282978232, + "grad_norm": 6.455704689025879, + 
"learning_rate": 8.52365835689016e-06, + "loss": 1.819, + "step": 2734 + }, + { + "epoch": 1.6829474655795709, + "grad_norm": 6.949250221252441, + "learning_rate": 8.517086840643722e-06, + "loss": 1.8836, + "step": 2735 + }, + { + "epoch": 1.6835628028613183, + "grad_norm": 5.383044242858887, + "learning_rate": 8.51051597912468e-06, + "loss": 1.9196, + "step": 2736 + }, + { + "epoch": 1.6841781401430658, + "grad_norm": 6.928992748260498, + "learning_rate": 8.503945775234157e-06, + "loss": 1.9954, + "step": 2737 + }, + { + "epoch": 1.6847934774248134, + "grad_norm": 7.107481956481934, + "learning_rate": 8.497376231873e-06, + "loss": 1.6628, + "step": 2738 + }, + { + "epoch": 1.6854088147065611, + "grad_norm": 7.228597164154053, + "learning_rate": 8.490807351941753e-06, + "loss": 2.068, + "step": 2739 + }, + { + "epoch": 1.6860241519883086, + "grad_norm": 6.589219570159912, + "learning_rate": 8.484239138340674e-06, + "loss": 1.886, + "step": 2740 + }, + { + "epoch": 1.686639489270056, + "grad_norm": 8.230399131774902, + "learning_rate": 8.477671593969717e-06, + "loss": 2.0219, + "step": 2741 + }, + { + "epoch": 1.6872548265518037, + "grad_norm": 6.186456203460693, + "learning_rate": 8.471104721728551e-06, + "loss": 1.8312, + "step": 2742 + }, + { + "epoch": 1.6878701638335514, + "grad_norm": 6.3249897956848145, + "learning_rate": 8.464538524516546e-06, + "loss": 1.8364, + "step": 2743 + }, + { + "epoch": 1.6884855011152988, + "grad_norm": 8.234139442443848, + "learning_rate": 8.457973005232765e-06, + "loss": 1.9339, + "step": 2744 + }, + { + "epoch": 1.6891008383970463, + "grad_norm": 6.274057388305664, + "learning_rate": 8.451408166775987e-06, + "loss": 1.87, + "step": 2745 + }, + { + "epoch": 1.689716175678794, + "grad_norm": 7.383279800415039, + "learning_rate": 8.444844012044681e-06, + "loss": 1.9682, + "step": 2746 + }, + { + "epoch": 1.6903315129605416, + "grad_norm": 6.508224964141846, + "learning_rate": 8.438280543937012e-06, + "loss": 1.9697, + "step": 2747 + }, + { + "epoch": 1.690946850242289, + "grad_norm": 7.916470050811768, + "learning_rate": 8.431717765350842e-06, + "loss": 1.9917, + "step": 2748 + }, + { + "epoch": 1.6915621875240365, + "grad_norm": 6.390326499938965, + "learning_rate": 8.425155679183738e-06, + "loss": 1.9248, + "step": 2749 + }, + { + "epoch": 1.6921775248057842, + "grad_norm": 6.341187953948975, + "learning_rate": 8.418594288332957e-06, + "loss": 1.932, + "step": 2750 + }, + { + "epoch": 1.6927928620875319, + "grad_norm": 7.852671146392822, + "learning_rate": 8.412033595695436e-06, + "loss": 2.0061, + "step": 2751 + }, + { + "epoch": 1.6934081993692793, + "grad_norm": 7.153406620025635, + "learning_rate": 8.405473604167826e-06, + "loss": 1.8539, + "step": 2752 + }, + { + "epoch": 1.6940235366510268, + "grad_norm": 6.402141571044922, + "learning_rate": 8.398914316646452e-06, + "loss": 1.8685, + "step": 2753 + }, + { + "epoch": 1.6946388739327745, + "grad_norm": 6.435663223266602, + "learning_rate": 8.392355736027338e-06, + "loss": 1.8664, + "step": 2754 + }, + { + "epoch": 1.695254211214522, + "grad_norm": 5.210092544555664, + "learning_rate": 8.385797865206178e-06, + "loss": 1.9205, + "step": 2755 + }, + { + "epoch": 1.6958695484962694, + "grad_norm": 5.166592121124268, + "learning_rate": 8.379240707078385e-06, + "loss": 1.7219, + "step": 2756 + }, + { + "epoch": 1.696484885778017, + "grad_norm": 5.850072860717773, + "learning_rate": 8.372684264539025e-06, + "loss": 1.9926, + "step": 2757 + }, + { + "epoch": 1.6971002230597647, + "grad_norm": 
6.205296516418457, + "learning_rate": 8.366128540482861e-06, + "loss": 1.8208, + "step": 2758 + }, + { + "epoch": 1.6977155603415122, + "grad_norm": 8.064471244812012, + "learning_rate": 8.359573537804347e-06, + "loss": 1.6681, + "step": 2759 + }, + { + "epoch": 1.6983308976232596, + "grad_norm": 7.116405487060547, + "learning_rate": 8.353019259397609e-06, + "loss": 1.6615, + "step": 2760 + }, + { + "epoch": 1.6989462349050073, + "grad_norm": 6.922061443328857, + "learning_rate": 8.346465708156449e-06, + "loss": 1.8604, + "step": 2761 + }, + { + "epoch": 1.699561572186755, + "grad_norm": 5.894085884094238, + "learning_rate": 8.339912886974355e-06, + "loss": 1.9276, + "step": 2762 + }, + { + "epoch": 1.7001769094685024, + "grad_norm": 5.556724548339844, + "learning_rate": 8.333360798744496e-06, + "loss": 1.8017, + "step": 2763 + }, + { + "epoch": 1.7007922467502499, + "grad_norm": 8.093595504760742, + "learning_rate": 8.326809446359711e-06, + "loss": 1.9774, + "step": 2764 + }, + { + "epoch": 1.7014075840319975, + "grad_norm": 5.481853008270264, + "learning_rate": 8.320258832712513e-06, + "loss": 1.9324, + "step": 2765 + }, + { + "epoch": 1.7020229213137452, + "grad_norm": 6.876484394073486, + "learning_rate": 8.313708960695094e-06, + "loss": 1.9895, + "step": 2766 + }, + { + "epoch": 1.7026382585954927, + "grad_norm": 8.975723266601562, + "learning_rate": 8.307159833199317e-06, + "loss": 1.7848, + "step": 2767 + }, + { + "epoch": 1.7032535958772401, + "grad_norm": 5.864856719970703, + "learning_rate": 8.300611453116714e-06, + "loss": 1.9563, + "step": 2768 + }, + { + "epoch": 1.7038689331589878, + "grad_norm": 4.695316791534424, + "learning_rate": 8.294063823338486e-06, + "loss": 1.8241, + "step": 2769 + }, + { + "epoch": 1.7044842704407355, + "grad_norm": 5.905238151550293, + "learning_rate": 8.287516946755514e-06, + "loss": 2.0614, + "step": 2770 + }, + { + "epoch": 1.705099607722483, + "grad_norm": 7.026803493499756, + "learning_rate": 8.28097082625833e-06, + "loss": 1.7822, + "step": 2771 + }, + { + "epoch": 1.7057149450042304, + "grad_norm": 5.908527374267578, + "learning_rate": 8.274425464737139e-06, + "loss": 1.8078, + "step": 2772 + }, + { + "epoch": 1.706330282285978, + "grad_norm": 5.3596720695495605, + "learning_rate": 8.26788086508182e-06, + "loss": 1.8711, + "step": 2773 + }, + { + "epoch": 1.7069456195677257, + "grad_norm": 5.909498691558838, + "learning_rate": 8.261337030181901e-06, + "loss": 1.7718, + "step": 2774 + }, + { + "epoch": 1.707560956849473, + "grad_norm": 6.461374759674072, + "learning_rate": 8.254793962926579e-06, + "loss": 1.7976, + "step": 2775 + }, + { + "epoch": 1.7081762941312206, + "grad_norm": 6.641840934753418, + "learning_rate": 8.248251666204711e-06, + "loss": 1.9104, + "step": 2776 + }, + { + "epoch": 1.7087916314129683, + "grad_norm": 6.307281494140625, + "learning_rate": 8.241710142904821e-06, + "loss": 1.907, + "step": 2777 + }, + { + "epoch": 1.7094069686947158, + "grad_norm": 6.3059468269348145, + "learning_rate": 8.23516939591508e-06, + "loss": 1.8846, + "step": 2778 + }, + { + "epoch": 1.7100223059764632, + "grad_norm": 6.000069618225098, + "learning_rate": 8.228629428123319e-06, + "loss": 1.7496, + "step": 2779 + }, + { + "epoch": 1.7106376432582109, + "grad_norm": 5.8398566246032715, + "learning_rate": 8.222090242417036e-06, + "loss": 1.8822, + "step": 2780 + }, + { + "epoch": 1.7112529805399586, + "grad_norm": 5.9609832763671875, + "learning_rate": 8.215551841683369e-06, + "loss": 1.9132, + "step": 2781 + }, + { + "epoch": 
1.711868317821706, + "grad_norm": 14.550946235656738, + "learning_rate": 8.209014228809117e-06, + "loss": 2.0031, + "step": 2782 + }, + { + "epoch": 1.7124836551034535, + "grad_norm": 6.9422149658203125, + "learning_rate": 8.202477406680725e-06, + "loss": 1.8069, + "step": 2783 + }, + { + "epoch": 1.7130989923852011, + "grad_norm": 7.9280548095703125, + "learning_rate": 8.195941378184303e-06, + "loss": 2.0022, + "step": 2784 + }, + { + "epoch": 1.7137143296669488, + "grad_norm": 5.560222625732422, + "learning_rate": 8.189406146205594e-06, + "loss": 2.0714, + "step": 2785 + }, + { + "epoch": 1.7143296669486963, + "grad_norm": 5.939920902252197, + "learning_rate": 8.182871713629998e-06, + "loss": 1.8882, + "step": 2786 + }, + { + "epoch": 1.7149450042304437, + "grad_norm": 5.825139999389648, + "learning_rate": 8.17633808334256e-06, + "loss": 1.905, + "step": 2787 + }, + { + "epoch": 1.7155603415121914, + "grad_norm": 6.989468574523926, + "learning_rate": 8.169805258227978e-06, + "loss": 1.9201, + "step": 2788 + }, + { + "epoch": 1.716175678793939, + "grad_norm": 6.3336873054504395, + "learning_rate": 8.16327324117058e-06, + "loss": 1.6946, + "step": 2789 + }, + { + "epoch": 1.7167910160756865, + "grad_norm": 6.720188617706299, + "learning_rate": 8.156742035054343e-06, + "loss": 1.9699, + "step": 2790 + }, + { + "epoch": 1.717406353357434, + "grad_norm": 6.590327739715576, + "learning_rate": 8.150211642762895e-06, + "loss": 1.7764, + "step": 2791 + }, + { + "epoch": 1.7180216906391816, + "grad_norm": 8.003299713134766, + "learning_rate": 8.143682067179498e-06, + "loss": 1.9427, + "step": 2792 + }, + { + "epoch": 1.7186370279209293, + "grad_norm": 6.104455947875977, + "learning_rate": 8.137153311187044e-06, + "loss": 1.8001, + "step": 2793 + }, + { + "epoch": 1.7192523652026768, + "grad_norm": 6.596056938171387, + "learning_rate": 8.130625377668084e-06, + "loss": 1.7597, + "step": 2794 + }, + { + "epoch": 1.7198677024844242, + "grad_norm": 5.561343669891357, + "learning_rate": 8.124098269504786e-06, + "loss": 1.8288, + "step": 2795 + }, + { + "epoch": 1.720483039766172, + "grad_norm": 6.195290565490723, + "learning_rate": 8.117571989578969e-06, + "loss": 1.7681, + "step": 2796 + }, + { + "epoch": 1.7210983770479193, + "grad_norm": 6.021491050720215, + "learning_rate": 8.111046540772064e-06, + "loss": 1.7692, + "step": 2797 + }, + { + "epoch": 1.7217137143296668, + "grad_norm": 5.646200656890869, + "learning_rate": 8.10452192596517e-06, + "loss": 1.8789, + "step": 2798 + }, + { + "epoch": 1.7223290516114145, + "grad_norm": 5.714975833892822, + "learning_rate": 8.097998148038986e-06, + "loss": 1.9182, + "step": 2799 + }, + { + "epoch": 1.7229443888931621, + "grad_norm": 7.301562309265137, + "learning_rate": 8.091475209873853e-06, + "loss": 1.8219, + "step": 2800 + }, + { + "epoch": 1.7235597261749096, + "grad_norm": 5.924217700958252, + "learning_rate": 8.084953114349745e-06, + "loss": 1.9052, + "step": 2801 + }, + { + "epoch": 1.724175063456657, + "grad_norm": 5.582614421844482, + "learning_rate": 8.078431864346265e-06, + "loss": 2.0421, + "step": 2802 + }, + { + "epoch": 1.7247904007384047, + "grad_norm": 6.881945610046387, + "learning_rate": 8.07191146274263e-06, + "loss": 2.2777, + "step": 2803 + }, + { + "epoch": 1.7254057380201524, + "grad_norm": 6.66041898727417, + "learning_rate": 8.06539191241769e-06, + "loss": 1.7578, + "step": 2804 + }, + { + "epoch": 1.7260210753018999, + "grad_norm": 7.515273571014404, + "learning_rate": 8.058873216249927e-06, + "loss": 1.9447, + "step": 2805 + }, + 
{ + "epoch": 1.7266364125836473, + "grad_norm": 6.461984157562256, + "learning_rate": 8.052355377117438e-06, + "loss": 1.9702, + "step": 2806 + }, + { + "epoch": 1.727251749865395, + "grad_norm": 6.264524936676025, + "learning_rate": 8.045838397897932e-06, + "loss": 1.7191, + "step": 2807 + }, + { + "epoch": 1.7278670871471427, + "grad_norm": 6.996860980987549, + "learning_rate": 8.039322281468765e-06, + "loss": 1.9653, + "step": 2808 + }, + { + "epoch": 1.72848242442889, + "grad_norm": 6.5339813232421875, + "learning_rate": 8.032807030706882e-06, + "loss": 2.1127, + "step": 2809 + }, + { + "epoch": 1.7290977617106376, + "grad_norm": 6.825698375701904, + "learning_rate": 8.026292648488864e-06, + "loss": 1.9719, + "step": 2810 + }, + { + "epoch": 1.7297130989923852, + "grad_norm": 6.51813268661499, + "learning_rate": 8.019779137690906e-06, + "loss": 2.0888, + "step": 2811 + }, + { + "epoch": 1.730328436274133, + "grad_norm": 6.545801162719727, + "learning_rate": 8.013266501188816e-06, + "loss": 1.7149, + "step": 2812 + }, + { + "epoch": 1.7309437735558804, + "grad_norm": 6.836146831512451, + "learning_rate": 8.006754741858017e-06, + "loss": 1.9727, + "step": 2813 + }, + { + "epoch": 1.7315591108376278, + "grad_norm": 6.5350189208984375, + "learning_rate": 8.000243862573536e-06, + "loss": 1.7651, + "step": 2814 + }, + { + "epoch": 1.7321744481193755, + "grad_norm": 7.254685878753662, + "learning_rate": 7.993733866210029e-06, + "loss": 1.8864, + "step": 2815 + }, + { + "epoch": 1.732789785401123, + "grad_norm": 6.513028144836426, + "learning_rate": 7.98722475564175e-06, + "loss": 1.7008, + "step": 2816 + }, + { + "epoch": 1.7334051226828704, + "grad_norm": 7.063111305236816, + "learning_rate": 7.980716533742558e-06, + "loss": 1.6788, + "step": 2817 + }, + { + "epoch": 1.734020459964618, + "grad_norm": 8.998428344726562, + "learning_rate": 7.97420920338594e-06, + "loss": 1.9751, + "step": 2818 + }, + { + "epoch": 1.7346357972463657, + "grad_norm": 6.060565948486328, + "learning_rate": 7.967702767444963e-06, + "loss": 2.0313, + "step": 2819 + }, + { + "epoch": 1.7352511345281132, + "grad_norm": 7.996494770050049, + "learning_rate": 7.96119722879232e-06, + "loss": 2.0803, + "step": 2820 + }, + { + "epoch": 1.7358664718098606, + "grad_norm": 7.089321136474609, + "learning_rate": 7.954692590300288e-06, + "loss": 1.8707, + "step": 2821 + }, + { + "epoch": 1.7364818090916083, + "grad_norm": 5.931098461151123, + "learning_rate": 7.948188854840773e-06, + "loss": 2.0256, + "step": 2822 + }, + { + "epoch": 1.737097146373356, + "grad_norm": 6.15126371383667, + "learning_rate": 7.941686025285257e-06, + "loss": 1.839, + "step": 2823 + }, + { + "epoch": 1.7377124836551034, + "grad_norm": 6.996822834014893, + "learning_rate": 7.935184104504832e-06, + "loss": 1.8574, + "step": 2824 + }, + { + "epoch": 1.738327820936851, + "grad_norm": 6.814410209655762, + "learning_rate": 7.928683095370195e-06, + "loss": 1.8946, + "step": 2825 + }, + { + "epoch": 1.7389431582185986, + "grad_norm": 6.75179386138916, + "learning_rate": 7.922183000751635e-06, + "loss": 1.714, + "step": 2826 + }, + { + "epoch": 1.7395584955003462, + "grad_norm": 7.56758451461792, + "learning_rate": 7.915683823519031e-06, + "loss": 1.8691, + "step": 2827 + }, + { + "epoch": 1.7401738327820937, + "grad_norm": 6.604315757751465, + "learning_rate": 7.909185566541866e-06, + "loss": 1.7549, + "step": 2828 + }, + { + "epoch": 1.7407891700638412, + "grad_norm": 7.348072052001953, + "learning_rate": 7.902688232689212e-06, + "loss": 1.9617, + "step": 2829 
+ }, + { + "epoch": 1.7414045073455888, + "grad_norm": 9.329558372497559, + "learning_rate": 7.89619182482974e-06, + "loss": 1.7995, + "step": 2830 + }, + { + "epoch": 1.7420198446273365, + "grad_norm": 6.998362064361572, + "learning_rate": 7.889696345831696e-06, + "loss": 1.8258, + "step": 2831 + }, + { + "epoch": 1.742635181909084, + "grad_norm": 6.626535892486572, + "learning_rate": 7.88320179856294e-06, + "loss": 1.8596, + "step": 2832 + }, + { + "epoch": 1.7432505191908314, + "grad_norm": 5.636691093444824, + "learning_rate": 7.876708185890901e-06, + "loss": 1.7219, + "step": 2833 + }, + { + "epoch": 1.743865856472579, + "grad_norm": 5.685457706451416, + "learning_rate": 7.870215510682604e-06, + "loss": 1.8919, + "step": 2834 + }, + { + "epoch": 1.7444811937543268, + "grad_norm": 5.7673187255859375, + "learning_rate": 7.863723775804651e-06, + "loss": 1.8033, + "step": 2835 + }, + { + "epoch": 1.745096531036074, + "grad_norm": 6.47315788269043, + "learning_rate": 7.857232984123249e-06, + "loss": 1.9626, + "step": 2836 + }, + { + "epoch": 1.7457118683178217, + "grad_norm": 5.128846168518066, + "learning_rate": 7.850743138504165e-06, + "loss": 1.9037, + "step": 2837 + }, + { + "epoch": 1.7463272055995693, + "grad_norm": 6.738183975219727, + "learning_rate": 7.84425424181276e-06, + "loss": 1.9494, + "step": 2838 + }, + { + "epoch": 1.7469425428813168, + "grad_norm": 5.815114498138428, + "learning_rate": 7.83776629691398e-06, + "loss": 2.0084, + "step": 2839 + }, + { + "epoch": 1.7475578801630642, + "grad_norm": 5.449089050292969, + "learning_rate": 7.831279306672343e-06, + "loss": 1.8784, + "step": 2840 + }, + { + "epoch": 1.748173217444812, + "grad_norm": 6.166156768798828, + "learning_rate": 7.824793273951945e-06, + "loss": 1.7631, + "step": 2841 + }, + { + "epoch": 1.7487885547265596, + "grad_norm": 7.416117191314697, + "learning_rate": 7.818308201616465e-06, + "loss": 1.795, + "step": 2842 + }, + { + "epoch": 1.749403892008307, + "grad_norm": 6.911072254180908, + "learning_rate": 7.811824092529155e-06, + "loss": 1.8391, + "step": 2843 + }, + { + "epoch": 1.7500192292900545, + "grad_norm": 6.061740398406982, + "learning_rate": 7.805340949552846e-06, + "loss": 1.9863, + "step": 2844 + }, + { + "epoch": 1.7506345665718022, + "grad_norm": 5.9164958000183105, + "learning_rate": 7.798858775549927e-06, + "loss": 1.7144, + "step": 2845 + }, + { + "epoch": 1.7512499038535498, + "grad_norm": 7.023952484130859, + "learning_rate": 7.792377573382385e-06, + "loss": 1.7614, + "step": 2846 + }, + { + "epoch": 1.7518652411352973, + "grad_norm": 5.740278720855713, + "learning_rate": 7.785897345911755e-06, + "loss": 1.9483, + "step": 2847 + }, + { + "epoch": 1.7524805784170447, + "grad_norm": 5.436533451080322, + "learning_rate": 7.779418095999151e-06, + "loss": 2.0051, + "step": 2848 + }, + { + "epoch": 1.7530959156987924, + "grad_norm": 6.777318000793457, + "learning_rate": 7.772939826505254e-06, + "loss": 1.9252, + "step": 2849 + }, + { + "epoch": 1.75371125298054, + "grad_norm": 6.541757583618164, + "learning_rate": 7.76646254029032e-06, + "loss": 1.988, + "step": 2850 + }, + { + "epoch": 1.7543265902622875, + "grad_norm": 6.121025562286377, + "learning_rate": 7.759986240214155e-06, + "loss": 1.9384, + "step": 2851 + }, + { + "epoch": 1.754941927544035, + "grad_norm": 6.26165246963501, + "learning_rate": 7.753510929136137e-06, + "loss": 2.031, + "step": 2852 + }, + { + "epoch": 1.7555572648257827, + "grad_norm": 5.240309238433838, + "learning_rate": 7.747036609915217e-06, + "loss": 1.7741, + "step": 
2853 + }, + { + "epoch": 1.7561726021075303, + "grad_norm": 6.34402322769165, + "learning_rate": 7.740563285409898e-06, + "loss": 1.9369, + "step": 2854 + }, + { + "epoch": 1.7567879393892778, + "grad_norm": 6.500885009765625, + "learning_rate": 7.734090958478241e-06, + "loss": 1.7265, + "step": 2855 + }, + { + "epoch": 1.7574032766710252, + "grad_norm": 6.147590160369873, + "learning_rate": 7.727619631977869e-06, + "loss": 1.9588, + "step": 2856 + }, + { + "epoch": 1.758018613952773, + "grad_norm": 5.684258937835693, + "learning_rate": 7.721149308765973e-06, + "loss": 1.9375, + "step": 2857 + }, + { + "epoch": 1.7586339512345204, + "grad_norm": 6.142180442810059, + "learning_rate": 7.714679991699292e-06, + "loss": 1.9267, + "step": 2858 + }, + { + "epoch": 1.7592492885162678, + "grad_norm": 5.93076753616333, + "learning_rate": 7.708211683634112e-06, + "loss": 2.0401, + "step": 2859 + }, + { + "epoch": 1.7598646257980155, + "grad_norm": 5.2672882080078125, + "learning_rate": 7.701744387426297e-06, + "loss": 1.8976, + "step": 2860 + }, + { + "epoch": 1.7604799630797632, + "grad_norm": 7.271423816680908, + "learning_rate": 7.695278105931244e-06, + "loss": 2.0151, + "step": 2861 + }, + { + "epoch": 1.7610953003615106, + "grad_norm": 7.848180294036865, + "learning_rate": 7.68881284200391e-06, + "loss": 1.8137, + "step": 2862 + }, + { + "epoch": 1.761710637643258, + "grad_norm": 7.766068935394287, + "learning_rate": 7.6823485984988e-06, + "loss": 1.8216, + "step": 2863 + }, + { + "epoch": 1.7623259749250058, + "grad_norm": 5.243047714233398, + "learning_rate": 7.675885378269977e-06, + "loss": 1.9563, + "step": 2864 + }, + { + "epoch": 1.7629413122067534, + "grad_norm": 5.002991199493408, + "learning_rate": 7.669423184171037e-06, + "loss": 1.8245, + "step": 2865 + }, + { + "epoch": 1.7635566494885009, + "grad_norm": 6.296370506286621, + "learning_rate": 7.662962019055133e-06, + "loss": 1.9435, + "step": 2866 + }, + { + "epoch": 1.7641719867702483, + "grad_norm": 6.357257843017578, + "learning_rate": 7.656501885774963e-06, + "loss": 1.8616, + "step": 2867 + }, + { + "epoch": 1.764787324051996, + "grad_norm": 5.564073085784912, + "learning_rate": 7.650042787182773e-06, + "loss": 1.9139, + "step": 2868 + }, + { + "epoch": 1.7654026613337437, + "grad_norm": 6.351842880249023, + "learning_rate": 7.64358472613034e-06, + "loss": 1.9136, + "step": 2869 + }, + { + "epoch": 1.7660179986154911, + "grad_norm": 7.4939470291137695, + "learning_rate": 7.637127705468992e-06, + "loss": 1.7815, + "step": 2870 + }, + { + "epoch": 1.7666333358972386, + "grad_norm": 8.543089866638184, + "learning_rate": 7.6306717280496e-06, + "loss": 2.0321, + "step": 2871 + }, + { + "epoch": 1.7672486731789863, + "grad_norm": 6.421932220458984, + "learning_rate": 7.624216796722567e-06, + "loss": 1.8147, + "step": 2872 + }, + { + "epoch": 1.767864010460734, + "grad_norm": 5.7502970695495605, + "learning_rate": 7.617762914337837e-06, + "loss": 1.9818, + "step": 2873 + }, + { + "epoch": 1.7684793477424814, + "grad_norm": 6.2833571434021, + "learning_rate": 7.611310083744897e-06, + "loss": 2.052, + "step": 2874 + }, + { + "epoch": 1.7690946850242288, + "grad_norm": 5.664073467254639, + "learning_rate": 7.604858307792759e-06, + "loss": 1.9172, + "step": 2875 + }, + { + "epoch": 1.7697100223059765, + "grad_norm": 6.894599914550781, + "learning_rate": 7.598407589329975e-06, + "loss": 1.8382, + "step": 2876 + }, + { + "epoch": 1.770325359587724, + "grad_norm": 5.621814250946045, + "learning_rate": 7.591957931204629e-06, + "loss": 1.8098, + 
"step": 2877 + }, + { + "epoch": 1.7709406968694714, + "grad_norm": 5.637104034423828, + "learning_rate": 7.5855093362643425e-06, + "loss": 2.032, + "step": 2878 + }, + { + "epoch": 1.771556034151219, + "grad_norm": 5.9342756271362305, + "learning_rate": 7.579061807356256e-06, + "loss": 1.9608, + "step": 2879 + }, + { + "epoch": 1.7721713714329668, + "grad_norm": 6.254754543304443, + "learning_rate": 7.572615347327048e-06, + "loss": 1.8701, + "step": 2880 + }, + { + "epoch": 1.7727867087147142, + "grad_norm": 6.351192951202393, + "learning_rate": 7.566169959022925e-06, + "loss": 1.7841, + "step": 2881 + }, + { + "epoch": 1.7734020459964617, + "grad_norm": 5.831969261169434, + "learning_rate": 7.55972564528962e-06, + "loss": 1.8471, + "step": 2882 + }, + { + "epoch": 1.7740173832782093, + "grad_norm": 5.229042053222656, + "learning_rate": 7.553282408972382e-06, + "loss": 1.7605, + "step": 2883 + }, + { + "epoch": 1.774632720559957, + "grad_norm": 6.845451831817627, + "learning_rate": 7.546840252915996e-06, + "loss": 1.7884, + "step": 2884 + }, + { + "epoch": 1.7752480578417045, + "grad_norm": 7.87675142288208, + "learning_rate": 7.540399179964769e-06, + "loss": 1.9415, + "step": 2885 + }, + { + "epoch": 1.775863395123452, + "grad_norm": 5.8365020751953125, + "learning_rate": 7.533959192962525e-06, + "loss": 1.8408, + "step": 2886 + }, + { + "epoch": 1.7764787324051996, + "grad_norm": 7.577380657196045, + "learning_rate": 7.527520294752607e-06, + "loss": 1.8157, + "step": 2887 + }, + { + "epoch": 1.7770940696869473, + "grad_norm": 5.470252990722656, + "learning_rate": 7.521082488177889e-06, + "loss": 1.9472, + "step": 2888 + }, + { + "epoch": 1.7777094069686947, + "grad_norm": 6.355104923248291, + "learning_rate": 7.514645776080747e-06, + "loss": 1.8797, + "step": 2889 + }, + { + "epoch": 1.7783247442504422, + "grad_norm": 7.003969192504883, + "learning_rate": 7.508210161303085e-06, + "loss": 1.9614, + "step": 2890 + }, + { + "epoch": 1.7789400815321899, + "grad_norm": 5.08639669418335, + "learning_rate": 7.501775646686316e-06, + "loss": 1.7705, + "step": 2891 + }, + { + "epoch": 1.7795554188139375, + "grad_norm": 6.7761454582214355, + "learning_rate": 7.495342235071377e-06, + "loss": 1.9811, + "step": 2892 + }, + { + "epoch": 1.780170756095685, + "grad_norm": 5.4904069900512695, + "learning_rate": 7.488909929298706e-06, + "loss": 1.9244, + "step": 2893 + }, + { + "epoch": 1.7807860933774324, + "grad_norm": 7.074924945831299, + "learning_rate": 7.482478732208256e-06, + "loss": 1.9926, + "step": 2894 + }, + { + "epoch": 1.78140143065918, + "grad_norm": 6.355985641479492, + "learning_rate": 7.476048646639498e-06, + "loss": 1.6788, + "step": 2895 + }, + { + "epoch": 1.7820167679409278, + "grad_norm": 7.054625511169434, + "learning_rate": 7.469619675431406e-06, + "loss": 1.9456, + "step": 2896 + }, + { + "epoch": 1.7826321052226752, + "grad_norm": 8.333847999572754, + "learning_rate": 7.463191821422462e-06, + "loss": 1.9524, + "step": 2897 + }, + { + "epoch": 1.7832474425044227, + "grad_norm": 6.874453067779541, + "learning_rate": 7.456765087450652e-06, + "loss": 1.9672, + "step": 2898 + }, + { + "epoch": 1.7838627797861704, + "grad_norm": 6.195107936859131, + "learning_rate": 7.450339476353475e-06, + "loss": 1.7569, + "step": 2899 + }, + { + "epoch": 1.7844781170679178, + "grad_norm": 6.9322829246521, + "learning_rate": 7.443914990967931e-06, + "loss": 1.772, + "step": 2900 + }, + { + "epoch": 1.7850934543496653, + "grad_norm": 6.3153533935546875, + "learning_rate": 7.437491634130518e-06, + 
"loss": 1.8013, + "step": 2901 + }, + { + "epoch": 1.785708791631413, + "grad_norm": 5.742201328277588, + "learning_rate": 7.431069408677248e-06, + "loss": 1.6644, + "step": 2902 + }, + { + "epoch": 1.7863241289131606, + "grad_norm": 6.328940391540527, + "learning_rate": 7.424648317443616e-06, + "loss": 1.9257, + "step": 2903 + }, + { + "epoch": 1.786939466194908, + "grad_norm": 5.51646614074707, + "learning_rate": 7.418228363264631e-06, + "loss": 1.9495, + "step": 2904 + }, + { + "epoch": 1.7875548034766555, + "grad_norm": 6.436853408813477, + "learning_rate": 7.411809548974792e-06, + "loss": 1.9144, + "step": 2905 + }, + { + "epoch": 1.7881701407584032, + "grad_norm": 6.839389324188232, + "learning_rate": 7.405391877408103e-06, + "loss": 1.8554, + "step": 2906 + }, + { + "epoch": 1.7887854780401509, + "grad_norm": 7.1280107498168945, + "learning_rate": 7.398975351398054e-06, + "loss": 1.8365, + "step": 2907 + }, + { + "epoch": 1.7894008153218983, + "grad_norm": 6.110474586486816, + "learning_rate": 7.3925599737776285e-06, + "loss": 2.0077, + "step": 2908 + }, + { + "epoch": 1.7900161526036458, + "grad_norm": 5.9164910316467285, + "learning_rate": 7.386145747379316e-06, + "loss": 1.8362, + "step": 2909 + }, + { + "epoch": 1.7906314898853934, + "grad_norm": 5.938102722167969, + "learning_rate": 7.379732675035085e-06, + "loss": 1.7751, + "step": 2910 + }, + { + "epoch": 1.7912468271671411, + "grad_norm": 7.600171089172363, + "learning_rate": 7.3733207595764015e-06, + "loss": 1.8035, + "step": 2911 + }, + { + "epoch": 1.7918621644488886, + "grad_norm": 5.603367328643799, + "learning_rate": 7.366910003834211e-06, + "loss": 1.9586, + "step": 2912 + }, + { + "epoch": 1.792477501730636, + "grad_norm": 6.496038436889648, + "learning_rate": 7.360500410638962e-06, + "loss": 2.0061, + "step": 2913 + }, + { + "epoch": 1.7930928390123837, + "grad_norm": 6.465391159057617, + "learning_rate": 7.354091982820578e-06, + "loss": 1.8332, + "step": 2914 + }, + { + "epoch": 1.7937081762941314, + "grad_norm": 7.731108665466309, + "learning_rate": 7.347684723208471e-06, + "loss": 1.8619, + "step": 2915 + }, + { + "epoch": 1.7943235135758788, + "grad_norm": 6.770203590393066, + "learning_rate": 7.3412786346315415e-06, + "loss": 1.9343, + "step": 2916 + }, + { + "epoch": 1.7949388508576263, + "grad_norm": 8.1485013961792, + "learning_rate": 7.334873719918165e-06, + "loss": 1.6558, + "step": 2917 + }, + { + "epoch": 1.795554188139374, + "grad_norm": 5.323476791381836, + "learning_rate": 7.328469981896207e-06, + "loss": 1.8871, + "step": 2918 + }, + { + "epoch": 1.7961695254211214, + "grad_norm": 5.825166702270508, + "learning_rate": 7.322067423393002e-06, + "loss": 2.0461, + "step": 2919 + }, + { + "epoch": 1.7967848627028689, + "grad_norm": 6.594723224639893, + "learning_rate": 7.3156660472353814e-06, + "loss": 1.9303, + "step": 2920 + }, + { + "epoch": 1.7974001999846165, + "grad_norm": 6.05696964263916, + "learning_rate": 7.309265856249641e-06, + "loss": 1.9618, + "step": 2921 + }, + { + "epoch": 1.7980155372663642, + "grad_norm": 7.404928207397461, + "learning_rate": 7.302866853261552e-06, + "loss": 2.0901, + "step": 2922 + }, + { + "epoch": 1.7986308745481117, + "grad_norm": 6.846864700317383, + "learning_rate": 7.2964690410963705e-06, + "loss": 1.9626, + "step": 2923 + }, + { + "epoch": 1.799246211829859, + "grad_norm": 5.477418422698975, + "learning_rate": 7.2900724225788225e-06, + "loss": 1.8018, + "step": 2924 + }, + { + "epoch": 1.7998615491116068, + "grad_norm": 6.206912994384766, + "learning_rate": 
7.283677000533106e-06, + "loss": 1.8314, + "step": 2925 + }, + { + "epoch": 1.8004768863933545, + "grad_norm": 6.858780384063721, + "learning_rate": 7.277282777782886e-06, + "loss": 1.7446, + "step": 2926 + }, + { + "epoch": 1.801092223675102, + "grad_norm": 6.911221981048584, + "learning_rate": 7.2708897571513116e-06, + "loss": 1.8075, + "step": 2927 + }, + { + "epoch": 1.8017075609568494, + "grad_norm": 6.050870895385742, + "learning_rate": 7.264497941460988e-06, + "loss": 1.9284, + "step": 2928 + }, + { + "epoch": 1.802322898238597, + "grad_norm": 6.2010817527771, + "learning_rate": 7.258107333533994e-06, + "loss": 1.9283, + "step": 2929 + }, + { + "epoch": 1.8029382355203447, + "grad_norm": 6.8262128829956055, + "learning_rate": 7.251717936191879e-06, + "loss": 1.9377, + "step": 2930 + }, + { + "epoch": 1.8035535728020922, + "grad_norm": 7.113044738769531, + "learning_rate": 7.2453297522556475e-06, + "loss": 1.9229, + "step": 2931 + }, + { + "epoch": 1.8041689100838396, + "grad_norm": 7.426556587219238, + "learning_rate": 7.238942784545777e-06, + "loss": 2.011, + "step": 2932 + }, + { + "epoch": 1.8047842473655873, + "grad_norm": 6.750155925750732, + "learning_rate": 7.232557035882202e-06, + "loss": 1.8323, + "step": 2933 + }, + { + "epoch": 1.805399584647335, + "grad_norm": 6.239223003387451, + "learning_rate": 7.226172509084326e-06, + "loss": 1.991, + "step": 2934 + }, + { + "epoch": 1.8060149219290824, + "grad_norm": 5.294129848480225, + "learning_rate": 7.2197892069710105e-06, + "loss": 1.8269, + "step": 2935 + }, + { + "epoch": 1.8066302592108299, + "grad_norm": 5.653473377227783, + "learning_rate": 7.213407132360567e-06, + "loss": 1.9086, + "step": 2936 + }, + { + "epoch": 1.8072455964925775, + "grad_norm": 7.257890701293945, + "learning_rate": 7.20702628807078e-06, + "loss": 1.7861, + "step": 2937 + }, + { + "epoch": 1.8078609337743252, + "grad_norm": 7.149225234985352, + "learning_rate": 7.200646676918881e-06, + "loss": 1.8994, + "step": 2938 + }, + { + "epoch": 1.8084762710560724, + "grad_norm": 6.928760528564453, + "learning_rate": 7.194268301721563e-06, + "loss": 1.947, + "step": 2939 + }, + { + "epoch": 1.8090916083378201, + "grad_norm": 6.9028239250183105, + "learning_rate": 7.1878911652949626e-06, + "loss": 1.8903, + "step": 2940 + }, + { + "epoch": 1.8097069456195678, + "grad_norm": 5.582470893859863, + "learning_rate": 7.181515270454683e-06, + "loss": 1.9392, + "step": 2941 + }, + { + "epoch": 1.8103222829013152, + "grad_norm": 5.7642011642456055, + "learning_rate": 7.1751406200157714e-06, + "loss": 1.8778, + "step": 2942 + }, + { + "epoch": 1.8109376201830627, + "grad_norm": 6.612176418304443, + "learning_rate": 7.168767216792725e-06, + "loss": 2.0369, + "step": 2943 + }, + { + "epoch": 1.8115529574648104, + "grad_norm": 7.875355243682861, + "learning_rate": 7.162395063599497e-06, + "loss": 1.8903, + "step": 2944 + }, + { + "epoch": 1.812168294746558, + "grad_norm": 5.544853687286377, + "learning_rate": 7.1560241632494845e-06, + "loss": 1.9377, + "step": 2945 + }, + { + "epoch": 1.8127836320283055, + "grad_norm": 6.563533306121826, + "learning_rate": 7.149654518555526e-06, + "loss": 2.0516, + "step": 2946 + }, + { + "epoch": 1.813398969310053, + "grad_norm": 6.159337043762207, + "learning_rate": 7.143286132329912e-06, + "loss": 1.7961, + "step": 2947 + }, + { + "epoch": 1.8140143065918006, + "grad_norm": 5.944519996643066, + "learning_rate": 7.136919007384379e-06, + "loss": 2.1808, + "step": 2948 + }, + { + "epoch": 1.8146296438735483, + "grad_norm": 5.63344669342041, 
+ "learning_rate": 7.130553146530105e-06, + "loss": 1.7858, + "step": 2949 + }, + { + "epoch": 1.8152449811552958, + "grad_norm": 6.646350860595703, + "learning_rate": 7.124188552577702e-06, + "loss": 1.8588, + "step": 2950 + }, + { + "epoch": 1.8158603184370432, + "grad_norm": 5.7756123542785645, + "learning_rate": 7.117825228337236e-06, + "loss": 1.7576, + "step": 2951 + }, + { + "epoch": 1.8164756557187909, + "grad_norm": 6.541412830352783, + "learning_rate": 7.111463176618205e-06, + "loss": 1.9243, + "step": 2952 + }, + { + "epoch": 1.8170909930005386, + "grad_norm": 6.126303672790527, + "learning_rate": 7.1051024002295444e-06, + "loss": 2.008, + "step": 2953 + }, + { + "epoch": 1.817706330282286, + "grad_norm": 5.749372959136963, + "learning_rate": 7.098742901979625e-06, + "loss": 1.8438, + "step": 2954 + }, + { + "epoch": 1.8183216675640335, + "grad_norm": 6.944143772125244, + "learning_rate": 7.092384684676263e-06, + "loss": 1.7579, + "step": 2955 + }, + { + "epoch": 1.8189370048457811, + "grad_norm": 6.798779010772705, + "learning_rate": 7.086027751126696e-06, + "loss": 2.0431, + "step": 2956 + }, + { + "epoch": 1.8195523421275288, + "grad_norm": 6.7004714012146, + "learning_rate": 7.079672104137604e-06, + "loss": 2.018, + "step": 2957 + }, + { + "epoch": 1.8201676794092763, + "grad_norm": 5.590427398681641, + "learning_rate": 7.073317746515097e-06, + "loss": 1.8633, + "step": 2958 + }, + { + "epoch": 1.8207830166910237, + "grad_norm": 5.79734468460083, + "learning_rate": 7.066964681064716e-06, + "loss": 1.9046, + "step": 2959 + }, + { + "epoch": 1.8213983539727714, + "grad_norm": 7.895987510681152, + "learning_rate": 7.0606129105914274e-06, + "loss": 1.8252, + "step": 2960 + }, + { + "epoch": 1.8220136912545188, + "grad_norm": 6.648369312286377, + "learning_rate": 7.054262437899628e-06, + "loss": 1.9112, + "step": 2961 + }, + { + "epoch": 1.8226290285362663, + "grad_norm": 6.495028972625732, + "learning_rate": 7.047913265793146e-06, + "loss": 1.8389, + "step": 2962 + }, + { + "epoch": 1.823244365818014, + "grad_norm": 5.8531270027160645, + "learning_rate": 7.041565397075232e-06, + "loss": 1.649, + "step": 2963 + }, + { + "epoch": 1.8238597030997616, + "grad_norm": 6.4414567947387695, + "learning_rate": 7.035218834548554e-06, + "loss": 1.9493, + "step": 2964 + }, + { + "epoch": 1.824475040381509, + "grad_norm": 7.604949951171875, + "learning_rate": 7.028873581015219e-06, + "loss": 1.6818, + "step": 2965 + }, + { + "epoch": 1.8250903776632565, + "grad_norm": 5.431918144226074, + "learning_rate": 7.022529639276743e-06, + "loss": 1.9378, + "step": 2966 + }, + { + "epoch": 1.8257057149450042, + "grad_norm": 6.310035228729248, + "learning_rate": 7.016187012134069e-06, + "loss": 1.729, + "step": 2967 + }, + { + "epoch": 1.826321052226752, + "grad_norm": 6.589941501617432, + "learning_rate": 7.009845702387551e-06, + "loss": 1.9296, + "step": 2968 + }, + { + "epoch": 1.8269363895084993, + "grad_norm": 5.50190544128418, + "learning_rate": 7.00350571283698e-06, + "loss": 1.8915, + "step": 2969 + }, + { + "epoch": 1.8275517267902468, + "grad_norm": 6.698362350463867, + "learning_rate": 6.9971670462815425e-06, + "loss": 1.6445, + "step": 2970 + }, + { + "epoch": 1.8281670640719945, + "grad_norm": 6.610686779022217, + "learning_rate": 6.990829705519853e-06, + "loss": 1.8003, + "step": 2971 + }, + { + "epoch": 1.8287824013537421, + "grad_norm": 7.308267116546631, + "learning_rate": 6.984493693349939e-06, + "loss": 2.0865, + "step": 2972 + }, + { + "epoch": 1.8293977386354896, + "grad_norm": 
7.324178218841553, + "learning_rate": 6.978159012569243e-06, + "loss": 1.9035, + "step": 2973 + }, + { + "epoch": 1.830013075917237, + "grad_norm": 7.657386779785156, + "learning_rate": 6.971825665974614e-06, + "loss": 1.863, + "step": 2974 + }, + { + "epoch": 1.8306284131989847, + "grad_norm": 7.536708354949951, + "learning_rate": 6.965493656362314e-06, + "loss": 1.731, + "step": 2975 + }, + { + "epoch": 1.8312437504807324, + "grad_norm": 7.074484825134277, + "learning_rate": 6.959162986528019e-06, + "loss": 1.8449, + "step": 2976 + }, + { + "epoch": 1.8318590877624799, + "grad_norm": 5.903941631317139, + "learning_rate": 6.952833659266811e-06, + "loss": 1.9007, + "step": 2977 + }, + { + "epoch": 1.8324744250442273, + "grad_norm": 6.563668251037598, + "learning_rate": 6.946505677373172e-06, + "loss": 1.8161, + "step": 2978 + }, + { + "epoch": 1.833089762325975, + "grad_norm": 6.215497016906738, + "learning_rate": 6.940179043641005e-06, + "loss": 1.7924, + "step": 2979 + }, + { + "epoch": 1.8337050996077224, + "grad_norm": 6.137181758880615, + "learning_rate": 6.933853760863607e-06, + "loss": 1.932, + "step": 2980 + }, + { + "epoch": 1.8343204368894699, + "grad_norm": 6.038852691650391, + "learning_rate": 6.927529831833679e-06, + "loss": 2.0779, + "step": 2981 + }, + { + "epoch": 1.8349357741712176, + "grad_norm": 6.7155070304870605, + "learning_rate": 6.921207259343323e-06, + "loss": 1.7362, + "step": 2982 + }, + { + "epoch": 1.8355511114529652, + "grad_norm": 6.22803258895874, + "learning_rate": 6.914886046184055e-06, + "loss": 1.7345, + "step": 2983 + }, + { + "epoch": 1.8361664487347127, + "grad_norm": 6.977692604064941, + "learning_rate": 6.908566195146773e-06, + "loss": 1.7804, + "step": 2984 + }, + { + "epoch": 1.8367817860164601, + "grad_norm": 6.002998352050781, + "learning_rate": 6.902247709021783e-06, + "loss": 1.8873, + "step": 2985 + }, + { + "epoch": 1.8373971232982078, + "grad_norm": 6.377654075622559, + "learning_rate": 6.895930590598789e-06, + "loss": 1.8717, + "step": 2986 + }, + { + "epoch": 1.8380124605799555, + "grad_norm": 7.575545310974121, + "learning_rate": 6.889614842666892e-06, + "loss": 1.8835, + "step": 2987 + }, + { + "epoch": 1.838627797861703, + "grad_norm": 6.645220756530762, + "learning_rate": 6.88330046801458e-06, + "loss": 1.9337, + "step": 2988 + }, + { + "epoch": 1.8392431351434504, + "grad_norm": 6.47761869430542, + "learning_rate": 6.876987469429738e-06, + "loss": 1.8049, + "step": 2989 + }, + { + "epoch": 1.839858472425198, + "grad_norm": 6.398726940155029, + "learning_rate": 6.870675849699651e-06, + "loss": 1.8391, + "step": 2990 + }, + { + "epoch": 1.8404738097069457, + "grad_norm": 6.827997207641602, + "learning_rate": 6.86436561161099e-06, + "loss": 1.7713, + "step": 2991 + }, + { + "epoch": 1.8410891469886932, + "grad_norm": 6.339231014251709, + "learning_rate": 6.858056757949807e-06, + "loss": 2.0425, + "step": 2992 + }, + { + "epoch": 1.8417044842704406, + "grad_norm": 7.055133819580078, + "learning_rate": 6.851749291501561e-06, + "loss": 1.7326, + "step": 2993 + }, + { + "epoch": 1.8423198215521883, + "grad_norm": 6.158465385437012, + "learning_rate": 6.845443215051084e-06, + "loss": 1.8352, + "step": 2994 + }, + { + "epoch": 1.842935158833936, + "grad_norm": 5.75634241104126, + "learning_rate": 6.8391385313826035e-06, + "loss": 1.8718, + "step": 2995 + }, + { + "epoch": 1.8435504961156834, + "grad_norm": 6.531718730926514, + "learning_rate": 6.832835243279717e-06, + "loss": 1.8747, + "step": 2996 + }, + { + "epoch": 1.844165833397431, + 
"grad_norm": 5.874340534210205, + "learning_rate": 6.826533353525427e-06, + "loss": 1.9316, + "step": 2997 + }, + { + "epoch": 1.8447811706791786, + "grad_norm": 7.817565441131592, + "learning_rate": 6.8202328649021055e-06, + "loss": 1.8303, + "step": 2998 + }, + { + "epoch": 1.8453965079609262, + "grad_norm": 6.780702590942383, + "learning_rate": 6.813933780191504e-06, + "loss": 1.7909, + "step": 2999 + }, + { + "epoch": 1.8460118452426735, + "grad_norm": 5.794909477233887, + "learning_rate": 6.807636102174764e-06, + "loss": 1.9319, + "step": 3000 + }, + { + "epoch": 1.8466271825244212, + "grad_norm": 6.415708541870117, + "learning_rate": 6.801339833632401e-06, + "loss": 1.7709, + "step": 3001 + }, + { + "epoch": 1.8472425198061688, + "grad_norm": 6.912281513214111, + "learning_rate": 6.795044977344303e-06, + "loss": 1.7925, + "step": 3002 + }, + { + "epoch": 1.8478578570879163, + "grad_norm": 5.932978630065918, + "learning_rate": 6.788751536089739e-06, + "loss": 1.8128, + "step": 3003 + }, + { + "epoch": 1.8484731943696637, + "grad_norm": 5.987113952636719, + "learning_rate": 6.782459512647358e-06, + "loss": 2.0326, + "step": 3004 + }, + { + "epoch": 1.8490885316514114, + "grad_norm": 6.89380407333374, + "learning_rate": 6.776168909795178e-06, + "loss": 1.7238, + "step": 3005 + }, + { + "epoch": 1.849703868933159, + "grad_norm": 5.33206844329834, + "learning_rate": 6.769879730310583e-06, + "loss": 2.0167, + "step": 3006 + }, + { + "epoch": 1.8503192062149065, + "grad_norm": 6.0620880126953125, + "learning_rate": 6.763591976970347e-06, + "loss": 1.8344, + "step": 3007 + }, + { + "epoch": 1.850934543496654, + "grad_norm": 5.725876808166504, + "learning_rate": 6.757305652550596e-06, + "loss": 1.8765, + "step": 3008 + }, + { + "epoch": 1.8515498807784017, + "grad_norm": 7.886386871337891, + "learning_rate": 6.751020759826836e-06, + "loss": 1.619, + "step": 3009 + }, + { + "epoch": 1.8521652180601493, + "grad_norm": 6.038943290710449, + "learning_rate": 6.744737301573929e-06, + "loss": 1.9847, + "step": 3010 + }, + { + "epoch": 1.8527805553418968, + "grad_norm": 5.862661361694336, + "learning_rate": 6.738455280566124e-06, + "loss": 1.9833, + "step": 3011 + }, + { + "epoch": 1.8533958926236442, + "grad_norm": 7.327219486236572, + "learning_rate": 6.7321746995770144e-06, + "loss": 1.9184, + "step": 3012 + }, + { + "epoch": 1.854011229905392, + "grad_norm": 7.170162200927734, + "learning_rate": 6.725895561379567e-06, + "loss": 1.8577, + "step": 3013 + }, + { + "epoch": 1.8546265671871396, + "grad_norm": 6.522434711456299, + "learning_rate": 6.719617868746116e-06, + "loss": 1.8517, + "step": 3014 + }, + { + "epoch": 1.855241904468887, + "grad_norm": 6.228058815002441, + "learning_rate": 6.713341624448351e-06, + "loss": 1.8161, + "step": 3015 + }, + { + "epoch": 1.8558572417506345, + "grad_norm": 6.874424934387207, + "learning_rate": 6.70706683125732e-06, + "loss": 1.8731, + "step": 3016 + }, + { + "epoch": 1.8564725790323822, + "grad_norm": 6.8899970054626465, + "learning_rate": 6.700793491943436e-06, + "loss": 1.9166, + "step": 3017 + }, + { + "epoch": 1.8570879163141298, + "grad_norm": 7.171206474304199, + "learning_rate": 6.694521609276471e-06, + "loss": 1.8814, + "step": 3018 + }, + { + "epoch": 1.8577032535958773, + "grad_norm": 6.5530805587768555, + "learning_rate": 6.6882511860255505e-06, + "loss": 1.8199, + "step": 3019 + }, + { + "epoch": 1.8583185908776247, + "grad_norm": 7.899121284484863, + "learning_rate": 6.681982224959149e-06, + "loss": 1.8149, + "step": 3020 + }, + { + "epoch": 
1.8589339281593724, + "grad_norm": 6.447272300720215, + "learning_rate": 6.675714728845114e-06, + "loss": 2.0209, + "step": 3021 + }, + { + "epoch": 1.8595492654411199, + "grad_norm": 6.433976173400879, + "learning_rate": 6.669448700450627e-06, + "loss": 1.7906, + "step": 3022 + }, + { + "epoch": 1.8601646027228673, + "grad_norm": 6.0615153312683105, + "learning_rate": 6.663184142542233e-06, + "loss": 1.9739, + "step": 3023 + }, + { + "epoch": 1.860779940004615, + "grad_norm": 5.883580684661865, + "learning_rate": 6.656921057885815e-06, + "loss": 1.9118, + "step": 3024 + }, + { + "epoch": 1.8613952772863627, + "grad_norm": 6.911752700805664, + "learning_rate": 6.65065944924663e-06, + "loss": 1.8967, + "step": 3025 + }, + { + "epoch": 1.8620106145681101, + "grad_norm": 7.694616317749023, + "learning_rate": 6.6443993193892566e-06, + "loss": 1.9312, + "step": 3026 + }, + { + "epoch": 1.8626259518498576, + "grad_norm": 8.674033164978027, + "learning_rate": 6.638140671077633e-06, + "loss": 1.9474, + "step": 3027 + }, + { + "epoch": 1.8632412891316052, + "grad_norm": 7.660486221313477, + "learning_rate": 6.631883507075045e-06, + "loss": 1.9517, + "step": 3028 + }, + { + "epoch": 1.863856626413353, + "grad_norm": 5.57664680480957, + "learning_rate": 6.625627830144121e-06, + "loss": 1.8605, + "step": 3029 + }, + { + "epoch": 1.8644719636951004, + "grad_norm": 6.106961250305176, + "learning_rate": 6.619373643046829e-06, + "loss": 1.8195, + "step": 3030 + }, + { + "epoch": 1.8650873009768478, + "grad_norm": 6.04787540435791, + "learning_rate": 6.613120948544482e-06, + "loss": 1.8551, + "step": 3031 + }, + { + "epoch": 1.8657026382585955, + "grad_norm": 6.625510215759277, + "learning_rate": 6.606869749397735e-06, + "loss": 1.741, + "step": 3032 + }, + { + "epoch": 1.8663179755403432, + "grad_norm": 5.028156280517578, + "learning_rate": 6.600620048366586e-06, + "loss": 1.6284, + "step": 3033 + }, + { + "epoch": 1.8669333128220906, + "grad_norm": 5.985222816467285, + "learning_rate": 6.594371848210357e-06, + "loss": 2.0336, + "step": 3034 + }, + { + "epoch": 1.867548650103838, + "grad_norm": 5.814454555511475, + "learning_rate": 6.58812515168773e-06, + "loss": 1.9282, + "step": 3035 + }, + { + "epoch": 1.8681639873855858, + "grad_norm": 6.258947849273682, + "learning_rate": 6.581879961556703e-06, + "loss": 2.1352, + "step": 3036 + }, + { + "epoch": 1.8687793246673334, + "grad_norm": 5.446627140045166, + "learning_rate": 6.57563628057462e-06, + "loss": 1.8531, + "step": 3037 + }, + { + "epoch": 1.8693946619490809, + "grad_norm": 6.669717311859131, + "learning_rate": 6.569394111498153e-06, + "loss": 1.9455, + "step": 3038 + }, + { + "epoch": 1.8700099992308283, + "grad_norm": 6.487255096435547, + "learning_rate": 6.563153457083315e-06, + "loss": 1.9994, + "step": 3039 + }, + { + "epoch": 1.870625336512576, + "grad_norm": 6.578310012817383, + "learning_rate": 6.556914320085439e-06, + "loss": 1.884, + "step": 3040 + }, + { + "epoch": 1.8712406737943235, + "grad_norm": 8.038909912109375, + "learning_rate": 6.550676703259193e-06, + "loss": 1.9004, + "step": 3041 + }, + { + "epoch": 1.871856011076071, + "grad_norm": 7.1881232261657715, + "learning_rate": 6.544440609358575e-06, + "loss": 2.1095, + "step": 3042 + }, + { + "epoch": 1.8724713483578186, + "grad_norm": 5.708949089050293, + "learning_rate": 6.538206041136915e-06, + "loss": 2.0466, + "step": 3043 + }, + { + "epoch": 1.8730866856395663, + "grad_norm": 7.16619348526001, + "learning_rate": 6.531973001346857e-06, + "loss": 1.7693, + "step": 3044 + }, + { 
+ "epoch": 1.8737020229213137, + "grad_norm": 7.231443881988525, + "learning_rate": 6.525741492740378e-06, + "loss": 1.8822, + "step": 3045 + }, + { + "epoch": 1.8743173602030612, + "grad_norm": 6.462650299072266, + "learning_rate": 6.519511518068782e-06, + "loss": 1.9308, + "step": 3046 + }, + { + "epoch": 1.8749326974848088, + "grad_norm": 7.18596076965332, + "learning_rate": 6.513283080082692e-06, + "loss": 1.8653, + "step": 3047 + }, + { + "epoch": 1.8755480347665565, + "grad_norm": 5.869198322296143, + "learning_rate": 6.507056181532045e-06, + "loss": 1.8232, + "step": 3048 + }, + { + "epoch": 1.876163372048304, + "grad_norm": 6.877960205078125, + "learning_rate": 6.5008308251661175e-06, + "loss": 1.6025, + "step": 3049 + }, + { + "epoch": 1.8767787093300514, + "grad_norm": 5.9657793045043945, + "learning_rate": 6.494607013733486e-06, + "loss": 1.7739, + "step": 3050 + }, + { + "epoch": 1.877394046611799, + "grad_norm": 8.76340103149414, + "learning_rate": 6.488384749982054e-06, + "loss": 1.8333, + "step": 3051 + }, + { + "epoch": 1.8780093838935468, + "grad_norm": 6.43775749206543, + "learning_rate": 6.482164036659037e-06, + "loss": 1.7622, + "step": 3052 + }, + { + "epoch": 1.8786247211752942, + "grad_norm": 6.501559257507324, + "learning_rate": 6.4759448765109776e-06, + "loss": 1.9842, + "step": 3053 + }, + { + "epoch": 1.8792400584570417, + "grad_norm": 6.404965400695801, + "learning_rate": 6.469727272283717e-06, + "loss": 2.0881, + "step": 3054 + }, + { + "epoch": 1.8798553957387893, + "grad_norm": 6.940575122833252, + "learning_rate": 6.463511226722416e-06, + "loss": 1.9421, + "step": 3055 + }, + { + "epoch": 1.880470733020537, + "grad_norm": 5.95926570892334, + "learning_rate": 6.45729674257155e-06, + "loss": 1.7925, + "step": 3056 + }, + { + "epoch": 1.8810860703022845, + "grad_norm": 6.117151737213135, + "learning_rate": 6.451083822574907e-06, + "loss": 1.9695, + "step": 3057 + }, + { + "epoch": 1.881701407584032, + "grad_norm": 6.587475299835205, + "learning_rate": 6.444872469475572e-06, + "loss": 1.7272, + "step": 3058 + }, + { + "epoch": 1.8823167448657796, + "grad_norm": 6.024574279785156, + "learning_rate": 6.438662686015947e-06, + "loss": 1.9675, + "step": 3059 + }, + { + "epoch": 1.8829320821475273, + "grad_norm": 7.264667510986328, + "learning_rate": 6.432454474937747e-06, + "loss": 1.925, + "step": 3060 + }, + { + "epoch": 1.8835474194292745, + "grad_norm": 5.8699235916137695, + "learning_rate": 6.426247838981983e-06, + "loss": 1.7758, + "step": 3061 + }, + { + "epoch": 1.8841627567110222, + "grad_norm": 6.871630668640137, + "learning_rate": 6.420042780888969e-06, + "loss": 1.8721, + "step": 3062 + }, + { + "epoch": 1.8847780939927699, + "grad_norm": 6.5113019943237305, + "learning_rate": 6.413839303398337e-06, + "loss": 1.8837, + "step": 3063 + }, + { + "epoch": 1.8853934312745173, + "grad_norm": 7.433158874511719, + "learning_rate": 6.407637409249002e-06, + "loss": 1.7184, + "step": 3064 + }, + { + "epoch": 1.8860087685562648, + "grad_norm": 6.1806182861328125, + "learning_rate": 6.401437101179193e-06, + "loss": 1.7291, + "step": 3065 + }, + { + "epoch": 1.8866241058380124, + "grad_norm": 9.904901504516602, + "learning_rate": 6.395238381926432e-06, + "loss": 1.8698, + "step": 3066 + }, + { + "epoch": 1.88723944311976, + "grad_norm": 7.546746253967285, + "learning_rate": 6.389041254227548e-06, + "loss": 1.7834, + "step": 3067 + }, + { + "epoch": 1.8878547804015076, + "grad_norm": 7.875397682189941, + "learning_rate": 6.3828457208186564e-06, + "loss": 1.9021, + 
"step": 3068 + }, + { + "epoch": 1.888470117683255, + "grad_norm": 7.2924041748046875, + "learning_rate": 6.376651784435174e-06, + "loss": 2.0191, + "step": 3069 + }, + { + "epoch": 1.8890854549650027, + "grad_norm": 7.384467124938965, + "learning_rate": 6.370459447811815e-06, + "loss": 2.0152, + "step": 3070 + }, + { + "epoch": 1.8897007922467504, + "grad_norm": 5.6992082595825195, + "learning_rate": 6.364268713682586e-06, + "loss": 1.7292, + "step": 3071 + }, + { + "epoch": 1.8903161295284978, + "grad_norm": 5.422779560089111, + "learning_rate": 6.358079584780781e-06, + "loss": 1.7859, + "step": 3072 + }, + { + "epoch": 1.8909314668102453, + "grad_norm": 6.4749956130981445, + "learning_rate": 6.351892063838988e-06, + "loss": 1.9086, + "step": 3073 + }, + { + "epoch": 1.891546804091993, + "grad_norm": 5.305809020996094, + "learning_rate": 6.345706153589091e-06, + "loss": 1.8355, + "step": 3074 + }, + { + "epoch": 1.8921621413737406, + "grad_norm": 6.233950614929199, + "learning_rate": 6.339521856762254e-06, + "loss": 1.7989, + "step": 3075 + }, + { + "epoch": 1.892777478655488, + "grad_norm": 6.445683002471924, + "learning_rate": 6.333339176088934e-06, + "loss": 1.9714, + "step": 3076 + }, + { + "epoch": 1.8933928159372355, + "grad_norm": 5.507927894592285, + "learning_rate": 6.327158114298877e-06, + "loss": 1.9168, + "step": 3077 + }, + { + "epoch": 1.8940081532189832, + "grad_norm": 7.005361557006836, + "learning_rate": 6.320978674121103e-06, + "loss": 1.9648, + "step": 3078 + }, + { + "epoch": 1.8946234905007309, + "grad_norm": 6.684882164001465, + "learning_rate": 6.3148008582839284e-06, + "loss": 2.124, + "step": 3079 + }, + { + "epoch": 1.8952388277824783, + "grad_norm": 7.9882097244262695, + "learning_rate": 6.308624669514942e-06, + "loss": 1.6743, + "step": 3080 + }, + { + "epoch": 1.8958541650642258, + "grad_norm": 7.399175643920898, + "learning_rate": 6.3024501105410295e-06, + "loss": 1.9943, + "step": 3081 + }, + { + "epoch": 1.8964695023459734, + "grad_norm": 6.906001091003418, + "learning_rate": 6.2962771840883375e-06, + "loss": 1.8137, + "step": 3082 + }, + { + "epoch": 1.897084839627721, + "grad_norm": 5.16950798034668, + "learning_rate": 6.290105892882303e-06, + "loss": 1.9145, + "step": 3083 + }, + { + "epoch": 1.8977001769094684, + "grad_norm": 5.4872589111328125, + "learning_rate": 6.283936239647643e-06, + "loss": 1.9779, + "step": 3084 + }, + { + "epoch": 1.898315514191216, + "grad_norm": 6.357547760009766, + "learning_rate": 6.277768227108348e-06, + "loss": 1.9237, + "step": 3085 + }, + { + "epoch": 1.8989308514729637, + "grad_norm": 6.9478840827941895, + "learning_rate": 6.271601857987682e-06, + "loss": 1.8177, + "step": 3086 + }, + { + "epoch": 1.8995461887547112, + "grad_norm": 5.740734100341797, + "learning_rate": 6.265437135008181e-06, + "loss": 2.0014, + "step": 3087 + }, + { + "epoch": 1.9001615260364586, + "grad_norm": 5.886911869049072, + "learning_rate": 6.259274060891665e-06, + "loss": 1.8771, + "step": 3088 + }, + { + "epoch": 1.9007768633182063, + "grad_norm": 7.06456995010376, + "learning_rate": 6.253112638359218e-06, + "loss": 1.7439, + "step": 3089 + }, + { + "epoch": 1.901392200599954, + "grad_norm": 6.295661449432373, + "learning_rate": 6.2469528701311935e-06, + "loss": 1.8156, + "step": 3090 + }, + { + "epoch": 1.9020075378817014, + "grad_norm": 7.328492164611816, + "learning_rate": 6.240794758927222e-06, + "loss": 1.6171, + "step": 3091 + }, + { + "epoch": 1.9026228751634489, + "grad_norm": 6.041377544403076, + "learning_rate": 
6.234638307466193e-06, + "loss": 2.0, + "step": 3092 + }, + { + "epoch": 1.9032382124451965, + "grad_norm": 6.3828125, + "learning_rate": 6.228483518466271e-06, + "loss": 1.8965, + "step": 3093 + }, + { + "epoch": 1.9038535497269442, + "grad_norm": 7.509955406188965, + "learning_rate": 6.22233039464488e-06, + "loss": 1.8379, + "step": 3094 + }, + { + "epoch": 1.9044688870086917, + "grad_norm": 8.741901397705078, + "learning_rate": 6.21617893871872e-06, + "loss": 2.0279, + "step": 3095 + }, + { + "epoch": 1.905084224290439, + "grad_norm": 6.221779823303223, + "learning_rate": 6.210029153403738e-06, + "loss": 1.9961, + "step": 3096 + }, + { + "epoch": 1.9056995615721868, + "grad_norm": 5.858999252319336, + "learning_rate": 6.203881041415154e-06, + "loss": 1.8917, + "step": 3097 + }, + { + "epoch": 1.9063148988539345, + "grad_norm": 5.034229278564453, + "learning_rate": 6.197734605467453e-06, + "loss": 1.7821, + "step": 3098 + }, + { + "epoch": 1.906930236135682, + "grad_norm": 7.297373294830322, + "learning_rate": 6.191589848274369e-06, + "loss": 1.6651, + "step": 3099 + }, + { + "epoch": 1.9075455734174294, + "grad_norm": 7.049715995788574, + "learning_rate": 6.185446772548905e-06, + "loss": 1.934, + "step": 3100 + }, + { + "epoch": 1.908160910699177, + "grad_norm": 5.964282512664795, + "learning_rate": 6.179305381003311e-06, + "loss": 2.0886, + "step": 3101 + }, + { + "epoch": 1.9087762479809245, + "grad_norm": 8.535961151123047, + "learning_rate": 6.173165676349103e-06, + "loss": 1.9735, + "step": 3102 + }, + { + "epoch": 1.909391585262672, + "grad_norm": 5.541299819946289, + "learning_rate": 6.167027661297049e-06, + "loss": 2.0402, + "step": 3103 + }, + { + "epoch": 1.9100069225444196, + "grad_norm": 6.954150676727295, + "learning_rate": 6.160891338557167e-06, + "loss": 1.9224, + "step": 3104 + }, + { + "epoch": 1.9106222598261673, + "grad_norm": 9.501497268676758, + "learning_rate": 6.154756710838739e-06, + "loss": 1.9981, + "step": 3105 + }, + { + "epoch": 1.9112375971079147, + "grad_norm": 5.724242687225342, + "learning_rate": 6.148623780850282e-06, + "loss": 1.6738, + "step": 3106 + }, + { + "epoch": 1.9118529343896622, + "grad_norm": 6.661984920501709, + "learning_rate": 6.142492551299577e-06, + "loss": 1.7702, + "step": 3107 + }, + { + "epoch": 1.9124682716714099, + "grad_norm": 6.571625709533691, + "learning_rate": 6.136363024893647e-06, + "loss": 1.8949, + "step": 3108 + }, + { + "epoch": 1.9130836089531575, + "grad_norm": 6.602062702178955, + "learning_rate": 6.13023520433877e-06, + "loss": 1.7566, + "step": 3109 + }, + { + "epoch": 1.913698946234905, + "grad_norm": 6.538501739501953, + "learning_rate": 6.124109092340465e-06, + "loss": 1.9172, + "step": 3110 + }, + { + "epoch": 1.9143142835166524, + "grad_norm": 5.672876834869385, + "learning_rate": 6.117984691603493e-06, + "loss": 1.9276, + "step": 3111 + }, + { + "epoch": 1.9149296207984001, + "grad_norm": 6.286431312561035, + "learning_rate": 6.1118620048318724e-06, + "loss": 1.7933, + "step": 3112 + }, + { + "epoch": 1.9155449580801478, + "grad_norm": 7.679755210876465, + "learning_rate": 6.1057410347288535e-06, + "loss": 2.1418, + "step": 3113 + }, + { + "epoch": 1.9161602953618952, + "grad_norm": 6.750072956085205, + "learning_rate": 6.099621783996934e-06, + "loss": 1.9488, + "step": 3114 + }, + { + "epoch": 1.9167756326436427, + "grad_norm": 6.306653022766113, + "learning_rate": 6.093504255337844e-06, + "loss": 1.8186, + "step": 3115 + }, + { + "epoch": 1.9173909699253904, + "grad_norm": 6.926363468170166, + 
"learning_rate": 6.087388451452567e-06, + "loss": 1.9135, + "step": 3116 + }, + { + "epoch": 1.918006307207138, + "grad_norm": 8.021592140197754, + "learning_rate": 6.081274375041317e-06, + "loss": 1.7752, + "step": 3117 + }, + { + "epoch": 1.9186216444888855, + "grad_norm": 6.030392646789551, + "learning_rate": 6.075162028803544e-06, + "loss": 1.6912, + "step": 3118 + }, + { + "epoch": 1.919236981770633, + "grad_norm": 7.353367805480957, + "learning_rate": 6.069051415437941e-06, + "loss": 1.9269, + "step": 3119 + }, + { + "epoch": 1.9198523190523806, + "grad_norm": 5.899839401245117, + "learning_rate": 6.062942537642427e-06, + "loss": 2.0167, + "step": 3120 + }, + { + "epoch": 1.9204676563341283, + "grad_norm": 7.830085754394531, + "learning_rate": 6.056835398114158e-06, + "loss": 1.8657, + "step": 3121 + }, + { + "epoch": 1.9210829936158758, + "grad_norm": 6.65626335144043, + "learning_rate": 6.050729999549525e-06, + "loss": 1.8612, + "step": 3122 + }, + { + "epoch": 1.9216983308976232, + "grad_norm": 5.651752948760986, + "learning_rate": 6.044626344644151e-06, + "loss": 1.8635, + "step": 3123 + }, + { + "epoch": 1.9223136681793709, + "grad_norm": 5.365637302398682, + "learning_rate": 6.038524436092887e-06, + "loss": 1.7744, + "step": 3124 + }, + { + "epoch": 1.9229290054611183, + "grad_norm": 6.00227689743042, + "learning_rate": 6.032424276589808e-06, + "loss": 2.0591, + "step": 3125 + }, + { + "epoch": 1.9235443427428658, + "grad_norm": 6.21249532699585, + "learning_rate": 6.026325868828228e-06, + "loss": 1.7129, + "step": 3126 + }, + { + "epoch": 1.9241596800246135, + "grad_norm": 6.049663543701172, + "learning_rate": 6.020229215500677e-06, + "loss": 2.0589, + "step": 3127 + }, + { + "epoch": 1.9247750173063611, + "grad_norm": 6.15496826171875, + "learning_rate": 6.014134319298921e-06, + "loss": 2.0064, + "step": 3128 + }, + { + "epoch": 1.9253903545881086, + "grad_norm": 7.0945611000061035, + "learning_rate": 6.008041182913933e-06, + "loss": 1.8118, + "step": 3129 + }, + { + "epoch": 1.926005691869856, + "grad_norm": 6.364123344421387, + "learning_rate": 6.001949809035929e-06, + "loss": 1.7518, + "step": 3130 + }, + { + "epoch": 1.9266210291516037, + "grad_norm": 6.506528854370117, + "learning_rate": 5.995860200354335e-06, + "loss": 1.96, + "step": 3131 + }, + { + "epoch": 1.9272363664333514, + "grad_norm": 6.372161388397217, + "learning_rate": 5.989772359557799e-06, + "loss": 1.8648, + "step": 3132 + }, + { + "epoch": 1.9278517037150988, + "grad_norm": 6.43768835067749, + "learning_rate": 5.983686289334194e-06, + "loss": 2.0037, + "step": 3133 + }, + { + "epoch": 1.9284670409968463, + "grad_norm": 6.023187160491943, + "learning_rate": 5.9776019923706055e-06, + "loss": 1.8181, + "step": 3134 + }, + { + "epoch": 1.929082378278594, + "grad_norm": 5.79160213470459, + "learning_rate": 5.971519471353335e-06, + "loss": 1.8671, + "step": 3135 + }, + { + "epoch": 1.9296977155603416, + "grad_norm": 6.67268705368042, + "learning_rate": 5.965438728967902e-06, + "loss": 1.973, + "step": 3136 + }, + { + "epoch": 1.930313052842089, + "grad_norm": 5.740410327911377, + "learning_rate": 5.959359767899048e-06, + "loss": 1.8587, + "step": 3137 + }, + { + "epoch": 1.9309283901238365, + "grad_norm": 6.612072944641113, + "learning_rate": 5.953282590830718e-06, + "loss": 1.8953, + "step": 3138 + }, + { + "epoch": 1.9315437274055842, + "grad_norm": 5.6946702003479, + "learning_rate": 5.9472072004460665e-06, + "loss": 1.8921, + "step": 3139 + }, + { + "epoch": 1.932159064687332, + "grad_norm": 
6.166197776794434, + "learning_rate": 5.941133599427474e-06, + "loss": 1.886, + "step": 3140 + }, + { + "epoch": 1.9327744019690793, + "grad_norm": 7.392965316772461, + "learning_rate": 5.935061790456519e-06, + "loss": 1.8294, + "step": 3141 + }, + { + "epoch": 1.9333897392508268, + "grad_norm": 6.513240337371826, + "learning_rate": 5.928991776213994e-06, + "loss": 1.9275, + "step": 3142 + }, + { + "epoch": 1.9340050765325745, + "grad_norm": 8.336796760559082, + "learning_rate": 5.922923559379892e-06, + "loss": 2.0046, + "step": 3143 + }, + { + "epoch": 1.934620413814322, + "grad_norm": 6.678196907043457, + "learning_rate": 5.916857142633421e-06, + "loss": 1.8247, + "step": 3144 + }, + { + "epoch": 1.9352357510960694, + "grad_norm": 6.03043794631958, + "learning_rate": 5.910792528652992e-06, + "loss": 1.6613, + "step": 3145 + }, + { + "epoch": 1.935851088377817, + "grad_norm": 9.05323600769043, + "learning_rate": 5.904729720116216e-06, + "loss": 1.975, + "step": 3146 + }, + { + "epoch": 1.9364664256595647, + "grad_norm": 6.962547779083252, + "learning_rate": 5.898668719699914e-06, + "loss": 1.9807, + "step": 3147 + }, + { + "epoch": 1.9370817629413122, + "grad_norm": 5.6400980949401855, + "learning_rate": 5.892609530080102e-06, + "loss": 1.8127, + "step": 3148 + }, + { + "epoch": 1.9376971002230596, + "grad_norm": 6.610001087188721, + "learning_rate": 5.886552153931999e-06, + "loss": 1.8252, + "step": 3149 + }, + { + "epoch": 1.9383124375048073, + "grad_norm": 6.982892990112305, + "learning_rate": 5.880496593930021e-06, + "loss": 1.7494, + "step": 3150 + }, + { + "epoch": 1.938927774786555, + "grad_norm": 6.845014572143555, + "learning_rate": 5.874442852747788e-06, + "loss": 1.7984, + "step": 3151 + }, + { + "epoch": 1.9395431120683024, + "grad_norm": 7.862992286682129, + "learning_rate": 5.8683909330581146e-06, + "loss": 1.8391, + "step": 3152 + }, + { + "epoch": 1.9401584493500499, + "grad_norm": 7.033839702606201, + "learning_rate": 5.8623408375330035e-06, + "loss": 1.7274, + "step": 3153 + }, + { + "epoch": 1.9407737866317976, + "grad_norm": 6.814262866973877, + "learning_rate": 5.856292568843665e-06, + "loss": 2.0244, + "step": 3154 + }, + { + "epoch": 1.9413891239135452, + "grad_norm": 7.806066036224365, + "learning_rate": 5.850246129660494e-06, + "loss": 1.6518, + "step": 3155 + }, + { + "epoch": 1.9420044611952927, + "grad_norm": 5.833174228668213, + "learning_rate": 5.844201522653081e-06, + "loss": 1.8551, + "step": 3156 + }, + { + "epoch": 1.9426197984770401, + "grad_norm": 6.788995742797852, + "learning_rate": 5.8381587504902025e-06, + "loss": 1.9231, + "step": 3157 + }, + { + "epoch": 1.9432351357587878, + "grad_norm": 6.312173843383789, + "learning_rate": 5.832117815839833e-06, + "loss": 1.9101, + "step": 3158 + }, + { + "epoch": 1.9438504730405355, + "grad_norm": 6.278231143951416, + "learning_rate": 5.82607872136913e-06, + "loss": 1.9259, + "step": 3159 + }, + { + "epoch": 1.944465810322283, + "grad_norm": 6.448060035705566, + "learning_rate": 5.820041469744443e-06, + "loss": 1.9999, + "step": 3160 + }, + { + "epoch": 1.9450811476040304, + "grad_norm": 7.971956253051758, + "learning_rate": 5.8140060636313015e-06, + "loss": 1.8515, + "step": 3161 + }, + { + "epoch": 1.945696484885778, + "grad_norm": 6.568082809448242, + "learning_rate": 5.8079725056944245e-06, + "loss": 1.7639, + "step": 3162 + }, + { + "epoch": 1.9463118221675257, + "grad_norm": 8.435286521911621, + "learning_rate": 5.801940798597716e-06, + "loss": 1.9116, + "step": 3163 + }, + { + "epoch": 
1.946927159449273, + "grad_norm": 6.31759786605835, + "learning_rate": 5.795910945004255e-06, + "loss": 1.8704, + "step": 3164 + }, + { + "epoch": 1.9475424967310206, + "grad_norm": 7.596998691558838, + "learning_rate": 5.789882947576321e-06, + "loss": 1.8358, + "step": 3165 + }, + { + "epoch": 1.9481578340127683, + "grad_norm": 6.3987908363342285, + "learning_rate": 5.783856808975349e-06, + "loss": 1.8412, + "step": 3166 + }, + { + "epoch": 1.9487731712945158, + "grad_norm": 5.999912738800049, + "learning_rate": 5.7778325318619665e-06, + "loss": 1.9298, + "step": 3167 + }, + { + "epoch": 1.9493885085762632, + "grad_norm": 5.255100727081299, + "learning_rate": 5.771810118895984e-06, + "loss": 1.924, + "step": 3168 + }, + { + "epoch": 1.950003845858011, + "grad_norm": 6.121715068817139, + "learning_rate": 5.765789572736386e-06, + "loss": 1.7455, + "step": 3169 + }, + { + "epoch": 1.9506191831397586, + "grad_norm": 7.481916904449463, + "learning_rate": 5.759770896041319e-06, + "loss": 1.8953, + "step": 3170 + }, + { + "epoch": 1.951234520421506, + "grad_norm": 6.850796699523926, + "learning_rate": 5.753754091468116e-06, + "loss": 2.0368, + "step": 3171 + }, + { + "epoch": 1.9518498577032535, + "grad_norm": 6.626687049865723, + "learning_rate": 5.747739161673291e-06, + "loss": 1.8116, + "step": 3172 + }, + { + "epoch": 1.9524651949850012, + "grad_norm": 7.205296516418457, + "learning_rate": 5.741726109312521e-06, + "loss": 1.9338, + "step": 3173 + }, + { + "epoch": 1.9530805322667488, + "grad_norm": 9.204854965209961, + "learning_rate": 5.735714937040642e-06, + "loss": 1.8441, + "step": 3174 + }, + { + "epoch": 1.9536958695484963, + "grad_norm": 6.084777355194092, + "learning_rate": 5.729705647511688e-06, + "loss": 1.8161, + "step": 3175 + }, + { + "epoch": 1.9543112068302437, + "grad_norm": 6.203259468078613, + "learning_rate": 5.723698243378837e-06, + "loss": 1.9146, + "step": 3176 + }, + { + "epoch": 1.9549265441119914, + "grad_norm": 5.784270286560059, + "learning_rate": 5.717692727294453e-06, + "loss": 1.709, + "step": 3177 + }, + { + "epoch": 1.955541881393739, + "grad_norm": 8.033754348754883, + "learning_rate": 5.711689101910042e-06, + "loss": 1.89, + "step": 3178 + }, + { + "epoch": 1.9561572186754865, + "grad_norm": 7.297550201416016, + "learning_rate": 5.7056873698763035e-06, + "loss": 2.1003, + "step": 3179 + }, + { + "epoch": 1.956772555957234, + "grad_norm": 6.362544059753418, + "learning_rate": 5.699687533843086e-06, + "loss": 1.8882, + "step": 3180 + }, + { + "epoch": 1.9573878932389817, + "grad_norm": 6.54764986038208, + "learning_rate": 5.6936895964593995e-06, + "loss": 1.7589, + "step": 3181 + }, + { + "epoch": 1.9580032305207293, + "grad_norm": 6.771161079406738, + "learning_rate": 5.687693560373421e-06, + "loss": 2.0426, + "step": 3182 + }, + { + "epoch": 1.9586185678024768, + "grad_norm": 6.942575454711914, + "learning_rate": 5.681699428232488e-06, + "loss": 1.8989, + "step": 3183 + }, + { + "epoch": 1.9592339050842242, + "grad_norm": 8.05281925201416, + "learning_rate": 5.675707202683094e-06, + "loss": 1.9912, + "step": 3184 + }, + { + "epoch": 1.959849242365972, + "grad_norm": 5.863151550292969, + "learning_rate": 5.669716886370893e-06, + "loss": 1.8845, + "step": 3185 + }, + { + "epoch": 1.9604645796477194, + "grad_norm": 7.191119194030762, + "learning_rate": 5.6637284819406936e-06, + "loss": 1.9469, + "step": 3186 + }, + { + "epoch": 1.9610799169294668, + "grad_norm": 7.391888618469238, + "learning_rate": 5.657741992036463e-06, + "loss": 1.895, + "step": 3187 + }, + 
{ + "epoch": 1.9616952542112145, + "grad_norm": 7.41832160949707, + "learning_rate": 5.651757419301321e-06, + "loss": 1.9097, + "step": 3188 + }, + { + "epoch": 1.9623105914929622, + "grad_norm": 5.870285987854004, + "learning_rate": 5.64577476637755e-06, + "loss": 1.8531, + "step": 3189 + }, + { + "epoch": 1.9629259287747096, + "grad_norm": 5.509987831115723, + "learning_rate": 5.639794035906567e-06, + "loss": 1.749, + "step": 3190 + }, + { + "epoch": 1.963541266056457, + "grad_norm": 8.326367378234863, + "learning_rate": 5.633815230528954e-06, + "loss": 2.0137, + "step": 3191 + }, + { + "epoch": 1.9641566033382047, + "grad_norm": 5.187560558319092, + "learning_rate": 5.627838352884435e-06, + "loss": 1.791, + "step": 3192 + }, + { + "epoch": 1.9647719406199524, + "grad_norm": 6.634841442108154, + "learning_rate": 5.6218634056119e-06, + "loss": 1.9733, + "step": 3193 + }, + { + "epoch": 1.9653872779016999, + "grad_norm": 6.766387939453125, + "learning_rate": 5.615890391349359e-06, + "loss": 1.7479, + "step": 3194 + }, + { + "epoch": 1.9660026151834473, + "grad_norm": 7.7135009765625, + "learning_rate": 5.609919312733987e-06, + "loss": 1.7756, + "step": 3195 + }, + { + "epoch": 1.966617952465195, + "grad_norm": 6.345865726470947, + "learning_rate": 5.603950172402107e-06, + "loss": 2.0241, + "step": 3196 + }, + { + "epoch": 1.9672332897469427, + "grad_norm": 5.683028697967529, + "learning_rate": 5.5979829729891796e-06, + "loss": 1.9147, + "step": 3197 + }, + { + "epoch": 1.9678486270286901, + "grad_norm": 6.740362644195557, + "learning_rate": 5.592017717129801e-06, + "loss": 1.8109, + "step": 3198 + }, + { + "epoch": 1.9684639643104376, + "grad_norm": 5.514584064483643, + "learning_rate": 5.586054407457717e-06, + "loss": 1.5463, + "step": 3199 + }, + { + "epoch": 1.9690793015921852, + "grad_norm": 5.9816975593566895, + "learning_rate": 5.580093046605823e-06, + "loss": 1.8046, + "step": 3200 + }, + { + "epoch": 1.969694638873933, + "grad_norm": 8.915163040161133, + "learning_rate": 5.574133637206144e-06, + "loss": 1.8007, + "step": 3201 + }, + { + "epoch": 1.9703099761556804, + "grad_norm": 6.4204559326171875, + "learning_rate": 5.568176181889833e-06, + "loss": 1.7711, + "step": 3202 + }, + { + "epoch": 1.9709253134374278, + "grad_norm": 8.580522537231445, + "learning_rate": 5.562220683287205e-06, + "loss": 2.0681, + "step": 3203 + }, + { + "epoch": 1.9715406507191755, + "grad_norm": 7.247260093688965, + "learning_rate": 5.556267144027692e-06, + "loss": 1.7596, + "step": 3204 + }, + { + "epoch": 1.972155988000923, + "grad_norm": 6.181526184082031, + "learning_rate": 5.550315566739872e-06, + "loss": 1.912, + "step": 3205 + }, + { + "epoch": 1.9727713252826704, + "grad_norm": 7.744994163513184, + "learning_rate": 5.544365954051441e-06, + "loss": 1.9707, + "step": 3206 + }, + { + "epoch": 1.973386662564418, + "grad_norm": 6.095126628875732, + "learning_rate": 5.538418308589246e-06, + "loss": 1.5949, + "step": 3207 + }, + { + "epoch": 1.9740019998461658, + "grad_norm": 6.679848670959473, + "learning_rate": 5.532472632979256e-06, + "loss": 1.7563, + "step": 3208 + }, + { + "epoch": 1.9746173371279132, + "grad_norm": 6.535772323608398, + "learning_rate": 5.5265289298465744e-06, + "loss": 1.8837, + "step": 3209 + }, + { + "epoch": 1.9752326744096607, + "grad_norm": 6.025022983551025, + "learning_rate": 5.520587201815428e-06, + "loss": 1.8163, + "step": 3210 + }, + { + "epoch": 1.9758480116914083, + "grad_norm": 6.097251892089844, + "learning_rate": 5.514647451509175e-06, + "loss": 2.0119, + "step": 
3211 + }, + { + "epoch": 1.976463348973156, + "grad_norm": 5.13022518157959, + "learning_rate": 5.508709681550301e-06, + "loss": 1.8976, + "step": 3212 + }, + { + "epoch": 1.9770786862549035, + "grad_norm": 7.87878942489624, + "learning_rate": 5.502773894560418e-06, + "loss": 1.5908, + "step": 3213 + }, + { + "epoch": 1.977694023536651, + "grad_norm": 5.937126159667969, + "learning_rate": 5.496840093160259e-06, + "loss": 2.1902, + "step": 3214 + }, + { + "epoch": 1.9783093608183986, + "grad_norm": 6.018588066101074, + "learning_rate": 5.490908279969682e-06, + "loss": 1.8673, + "step": 3215 + }, + { + "epoch": 1.9789246981001463, + "grad_norm": 6.608116149902344, + "learning_rate": 5.484978457607664e-06, + "loss": 1.9522, + "step": 3216 + }, + { + "epoch": 1.9795400353818937, + "grad_norm": 5.593914985656738, + "learning_rate": 5.47905062869232e-06, + "loss": 1.8099, + "step": 3217 + }, + { + "epoch": 1.9801553726636412, + "grad_norm": 8.022329330444336, + "learning_rate": 5.473124795840856e-06, + "loss": 1.7829, + "step": 3218 + }, + { + "epoch": 1.9807707099453888, + "grad_norm": 7.83609676361084, + "learning_rate": 5.467200961669619e-06, + "loss": 1.8661, + "step": 3219 + }, + { + "epoch": 1.9813860472271365, + "grad_norm": 5.653415203094482, + "learning_rate": 5.461279128794061e-06, + "loss": 2.0066, + "step": 3220 + }, + { + "epoch": 1.982001384508884, + "grad_norm": 6.801254749298096, + "learning_rate": 5.455359299828769e-06, + "loss": 1.6868, + "step": 3221 + }, + { + "epoch": 1.9826167217906314, + "grad_norm": 5.556186199188232, + "learning_rate": 5.4494414773874205e-06, + "loss": 1.6966, + "step": 3222 + }, + { + "epoch": 1.983232059072379, + "grad_norm": 4.674101829528809, + "learning_rate": 5.443525664082818e-06, + "loss": 1.7944, + "step": 3223 + }, + { + "epoch": 1.9838473963541268, + "grad_norm": 6.195478916168213, + "learning_rate": 5.437611862526887e-06, + "loss": 2.0649, + "step": 3224 + }, + { + "epoch": 1.984462733635874, + "grad_norm": 6.294257640838623, + "learning_rate": 5.431700075330654e-06, + "loss": 1.9189, + "step": 3225 + }, + { + "epoch": 1.9850780709176217, + "grad_norm": 6.332217693328857, + "learning_rate": 5.425790305104251e-06, + "loss": 1.6725, + "step": 3226 + }, + { + "epoch": 1.9856934081993693, + "grad_norm": 6.894514560699463, + "learning_rate": 5.419882554456923e-06, + "loss": 1.8447, + "step": 3227 + }, + { + "epoch": 1.9863087454811168, + "grad_norm": 8.022863388061523, + "learning_rate": 5.4139768259970385e-06, + "loss": 1.9752, + "step": 3228 + }, + { + "epoch": 1.9869240827628643, + "grad_norm": 6.786382675170898, + "learning_rate": 5.40807312233206e-06, + "loss": 1.7017, + "step": 3229 + }, + { + "epoch": 1.987539420044612, + "grad_norm": 7.099830150604248, + "learning_rate": 5.402171446068545e-06, + "loss": 1.7934, + "step": 3230 + }, + { + "epoch": 1.9881547573263596, + "grad_norm": 5.62034273147583, + "learning_rate": 5.39627179981218e-06, + "loss": 1.7885, + "step": 3231 + }, + { + "epoch": 1.988770094608107, + "grad_norm": 5.6713080406188965, + "learning_rate": 5.390374186167739e-06, + "loss": 2.0724, + "step": 3232 + }, + { + "epoch": 1.9893854318898545, + "grad_norm": 6.367765426635742, + "learning_rate": 5.384478607739103e-06, + "loss": 1.9328, + "step": 3233 + }, + { + "epoch": 1.9900007691716022, + "grad_norm": 6.351325035095215, + "learning_rate": 5.378585067129256e-06, + "loss": 1.8449, + "step": 3234 + }, + { + "epoch": 1.9906161064533499, + "grad_norm": 6.40389347076416, + "learning_rate": 5.372693566940277e-06, + "loss": 1.8356, 
+ "step": 3235 + }, + { + "epoch": 1.9912314437350973, + "grad_norm": 8.280359268188477, + "learning_rate": 5.366804109773352e-06, + "loss": 2.1311, + "step": 3236 + }, + { + "epoch": 1.9918467810168448, + "grad_norm": 6.525228977203369, + "learning_rate": 5.360916698228756e-06, + "loss": 1.7066, + "step": 3237 + }, + { + "epoch": 1.9924621182985924, + "grad_norm": 9.430890083312988, + "learning_rate": 5.3550313349058695e-06, + "loss": 2.2113, + "step": 3238 + }, + { + "epoch": 1.99307745558034, + "grad_norm": 6.110797882080078, + "learning_rate": 5.349148022403162e-06, + "loss": 1.8399, + "step": 3239 + }, + { + "epoch": 1.9936927928620876, + "grad_norm": 7.1408843994140625, + "learning_rate": 5.343266763318202e-06, + "loss": 1.9739, + "step": 3240 + }, + { + "epoch": 1.994308130143835, + "grad_norm": 5.745360851287842, + "learning_rate": 5.337387560247648e-06, + "loss": 1.9048, + "step": 3241 + }, + { + "epoch": 1.9949234674255827, + "grad_norm": 5.582793712615967, + "learning_rate": 5.331510415787255e-06, + "loss": 1.9134, + "step": 3242 + }, + { + "epoch": 1.9955388047073304, + "grad_norm": 7.194936275482178, + "learning_rate": 5.325635332531864e-06, + "loss": 2.1308, + "step": 3243 + }, + { + "epoch": 1.9961541419890778, + "grad_norm": 5.708277225494385, + "learning_rate": 5.319762313075405e-06, + "loss": 1.9353, + "step": 3244 + }, + { + "epoch": 1.9967694792708253, + "grad_norm": 5.390617370605469, + "learning_rate": 5.313891360010912e-06, + "loss": 1.893, + "step": 3245 + }, + { + "epoch": 1.997384816552573, + "grad_norm": 5.393786430358887, + "learning_rate": 5.308022475930484e-06, + "loss": 2.0889, + "step": 3246 + }, + { + "epoch": 1.9980001538343204, + "grad_norm": 5.745631217956543, + "learning_rate": 5.302155663425319e-06, + "loss": 1.8718, + "step": 3247 + }, + { + "epoch": 1.9986154911160678, + "grad_norm": 6.936227798461914, + "learning_rate": 5.296290925085699e-06, + "loss": 1.9053, + "step": 3248 + }, + { + "epoch": 1.9992308283978155, + "grad_norm": 6.210941791534424, + "learning_rate": 5.290428263500996e-06, + "loss": 2.024, + "step": 3249 + }, + { + "epoch": 1.9998461656795632, + "grad_norm": 6.614715576171875, + "learning_rate": 5.284567681259651e-06, + "loss": 1.9556, + "step": 3250 + }, + { + "epoch": 2.000461502961311, + "grad_norm": 6.394988536834717, + "learning_rate": 5.278709180949195e-06, + "loss": 1.8333, + "step": 3251 + }, + { + "epoch": 2.001076840243058, + "grad_norm": 6.716355323791504, + "learning_rate": 5.2728527651562475e-06, + "loss": 1.6569, + "step": 3252 + }, + { + "epoch": 2.0016921775248058, + "grad_norm": 7.2900004386901855, + "learning_rate": 5.2669984364664996e-06, + "loss": 1.8021, + "step": 3253 + }, + { + "epoch": 2.0023075148065534, + "grad_norm": 7.229827880859375, + "learning_rate": 5.261146197464711e-06, + "loss": 1.8015, + "step": 3254 + }, + { + "epoch": 2.002922852088301, + "grad_norm": 5.78691291809082, + "learning_rate": 5.255296050734733e-06, + "loss": 1.603, + "step": 3255 + }, + { + "epoch": 2.0035381893700484, + "grad_norm": 6.154083728790283, + "learning_rate": 5.2494479988594936e-06, + "loss": 1.9114, + "step": 3256 + }, + { + "epoch": 2.004153526651796, + "grad_norm": 6.098405838012695, + "learning_rate": 5.243602044420991e-06, + "loss": 1.82, + "step": 3257 + }, + { + "epoch": 2.0047688639335437, + "grad_norm": 5.797898769378662, + "learning_rate": 5.237758190000295e-06, + "loss": 1.872, + "step": 3258 + }, + { + "epoch": 2.005384201215291, + "grad_norm": 6.0135273933410645, + "learning_rate": 5.231916438177552e-06, + 
"loss": 1.602, + "step": 3259 + }, + { + "epoch": 2.0059995384970386, + "grad_norm": 6.441640853881836, + "learning_rate": 5.226076791531979e-06, + "loss": 1.864, + "step": 3260 + }, + { + "epoch": 2.0066148757787863, + "grad_norm": 6.560187816619873, + "learning_rate": 5.220239252641863e-06, + "loss": 1.7944, + "step": 3261 + }, + { + "epoch": 2.007230213060534, + "grad_norm": 9.097228050231934, + "learning_rate": 5.214403824084562e-06, + "loss": 1.951, + "step": 3262 + }, + { + "epoch": 2.007845550342281, + "grad_norm": 5.977606773376465, + "learning_rate": 5.208570508436501e-06, + "loss": 1.749, + "step": 3263 + }, + { + "epoch": 2.008460887624029, + "grad_norm": 6.478504657745361, + "learning_rate": 5.202739308273172e-06, + "loss": 1.8139, + "step": 3264 + }, + { + "epoch": 2.0090762249057765, + "grad_norm": 5.604608058929443, + "learning_rate": 5.196910226169134e-06, + "loss": 1.8566, + "step": 3265 + }, + { + "epoch": 2.009691562187524, + "grad_norm": 6.13186502456665, + "learning_rate": 5.191083264698011e-06, + "loss": 1.8532, + "step": 3266 + }, + { + "epoch": 2.0103068994692714, + "grad_norm": 5.513442516326904, + "learning_rate": 5.1852584264324864e-06, + "loss": 1.8313, + "step": 3267 + }, + { + "epoch": 2.010922236751019, + "grad_norm": 6.778023719787598, + "learning_rate": 5.179435713944314e-06, + "loss": 1.9021, + "step": 3268 + }, + { + "epoch": 2.011537574032767, + "grad_norm": 7.356326580047607, + "learning_rate": 5.173615129804301e-06, + "loss": 1.773, + "step": 3269 + }, + { + "epoch": 2.0121529113145145, + "grad_norm": 6.674051284790039, + "learning_rate": 5.1677966765823226e-06, + "loss": 1.9619, + "step": 3270 + }, + { + "epoch": 2.0127682485962617, + "grad_norm": 7.200241565704346, + "learning_rate": 5.161980356847307e-06, + "loss": 1.6942, + "step": 3271 + }, + { + "epoch": 2.0133835858780094, + "grad_norm": 5.515239238739014, + "learning_rate": 5.156166173167237e-06, + "loss": 1.6415, + "step": 3272 + }, + { + "epoch": 2.013998923159757, + "grad_norm": 5.797714710235596, + "learning_rate": 5.150354128109173e-06, + "loss": 1.7229, + "step": 3273 + }, + { + "epoch": 2.0146142604415047, + "grad_norm": 5.668634414672852, + "learning_rate": 5.144544224239203e-06, + "loss": 1.659, + "step": 3274 + }, + { + "epoch": 2.015229597723252, + "grad_norm": 5.850852966308594, + "learning_rate": 5.138736464122485e-06, + "loss": 1.6465, + "step": 3275 + }, + { + "epoch": 2.0158449350049996, + "grad_norm": 6.488497257232666, + "learning_rate": 5.132930850323228e-06, + "loss": 1.5163, + "step": 3276 + }, + { + "epoch": 2.0164602722867473, + "grad_norm": 5.886690139770508, + "learning_rate": 5.127127385404702e-06, + "loss": 1.824, + "step": 3277 + }, + { + "epoch": 2.0170756095684945, + "grad_norm": 5.137667179107666, + "learning_rate": 5.121326071929209e-06, + "loss": 1.9265, + "step": 3278 + }, + { + "epoch": 2.017690946850242, + "grad_norm": 5.783592224121094, + "learning_rate": 5.115526912458113e-06, + "loss": 1.6694, + "step": 3279 + }, + { + "epoch": 2.01830628413199, + "grad_norm": 5.673574924468994, + "learning_rate": 5.109729909551833e-06, + "loss": 1.8729, + "step": 3280 + }, + { + "epoch": 2.0189216214137375, + "grad_norm": 6.321591854095459, + "learning_rate": 5.103935065769825e-06, + "loss": 1.8199, + "step": 3281 + }, + { + "epoch": 2.0195369586954848, + "grad_norm": 6.101338863372803, + "learning_rate": 5.0981423836706e-06, + "loss": 1.8317, + "step": 3282 + }, + { + "epoch": 2.0201522959772324, + "grad_norm": 6.699808597564697, + "learning_rate": 5.092351865811698e-06, 
+ "loss": 1.9474, + "step": 3283 + }, + { + "epoch": 2.02076763325898, + "grad_norm": 7.019954681396484, + "learning_rate": 5.086563514749727e-06, + "loss": 1.8984, + "step": 3284 + }, + { + "epoch": 2.021382970540728, + "grad_norm": 7.509356498718262, + "learning_rate": 5.080777333040323e-06, + "loss": 1.7006, + "step": 3285 + }, + { + "epoch": 2.021998307822475, + "grad_norm": 5.687875747680664, + "learning_rate": 5.074993323238168e-06, + "loss": 1.8069, + "step": 3286 + }, + { + "epoch": 2.0226136451042227, + "grad_norm": 6.424361228942871, + "learning_rate": 5.0692114878969845e-06, + "loss": 1.7761, + "step": 3287 + }, + { + "epoch": 2.0232289823859704, + "grad_norm": 7.202826976776123, + "learning_rate": 5.063431829569537e-06, + "loss": 1.7803, + "step": 3288 + }, + { + "epoch": 2.023844319667718, + "grad_norm": 6.629302501678467, + "learning_rate": 5.057654350807628e-06, + "loss": 1.6767, + "step": 3289 + }, + { + "epoch": 2.0244596569494653, + "grad_norm": 6.663406848907471, + "learning_rate": 5.0518790541620945e-06, + "loss": 1.7442, + "step": 3290 + }, + { + "epoch": 2.025074994231213, + "grad_norm": 5.468122959136963, + "learning_rate": 5.046105942182815e-06, + "loss": 1.8378, + "step": 3291 + }, + { + "epoch": 2.0256903315129606, + "grad_norm": 6.342126369476318, + "learning_rate": 5.0403350174186995e-06, + "loss": 1.8066, + "step": 3292 + }, + { + "epoch": 2.0263056687947083, + "grad_norm": 6.55281400680542, + "learning_rate": 5.034566282417694e-06, + "loss": 1.6929, + "step": 3293 + }, + { + "epoch": 2.0269210060764555, + "grad_norm": 7.13408088684082, + "learning_rate": 5.02879973972678e-06, + "loss": 1.7513, + "step": 3294 + }, + { + "epoch": 2.027536343358203, + "grad_norm": 6.785405158996582, + "learning_rate": 5.023035391891966e-06, + "loss": 1.9065, + "step": 3295 + }, + { + "epoch": 2.028151680639951, + "grad_norm": 6.023896217346191, + "learning_rate": 5.017273241458296e-06, + "loss": 1.8885, + "step": 3296 + }, + { + "epoch": 2.028767017921698, + "grad_norm": 6.695373058319092, + "learning_rate": 5.011513290969839e-06, + "loss": 1.6923, + "step": 3297 + }, + { + "epoch": 2.029382355203446, + "grad_norm": 7.450175762176514, + "learning_rate": 5.005755542969699e-06, + "loss": 2.0373, + "step": 3298 + }, + { + "epoch": 2.0299976924851935, + "grad_norm": 8.885830879211426, + "learning_rate": 5.000000000000003e-06, + "loss": 1.8414, + "step": 3299 + }, + { + "epoch": 2.030613029766941, + "grad_norm": 7.0693206787109375, + "learning_rate": 4.9942466646019e-06, + "loss": 1.6837, + "step": 3300 + }, + { + "epoch": 2.0312283670486884, + "grad_norm": 6.346035003662109, + "learning_rate": 4.988495539315584e-06, + "loss": 1.933, + "step": 3301 + }, + { + "epoch": 2.031843704330436, + "grad_norm": 5.4847211837768555, + "learning_rate": 4.982746626680247e-06, + "loss": 1.7698, + "step": 3302 + }, + { + "epoch": 2.0324590416121837, + "grad_norm": 6.456057071685791, + "learning_rate": 4.97699992923412e-06, + "loss": 1.8958, + "step": 3303 + }, + { + "epoch": 2.0330743788939314, + "grad_norm": 6.965982437133789, + "learning_rate": 4.971255449514447e-06, + "loss": 1.7585, + "step": 3304 + }, + { + "epoch": 2.0336897161756786, + "grad_norm": 6.01101541519165, + "learning_rate": 4.965513190057508e-06, + "loss": 1.8846, + "step": 3305 + }, + { + "epoch": 2.0343050534574263, + "grad_norm": 4.896847724914551, + "learning_rate": 4.9597731533985915e-06, + "loss": 1.7094, + "step": 3306 + }, + { + "epoch": 2.034920390739174, + "grad_norm": 5.749021530151367, + "learning_rate": 
4.954035342071994e-06, + "loss": 1.7912, + "step": 3307 + }, + { + "epoch": 2.0355357280209216, + "grad_norm": 5.607626438140869, + "learning_rate": 4.9482997586110545e-06, + "loss": 1.9409, + "step": 3308 + }, + { + "epoch": 2.036151065302669, + "grad_norm": 7.599244594573975, + "learning_rate": 4.942566405548109e-06, + "loss": 1.794, + "step": 3309 + }, + { + "epoch": 2.0367664025844165, + "grad_norm": 6.1468186378479, + "learning_rate": 4.936835285414522e-06, + "loss": 1.8979, + "step": 3310 + }, + { + "epoch": 2.037381739866164, + "grad_norm": 5.005866050720215, + "learning_rate": 4.9311064007406494e-06, + "loss": 1.911, + "step": 3311 + }, + { + "epoch": 2.037997077147912, + "grad_norm": 5.9411773681640625, + "learning_rate": 4.925379754055891e-06, + "loss": 1.7553, + "step": 3312 + }, + { + "epoch": 2.038612414429659, + "grad_norm": 6.030778884887695, + "learning_rate": 4.919655347888638e-06, + "loss": 1.6393, + "step": 3313 + }, + { + "epoch": 2.039227751711407, + "grad_norm": 6.1177592277526855, + "learning_rate": 4.913933184766298e-06, + "loss": 1.8597, + "step": 3314 + }, + { + "epoch": 2.0398430889931545, + "grad_norm": 5.702389717102051, + "learning_rate": 4.908213267215287e-06, + "loss": 1.7258, + "step": 3315 + }, + { + "epoch": 2.040458426274902, + "grad_norm": 5.394784927368164, + "learning_rate": 4.902495597761032e-06, + "loss": 1.8252, + "step": 3316 + }, + { + "epoch": 2.0410737635566494, + "grad_norm": 6.565735340118408, + "learning_rate": 4.896780178927965e-06, + "loss": 1.8773, + "step": 3317 + }, + { + "epoch": 2.041689100838397, + "grad_norm": 5.8572611808776855, + "learning_rate": 4.891067013239526e-06, + "loss": 1.8705, + "step": 3318 + }, + { + "epoch": 2.0423044381201447, + "grad_norm": 6.141772747039795, + "learning_rate": 4.885356103218159e-06, + "loss": 1.652, + "step": 3319 + }, + { + "epoch": 2.042919775401892, + "grad_norm": 6.561894416809082, + "learning_rate": 4.879647451385311e-06, + "loss": 1.8309, + "step": 3320 + }, + { + "epoch": 2.0435351126836396, + "grad_norm": 6.576610088348389, + "learning_rate": 4.873941060261437e-06, + "loss": 1.7401, + "step": 3321 + }, + { + "epoch": 2.0441504499653873, + "grad_norm": 5.523563385009766, + "learning_rate": 4.868236932365988e-06, + "loss": 1.8905, + "step": 3322 + }, + { + "epoch": 2.044765787247135, + "grad_norm": 5.973571300506592, + "learning_rate": 4.862535070217416e-06, + "loss": 1.8115, + "step": 3323 + }, + { + "epoch": 2.045381124528882, + "grad_norm": 5.550798416137695, + "learning_rate": 4.856835476333181e-06, + "loss": 1.9953, + "step": 3324 + }, + { + "epoch": 2.04599646181063, + "grad_norm": 5.250948905944824, + "learning_rate": 4.8511381532297296e-06, + "loss": 1.729, + "step": 3325 + }, + { + "epoch": 2.0466117990923776, + "grad_norm": 5.951372146606445, + "learning_rate": 4.845443103422513e-06, + "loss": 1.803, + "step": 3326 + }, + { + "epoch": 2.0472271363741252, + "grad_norm": 5.559879779815674, + "learning_rate": 4.839750329425979e-06, + "loss": 2.0996, + "step": 3327 + }, + { + "epoch": 2.0478424736558725, + "grad_norm": 6.469560623168945, + "learning_rate": 4.834059833753565e-06, + "loss": 1.9106, + "step": 3328 + }, + { + "epoch": 2.04845781093762, + "grad_norm": 5.962950229644775, + "learning_rate": 4.828371618917711e-06, + "loss": 1.9361, + "step": 3329 + }, + { + "epoch": 2.049073148219368, + "grad_norm": 7.422765254974365, + "learning_rate": 4.822685687429848e-06, + "loss": 1.601, + "step": 3330 + }, + { + "epoch": 2.0496884855011155, + "grad_norm": 5.539458274841309, + 
"learning_rate": 4.8170020418003884e-06, + "loss": 1.672, + "step": 3331 + }, + { + "epoch": 2.0503038227828627, + "grad_norm": 7.1247968673706055, + "learning_rate": 4.811320684538741e-06, + "loss": 1.8444, + "step": 3332 + }, + { + "epoch": 2.0509191600646104, + "grad_norm": 7.0008344650268555, + "learning_rate": 4.805641618153316e-06, + "loss": 1.933, + "step": 3333 + }, + { + "epoch": 2.051534497346358, + "grad_norm": 6.886376857757568, + "learning_rate": 4.799964845151501e-06, + "loss": 1.7491, + "step": 3334 + }, + { + "epoch": 2.0521498346281057, + "grad_norm": 6.076746463775635, + "learning_rate": 4.794290368039664e-06, + "loss": 1.6984, + "step": 3335 + }, + { + "epoch": 2.052765171909853, + "grad_norm": 7.298564434051514, + "learning_rate": 4.788618189323176e-06, + "loss": 1.6471, + "step": 3336 + }, + { + "epoch": 2.0533805091916006, + "grad_norm": 5.478745937347412, + "learning_rate": 4.782948311506383e-06, + "loss": 1.7237, + "step": 3337 + }, + { + "epoch": 2.0539958464733483, + "grad_norm": 6.292068004608154, + "learning_rate": 4.777280737092621e-06, + "loss": 1.6636, + "step": 3338 + }, + { + "epoch": 2.0546111837550955, + "grad_norm": 7.428073406219482, + "learning_rate": 4.771615468584194e-06, + "loss": 1.6098, + "step": 3339 + }, + { + "epoch": 2.0552265210368432, + "grad_norm": 6.5578718185424805, + "learning_rate": 4.76595250848241e-06, + "loss": 1.8266, + "step": 3340 + }, + { + "epoch": 2.055841858318591, + "grad_norm": 6.293591499328613, + "learning_rate": 4.760291859287543e-06, + "loss": 1.7283, + "step": 3341 + }, + { + "epoch": 2.0564571956003386, + "grad_norm": 8.350112915039062, + "learning_rate": 4.7546335234988515e-06, + "loss": 1.7211, + "step": 3342 + }, + { + "epoch": 2.057072532882086, + "grad_norm": 5.352015972137451, + "learning_rate": 4.7489775036145715e-06, + "loss": 1.6802, + "step": 3343 + }, + { + "epoch": 2.0576878701638335, + "grad_norm": 5.90147066116333, + "learning_rate": 4.743323802131916e-06, + "loss": 1.879, + "step": 3344 + }, + { + "epoch": 2.058303207445581, + "grad_norm": 6.732089042663574, + "learning_rate": 4.737672421547076e-06, + "loss": 1.5635, + "step": 3345 + }, + { + "epoch": 2.058918544727329, + "grad_norm": 5.204902172088623, + "learning_rate": 4.732023364355217e-06, + "loss": 1.8182, + "step": 3346 + }, + { + "epoch": 2.059533882009076, + "grad_norm": 6.496474266052246, + "learning_rate": 4.726376633050479e-06, + "loss": 1.7951, + "step": 3347 + }, + { + "epoch": 2.0601492192908237, + "grad_norm": 7.1246466636657715, + "learning_rate": 4.720732230125973e-06, + "loss": 1.8817, + "step": 3348 + }, + { + "epoch": 2.0607645565725714, + "grad_norm": 6.074631214141846, + "learning_rate": 4.715090158073785e-06, + "loss": 1.7631, + "step": 3349 + }, + { + "epoch": 2.061379893854319, + "grad_norm": 6.618818283081055, + "learning_rate": 4.709450419384971e-06, + "loss": 1.8754, + "step": 3350 + }, + { + "epoch": 2.0619952311360663, + "grad_norm": 6.270000457763672, + "learning_rate": 4.703813016549555e-06, + "loss": 1.7565, + "step": 3351 + }, + { + "epoch": 2.062610568417814, + "grad_norm": 6.7020039558410645, + "learning_rate": 4.698177952056533e-06, + "loss": 1.8978, + "step": 3352 + }, + { + "epoch": 2.0632259056995617, + "grad_norm": 8.08785629272461, + "learning_rate": 4.692545228393861e-06, + "loss": 1.8696, + "step": 3353 + }, + { + "epoch": 2.0638412429813093, + "grad_norm": 7.457103252410889, + "learning_rate": 4.6869148480484795e-06, + "loss": 1.8545, + "step": 3354 + }, + { + "epoch": 2.0644565802630566, + "grad_norm": 
5.8503265380859375, + "learning_rate": 4.681286813506271e-06, + "loss": 1.9954, + "step": 3355 + }, + { + "epoch": 2.0650719175448042, + "grad_norm": 6.669358730316162, + "learning_rate": 4.675661127252092e-06, + "loss": 1.9105, + "step": 3356 + }, + { + "epoch": 2.065687254826552, + "grad_norm": 5.653698921203613, + "learning_rate": 4.670037791769772e-06, + "loss": 1.8248, + "step": 3357 + }, + { + "epoch": 2.0663025921082996, + "grad_norm": 7.3878865242004395, + "learning_rate": 4.664416809542095e-06, + "loss": 1.788, + "step": 3358 + }, + { + "epoch": 2.066917929390047, + "grad_norm": 7.134374618530273, + "learning_rate": 4.658798183050796e-06, + "loss": 1.9037, + "step": 3359 + }, + { + "epoch": 2.0675332666717945, + "grad_norm": 9.284260749816895, + "learning_rate": 4.65318191477658e-06, + "loss": 1.6337, + "step": 3360 + }, + { + "epoch": 2.068148603953542, + "grad_norm": 6.413081645965576, + "learning_rate": 4.647568007199117e-06, + "loss": 1.9942, + "step": 3361 + }, + { + "epoch": 2.0687639412352894, + "grad_norm": 6.153131008148193, + "learning_rate": 4.641956462797027e-06, + "loss": 1.8777, + "step": 3362 + }, + { + "epoch": 2.069379278517037, + "grad_norm": 6.11932897567749, + "learning_rate": 4.636347284047878e-06, + "loss": 1.5756, + "step": 3363 + }, + { + "epoch": 2.0699946157987847, + "grad_norm": 7.416115760803223, + "learning_rate": 4.630740473428212e-06, + "loss": 1.8113, + "step": 3364 + }, + { + "epoch": 2.0706099530805324, + "grad_norm": 7.491588115692139, + "learning_rate": 4.625136033413512e-06, + "loss": 1.693, + "step": 3365 + }, + { + "epoch": 2.0712252903622796, + "grad_norm": 5.7111053466796875, + "learning_rate": 4.619533966478225e-06, + "loss": 1.7201, + "step": 3366 + }, + { + "epoch": 2.0718406276440273, + "grad_norm": 9.074357032775879, + "learning_rate": 4.613934275095729e-06, + "loss": 1.7363, + "step": 3367 + }, + { + "epoch": 2.072455964925775, + "grad_norm": 5.839381217956543, + "learning_rate": 4.608336961738383e-06, + "loss": 1.6211, + "step": 3368 + }, + { + "epoch": 2.0730713022075227, + "grad_norm": 6.195372104644775, + "learning_rate": 4.602742028877475e-06, + "loss": 1.685, + "step": 3369 + }, + { + "epoch": 2.07368663948927, + "grad_norm": 5.7253217697143555, + "learning_rate": 4.597149478983247e-06, + "loss": 1.7744, + "step": 3370 + }, + { + "epoch": 2.0743019767710176, + "grad_norm": 5.3319573402404785, + "learning_rate": 4.5915593145248926e-06, + "loss": 1.693, + "step": 3371 + }, + { + "epoch": 2.0749173140527652, + "grad_norm": 6.086673259735107, + "learning_rate": 4.5859715379705475e-06, + "loss": 1.8653, + "step": 3372 + }, + { + "epoch": 2.075532651334513, + "grad_norm": 7.890754699707031, + "learning_rate": 4.580386151787295e-06, + "loss": 1.7133, + "step": 3373 + }, + { + "epoch": 2.07614798861626, + "grad_norm": 6.602984428405762, + "learning_rate": 4.574803158441165e-06, + "loss": 1.7522, + "step": 3374 + }, + { + "epoch": 2.076763325898008, + "grad_norm": 5.588774681091309, + "learning_rate": 4.569222560397126e-06, + "loss": 1.8268, + "step": 3375 + }, + { + "epoch": 2.0773786631797555, + "grad_norm": 6.588341236114502, + "learning_rate": 4.5636443601190935e-06, + "loss": 2.0176, + "step": 3376 + }, + { + "epoch": 2.077994000461503, + "grad_norm": 8.112367630004883, + "learning_rate": 4.558068560069921e-06, + "loss": 1.8634, + "step": 3377 + }, + { + "epoch": 2.0786093377432504, + "grad_norm": 7.57504940032959, + "learning_rate": 4.552495162711412e-06, + "loss": 1.8255, + "step": 3378 + }, + { + "epoch": 2.079224675024998, + 
"grad_norm": 5.7181477546691895, + "learning_rate": 4.546924170504292e-06, + "loss": 1.8462, + "step": 3379 + }, + { + "epoch": 2.0798400123067458, + "grad_norm": 6.8179931640625, + "learning_rate": 4.541355585908237e-06, + "loss": 2.0091, + "step": 3380 + }, + { + "epoch": 2.080455349588493, + "grad_norm": 6.259915828704834, + "learning_rate": 4.535789411381852e-06, + "loss": 1.7707, + "step": 3381 + }, + { + "epoch": 2.0810706868702407, + "grad_norm": 7.292726993560791, + "learning_rate": 4.530225649382697e-06, + "loss": 1.6228, + "step": 3382 + }, + { + "epoch": 2.0816860241519883, + "grad_norm": 5.297233581542969, + "learning_rate": 4.524664302367239e-06, + "loss": 1.6123, + "step": 3383 + }, + { + "epoch": 2.082301361433736, + "grad_norm": 5.687510013580322, + "learning_rate": 4.519105372790892e-06, + "loss": 1.6705, + "step": 3384 + }, + { + "epoch": 2.0829166987154832, + "grad_norm": 6.365267276763916, + "learning_rate": 4.513548863108012e-06, + "loss": 1.8972, + "step": 3385 + }, + { + "epoch": 2.083532035997231, + "grad_norm": 6.6207404136657715, + "learning_rate": 4.507994775771877e-06, + "loss": 1.5369, + "step": 3386 + }, + { + "epoch": 2.0841473732789786, + "grad_norm": 8.237905502319336, + "learning_rate": 4.502443113234688e-06, + "loss": 1.862, + "step": 3387 + }, + { + "epoch": 2.0847627105607263, + "grad_norm": 6.042557716369629, + "learning_rate": 4.496893877947583e-06, + "loss": 1.659, + "step": 3388 + }, + { + "epoch": 2.0853780478424735, + "grad_norm": 6.555940628051758, + "learning_rate": 4.49134707236064e-06, + "loss": 1.8097, + "step": 3389 + }, + { + "epoch": 2.085993385124221, + "grad_norm": 5.784060001373291, + "learning_rate": 4.485802698922849e-06, + "loss": 1.7261, + "step": 3390 + }, + { + "epoch": 2.086608722405969, + "grad_norm": 7.354560852050781, + "learning_rate": 4.48026076008212e-06, + "loss": 1.7978, + "step": 3391 + }, + { + "epoch": 2.0872240596877165, + "grad_norm": 6.921026229858398, + "learning_rate": 4.474721258285311e-06, + "loss": 1.7688, + "step": 3392 + }, + { + "epoch": 2.0878393969694637, + "grad_norm": 5.657590866088867, + "learning_rate": 4.4691841959781865e-06, + "loss": 1.9275, + "step": 3393 + }, + { + "epoch": 2.0884547342512114, + "grad_norm": 5.571079730987549, + "learning_rate": 4.463649575605442e-06, + "loss": 1.7595, + "step": 3394 + }, + { + "epoch": 2.089070071532959, + "grad_norm": 6.183929920196533, + "learning_rate": 4.458117399610682e-06, + "loss": 1.9147, + "step": 3395 + }, + { + "epoch": 2.0896854088147068, + "grad_norm": 7.1358489990234375, + "learning_rate": 4.452587670436451e-06, + "loss": 2.0107, + "step": 3396 + }, + { + "epoch": 2.090300746096454, + "grad_norm": 6.67927885055542, + "learning_rate": 4.4470603905242014e-06, + "loss": 1.9244, + "step": 3397 + }, + { + "epoch": 2.0909160833782017, + "grad_norm": 6.034088134765625, + "learning_rate": 4.441535562314307e-06, + "loss": 1.7839, + "step": 3398 + }, + { + "epoch": 2.0915314206599493, + "grad_norm": 6.093302249908447, + "learning_rate": 4.436013188246056e-06, + "loss": 1.8382, + "step": 3399 + }, + { + "epoch": 2.0921467579416966, + "grad_norm": 8.17961311340332, + "learning_rate": 4.430493270757656e-06, + "loss": 1.6502, + "step": 3400 + }, + { + "epoch": 2.0927620952234443, + "grad_norm": 7.250993251800537, + "learning_rate": 4.424975812286228e-06, + "loss": 1.7708, + "step": 3401 + }, + { + "epoch": 2.093377432505192, + "grad_norm": 6.544002532958984, + "learning_rate": 4.419460815267818e-06, + "loss": 1.6879, + "step": 3402 + }, + { + "epoch": 
2.0939927697869396, + "grad_norm": 6.984464168548584, + "learning_rate": 4.4139482821373665e-06, + "loss": 1.7223, + "step": 3403 + }, + { + "epoch": 2.094608107068687, + "grad_norm": 7.110546112060547, + "learning_rate": 4.408438215328739e-06, + "loss": 2.0498, + "step": 3404 + }, + { + "epoch": 2.0952234443504345, + "grad_norm": 6.857098579406738, + "learning_rate": 4.402930617274704e-06, + "loss": 1.8442, + "step": 3405 + }, + { + "epoch": 2.095838781632182, + "grad_norm": 6.730200290679932, + "learning_rate": 4.397425490406959e-06, + "loss": 1.73, + "step": 3406 + }, + { + "epoch": 2.09645411891393, + "grad_norm": 6.327692031860352, + "learning_rate": 4.391922837156082e-06, + "loss": 1.7304, + "step": 3407 + }, + { + "epoch": 2.097069456195677, + "grad_norm": 6.548787593841553, + "learning_rate": 4.386422659951575e-06, + "loss": 1.6831, + "step": 3408 + }, + { + "epoch": 2.0976847934774248, + "grad_norm": 6.116971015930176, + "learning_rate": 4.3809249612218516e-06, + "loss": 1.9017, + "step": 3409 + }, + { + "epoch": 2.0983001307591724, + "grad_norm": 5.550501823425293, + "learning_rate": 4.375429743394226e-06, + "loss": 1.9905, + "step": 3410 + }, + { + "epoch": 2.09891546804092, + "grad_norm": 6.236998081207275, + "learning_rate": 4.369937008894906e-06, + "loss": 1.8331, + "step": 3411 + }, + { + "epoch": 2.0995308053226673, + "grad_norm": 6.377808094024658, + "learning_rate": 4.364446760149014e-06, + "loss": 1.7178, + "step": 3412 + }, + { + "epoch": 2.100146142604415, + "grad_norm": 5.621842861175537, + "learning_rate": 4.35895899958058e-06, + "loss": 1.7989, + "step": 3413 + }, + { + "epoch": 2.1007614798861627, + "grad_norm": 7.039883613586426, + "learning_rate": 4.353473729612531e-06, + "loss": 1.7735, + "step": 3414 + }, + { + "epoch": 2.1013768171679104, + "grad_norm": 6.652933120727539, + "learning_rate": 4.3479909526666775e-06, + "loss": 1.747, + "step": 3415 + }, + { + "epoch": 2.1019921544496576, + "grad_norm": 5.216039180755615, + "learning_rate": 4.3425106711637575e-06, + "loss": 1.6399, + "step": 3416 + }, + { + "epoch": 2.1026074917314053, + "grad_norm": 6.2535200119018555, + "learning_rate": 4.33703288752339e-06, + "loss": 1.9096, + "step": 3417 + }, + { + "epoch": 2.103222829013153, + "grad_norm": 6.197005748748779, + "learning_rate": 4.331557604164098e-06, + "loss": 1.9063, + "step": 3418 + }, + { + "epoch": 2.1038381662949, + "grad_norm": 6.66378927230835, + "learning_rate": 4.326084823503287e-06, + "loss": 1.8687, + "step": 3419 + }, + { + "epoch": 2.104453503576648, + "grad_norm": 5.811575889587402, + "learning_rate": 4.320614547957278e-06, + "loss": 1.9376, + "step": 3420 + }, + { + "epoch": 2.1050688408583955, + "grad_norm": 5.788993835449219, + "learning_rate": 4.315146779941274e-06, + "loss": 1.6321, + "step": 3421 + }, + { + "epoch": 2.105684178140143, + "grad_norm": 10.83026123046875, + "learning_rate": 4.309681521869371e-06, + "loss": 1.7908, + "step": 3422 + }, + { + "epoch": 2.1062995154218904, + "grad_norm": 6.313831806182861, + "learning_rate": 4.30421877615456e-06, + "loss": 1.8131, + "step": 3423 + }, + { + "epoch": 2.106914852703638, + "grad_norm": 6.941000461578369, + "learning_rate": 4.298758545208722e-06, + "loss": 1.7597, + "step": 3424 + }, + { + "epoch": 2.1075301899853858, + "grad_norm": 6.162196159362793, + "learning_rate": 4.293300831442625e-06, + "loss": 1.853, + "step": 3425 + }, + { + "epoch": 2.1081455272671334, + "grad_norm": 7.969858646392822, + "learning_rate": 4.2878456372659295e-06, + "loss": 1.6045, + "step": 3426 + }, + { + 
"epoch": 2.1087608645488807, + "grad_norm": 6.1959686279296875, + "learning_rate": 4.282392965087182e-06, + "loss": 1.7688, + "step": 3427 + }, + { + "epoch": 2.1093762018306284, + "grad_norm": 5.879762172698975, + "learning_rate": 4.276942817313814e-06, + "loss": 1.8454, + "step": 3428 + }, + { + "epoch": 2.109991539112376, + "grad_norm": 7.557710647583008, + "learning_rate": 4.271495196352141e-06, + "loss": 1.7262, + "step": 3429 + }, + { + "epoch": 2.1106068763941237, + "grad_norm": 5.557179927825928, + "learning_rate": 4.266050104607377e-06, + "loss": 1.5913, + "step": 3430 + }, + { + "epoch": 2.111222213675871, + "grad_norm": 6.4916558265686035, + "learning_rate": 4.260607544483596e-06, + "loss": 1.6744, + "step": 3431 + }, + { + "epoch": 2.1118375509576186, + "grad_norm": 7.549380302429199, + "learning_rate": 4.255167518383771e-06, + "loss": 1.7086, + "step": 3432 + }, + { + "epoch": 2.1124528882393663, + "grad_norm": 6.396212100982666, + "learning_rate": 4.249730028709746e-06, + "loss": 1.6145, + "step": 3433 + }, + { + "epoch": 2.113068225521114, + "grad_norm": 7.559441566467285, + "learning_rate": 4.244295077862263e-06, + "loss": 1.7983, + "step": 3434 + }, + { + "epoch": 2.113683562802861, + "grad_norm": 6.590290546417236, + "learning_rate": 4.23886266824092e-06, + "loss": 1.5711, + "step": 3435 + }, + { + "epoch": 2.114298900084609, + "grad_norm": 7.195917129516602, + "learning_rate": 4.233432802244203e-06, + "loss": 1.7692, + "step": 3436 + }, + { + "epoch": 2.1149142373663565, + "grad_norm": 6.687677383422852, + "learning_rate": 4.228005482269481e-06, + "loss": 1.7384, + "step": 3437 + }, + { + "epoch": 2.115529574648104, + "grad_norm": 5.699735164642334, + "learning_rate": 4.222580710712998e-06, + "loss": 1.6707, + "step": 3438 + }, + { + "epoch": 2.1161449119298514, + "grad_norm": 6.000125885009766, + "learning_rate": 4.217158489969857e-06, + "loss": 1.7306, + "step": 3439 + }, + { + "epoch": 2.116760249211599, + "grad_norm": 7.834855079650879, + "learning_rate": 4.211738822434047e-06, + "loss": 1.6486, + "step": 3440 + }, + { + "epoch": 2.117375586493347, + "grad_norm": 6.987403869628906, + "learning_rate": 4.2063217104984374e-06, + "loss": 1.7275, + "step": 3441 + }, + { + "epoch": 2.117990923775094, + "grad_norm": 6.23411750793457, + "learning_rate": 4.20090715655476e-06, + "loss": 1.7822, + "step": 3442 + }, + { + "epoch": 2.1186062610568417, + "grad_norm": 6.039050579071045, + "learning_rate": 4.195495162993607e-06, + "loss": 1.7453, + "step": 3443 + }, + { + "epoch": 2.1192215983385894, + "grad_norm": 5.998954772949219, + "learning_rate": 4.190085732204462e-06, + "loss": 1.8972, + "step": 3444 + }, + { + "epoch": 2.119836935620337, + "grad_norm": 6.687854290008545, + "learning_rate": 4.184678866575661e-06, + "loss": 1.8407, + "step": 3445 + }, + { + "epoch": 2.1204522729020843, + "grad_norm": 6.210193634033203, + "learning_rate": 4.179274568494416e-06, + "loss": 1.8996, + "step": 3446 + }, + { + "epoch": 2.121067610183832, + "grad_norm": 6.2494049072265625, + "learning_rate": 4.173872840346798e-06, + "loss": 1.725, + "step": 3447 + }, + { + "epoch": 2.1216829474655796, + "grad_norm": 5.948059558868408, + "learning_rate": 4.1684736845177496e-06, + "loss": 1.7866, + "step": 3448 + }, + { + "epoch": 2.1222982847473273, + "grad_norm": 7.316122531890869, + "learning_rate": 4.1630771033910764e-06, + "loss": 1.683, + "step": 3449 + }, + { + "epoch": 2.1229136220290745, + "grad_norm": 6.441366672515869, + "learning_rate": 4.157683099349444e-06, + "loss": 1.8521, + "step": 3450 
+ }, + { + "epoch": 2.123528959310822, + "grad_norm": 5.997797012329102, + "learning_rate": 4.152291674774384e-06, + "loss": 1.8644, + "step": 3451 + }, + { + "epoch": 2.12414429659257, + "grad_norm": 6.526950359344482, + "learning_rate": 4.1469028320462854e-06, + "loss": 1.8191, + "step": 3452 + }, + { + "epoch": 2.1247596338743175, + "grad_norm": 7.525435924530029, + "learning_rate": 4.1415165735444016e-06, + "loss": 1.7894, + "step": 3453 + }, + { + "epoch": 2.1253749711560648, + "grad_norm": 6.054096698760986, + "learning_rate": 4.136132901646843e-06, + "loss": 1.9322, + "step": 3454 + }, + { + "epoch": 2.1259903084378124, + "grad_norm": 7.2936248779296875, + "learning_rate": 4.130751818730578e-06, + "loss": 1.8635, + "step": 3455 + }, + { + "epoch": 2.12660564571956, + "grad_norm": 5.032671928405762, + "learning_rate": 4.125373327171428e-06, + "loss": 1.8111, + "step": 3456 + }, + { + "epoch": 2.127220983001308, + "grad_norm": 4.81178092956543, + "learning_rate": 4.119997429344074e-06, + "loss": 1.8891, + "step": 3457 + }, + { + "epoch": 2.127836320283055, + "grad_norm": 5.949984073638916, + "learning_rate": 4.114624127622063e-06, + "loss": 1.8014, + "step": 3458 + }, + { + "epoch": 2.1284516575648027, + "grad_norm": 8.81669807434082, + "learning_rate": 4.109253424377773e-06, + "loss": 1.9095, + "step": 3459 + }, + { + "epoch": 2.1290669948465504, + "grad_norm": 6.261661052703857, + "learning_rate": 4.103885321982448e-06, + "loss": 1.5936, + "step": 3460 + }, + { + "epoch": 2.129682332128298, + "grad_norm": 6.435263633728027, + "learning_rate": 4.098519822806182e-06, + "loss": 1.8852, + "step": 3461 + }, + { + "epoch": 2.1302976694100453, + "grad_norm": 6.7929511070251465, + "learning_rate": 4.093156929217929e-06, + "loss": 1.7089, + "step": 3462 + }, + { + "epoch": 2.130913006691793, + "grad_norm": 6.66023588180542, + "learning_rate": 4.08779664358547e-06, + "loss": 1.8463, + "step": 3463 + }, + { + "epoch": 2.1315283439735406, + "grad_norm": 6.4495673179626465, + "learning_rate": 4.082438968275452e-06, + "loss": 1.8501, + "step": 3464 + }, + { + "epoch": 2.132143681255288, + "grad_norm": 6.101770877838135, + "learning_rate": 4.07708390565337e-06, + "loss": 1.7517, + "step": 3465 + }, + { + "epoch": 2.1327590185370355, + "grad_norm": 5.903330326080322, + "learning_rate": 4.071731458083562e-06, + "loss": 1.751, + "step": 3466 + }, + { + "epoch": 2.133374355818783, + "grad_norm": 7.882909297943115, + "learning_rate": 4.066381627929202e-06, + "loss": 2.0, + "step": 3467 + }, + { + "epoch": 2.133989693100531, + "grad_norm": 8.59914493560791, + "learning_rate": 4.0610344175523175e-06, + "loss": 1.7214, + "step": 3468 + }, + { + "epoch": 2.134605030382278, + "grad_norm": 6.022368907928467, + "learning_rate": 4.0556898293137845e-06, + "loss": 1.868, + "step": 3469 + }, + { + "epoch": 2.135220367664026, + "grad_norm": 6.226086616516113, + "learning_rate": 4.0503478655733146e-06, + "loss": 1.8734, + "step": 3470 + }, + { + "epoch": 2.1358357049457735, + "grad_norm": 6.783802509307861, + "learning_rate": 4.045008528689457e-06, + "loss": 1.6985, + "step": 3471 + }, + { + "epoch": 2.136451042227521, + "grad_norm": 6.034868240356445, + "learning_rate": 4.039671821019609e-06, + "loss": 1.7351, + "step": 3472 + }, + { + "epoch": 2.1370663795092684, + "grad_norm": 5.638566017150879, + "learning_rate": 4.034337744920002e-06, + "loss": 1.8371, + "step": 3473 + }, + { + "epoch": 2.137681716791016, + "grad_norm": 5.741688251495361, + "learning_rate": 4.029006302745706e-06, + "loss": 1.8493, + "step": 
3474 + }, + { + "epoch": 2.1382970540727637, + "grad_norm": 6.540286064147949, + "learning_rate": 4.02367749685063e-06, + "loss": 1.7876, + "step": 3475 + }, + { + "epoch": 2.1389123913545114, + "grad_norm": 7.431596279144287, + "learning_rate": 4.0183513295875184e-06, + "loss": 1.8439, + "step": 3476 + }, + { + "epoch": 2.1395277286362586, + "grad_norm": 6.3528828620910645, + "learning_rate": 4.01302780330795e-06, + "loss": 1.7545, + "step": 3477 + }, + { + "epoch": 2.1401430659180063, + "grad_norm": 12.513903617858887, + "learning_rate": 4.00770692036234e-06, + "loss": 1.5672, + "step": 3478 + }, + { + "epoch": 2.140758403199754, + "grad_norm": 8.017230033874512, + "learning_rate": 4.00238868309993e-06, + "loss": 1.7609, + "step": 3479 + }, + { + "epoch": 2.1413737404815016, + "grad_norm": 6.970734119415283, + "learning_rate": 3.997073093868802e-06, + "loss": 1.707, + "step": 3480 + }, + { + "epoch": 2.141989077763249, + "grad_norm": 5.898890018463135, + "learning_rate": 3.9917601550158616e-06, + "loss": 1.7447, + "step": 3481 + }, + { + "epoch": 2.1426044150449965, + "grad_norm": 6.96876335144043, + "learning_rate": 3.986449868886849e-06, + "loss": 1.6927, + "step": 3482 + }, + { + "epoch": 2.143219752326744, + "grad_norm": 6.155405521392822, + "learning_rate": 3.981142237826332e-06, + "loss": 1.987, + "step": 3483 + }, + { + "epoch": 2.1438350896084915, + "grad_norm": 5.8643035888671875, + "learning_rate": 3.975837264177706e-06, + "loss": 1.6284, + "step": 3484 + }, + { + "epoch": 2.144450426890239, + "grad_norm": 6.540576457977295, + "learning_rate": 3.970534950283187e-06, + "loss": 1.9597, + "step": 3485 + }, + { + "epoch": 2.145065764171987, + "grad_norm": 6.038638114929199, + "learning_rate": 3.965235298483835e-06, + "loss": 2.012, + "step": 3486 + }, + { + "epoch": 2.1456811014537345, + "grad_norm": 5.754665851593018, + "learning_rate": 3.959938311119511e-06, + "loss": 1.6595, + "step": 3487 + }, + { + "epoch": 2.1462964387354817, + "grad_norm": 7.253922939300537, + "learning_rate": 3.954643990528914e-06, + "loss": 1.6526, + "step": 3488 + }, + { + "epoch": 2.1469117760172294, + "grad_norm": 6.026256561279297, + "learning_rate": 3.949352339049561e-06, + "loss": 1.7601, + "step": 3489 + }, + { + "epoch": 2.147527113298977, + "grad_norm": 7.472198486328125, + "learning_rate": 3.944063359017799e-06, + "loss": 1.9185, + "step": 3490 + }, + { + "epoch": 2.1481424505807247, + "grad_norm": 5.0746564865112305, + "learning_rate": 3.9387770527687795e-06, + "loss": 1.9318, + "step": 3491 + }, + { + "epoch": 2.148757787862472, + "grad_norm": 6.121618270874023, + "learning_rate": 3.933493422636483e-06, + "loss": 1.597, + "step": 3492 + }, + { + "epoch": 2.1493731251442196, + "grad_norm": 5.739800930023193, + "learning_rate": 3.928212470953714e-06, + "loss": 1.7866, + "step": 3493 + }, + { + "epoch": 2.1499884624259673, + "grad_norm": 8.669281005859375, + "learning_rate": 3.9229342000520855e-06, + "loss": 1.8607, + "step": 3494 + }, + { + "epoch": 2.150603799707715, + "grad_norm": 5.826760292053223, + "learning_rate": 3.917658612262033e-06, + "loss": 1.8306, + "step": 3495 + }, + { + "epoch": 2.151219136989462, + "grad_norm": 9.474946022033691, + "learning_rate": 3.912385709912794e-06, + "loss": 1.753, + "step": 3496 + }, + { + "epoch": 2.15183447427121, + "grad_norm": 5.793565273284912, + "learning_rate": 3.907115495332439e-06, + "loss": 1.8757, + "step": 3497 + }, + { + "epoch": 2.1524498115529576, + "grad_norm": 8.622337341308594, + "learning_rate": 3.901847970847842e-06, + "loss": 1.7299, + 
"step": 3498 + }, + { + "epoch": 2.1530651488347052, + "grad_norm": 5.495602130889893, + "learning_rate": 3.8965831387846885e-06, + "loss": 1.8963, + "step": 3499 + }, + { + "epoch": 2.1536804861164525, + "grad_norm": 7.140043258666992, + "learning_rate": 3.891321001467478e-06, + "loss": 1.8134, + "step": 3500 + }, + { + "epoch": 2.1542958233982, + "grad_norm": 7.439120769500732, + "learning_rate": 3.886061561219521e-06, + "loss": 1.748, + "step": 3501 + }, + { + "epoch": 2.154911160679948, + "grad_norm": 7.066040992736816, + "learning_rate": 3.880804820362934e-06, + "loss": 1.8305, + "step": 3502 + }, + { + "epoch": 2.155526497961695, + "grad_norm": 6.917507648468018, + "learning_rate": 3.875550781218644e-06, + "loss": 1.8725, + "step": 3503 + }, + { + "epoch": 2.1561418352434427, + "grad_norm": 7.324021339416504, + "learning_rate": 3.870299446106382e-06, + "loss": 1.8762, + "step": 3504 + }, + { + "epoch": 2.1567571725251904, + "grad_norm": 6.353249549865723, + "learning_rate": 3.865050817344692e-06, + "loss": 1.9309, + "step": 3505 + }, + { + "epoch": 2.157372509806938, + "grad_norm": 6.5409836769104, + "learning_rate": 3.859804897250918e-06, + "loss": 1.9084, + "step": 3506 + }, + { + "epoch": 2.1579878470886853, + "grad_norm": 6.432894229888916, + "learning_rate": 3.854561688141205e-06, + "loss": 1.7443, + "step": 3507 + }, + { + "epoch": 2.158603184370433, + "grad_norm": 6.847968578338623, + "learning_rate": 3.8493211923305075e-06, + "loss": 1.6943, + "step": 3508 + }, + { + "epoch": 2.1592185216521806, + "grad_norm": 8.001476287841797, + "learning_rate": 3.844083412132581e-06, + "loss": 1.8986, + "step": 3509 + }, + { + "epoch": 2.1598338589339283, + "grad_norm": 7.7860589027404785, + "learning_rate": 3.838848349859977e-06, + "loss": 1.9288, + "step": 3510 + }, + { + "epoch": 2.1604491962156755, + "grad_norm": 6.573337078094482, + "learning_rate": 3.833616007824054e-06, + "loss": 1.859, + "step": 3511 + }, + { + "epoch": 2.1610645334974232, + "grad_norm": 6.982348442077637, + "learning_rate": 3.828386388334962e-06, + "loss": 1.7955, + "step": 3512 + }, + { + "epoch": 2.161679870779171, + "grad_norm": 7.069790840148926, + "learning_rate": 3.823159493701654e-06, + "loss": 1.9839, + "step": 3513 + }, + { + "epoch": 2.1622952080609186, + "grad_norm": 8.189737319946289, + "learning_rate": 3.817935326231886e-06, + "loss": 1.686, + "step": 3514 + }, + { + "epoch": 2.162910545342666, + "grad_norm": 7.1715168952941895, + "learning_rate": 3.8127138882321937e-06, + "loss": 1.7137, + "step": 3515 + }, + { + "epoch": 2.1635258826244135, + "grad_norm": 7.728034973144531, + "learning_rate": 3.8074951820079186e-06, + "loss": 1.8788, + "step": 3516 + }, + { + "epoch": 2.164141219906161, + "grad_norm": 6.643385410308838, + "learning_rate": 3.802279209863192e-06, + "loss": 1.7314, + "step": 3517 + }, + { + "epoch": 2.164756557187909, + "grad_norm": 5.913739204406738, + "learning_rate": 3.7970659741009473e-06, + "loss": 1.8102, + "step": 3518 + }, + { + "epoch": 2.165371894469656, + "grad_norm": 5.616042613983154, + "learning_rate": 3.791855477022903e-06, + "loss": 1.6161, + "step": 3519 + }, + { + "epoch": 2.1659872317514037, + "grad_norm": 6.841211795806885, + "learning_rate": 3.786647720929557e-06, + "loss": 1.6363, + "step": 3520 + }, + { + "epoch": 2.1666025690331514, + "grad_norm": 10.204116821289062, + "learning_rate": 3.781442708120219e-06, + "loss": 1.4883, + "step": 3521 + }, + { + "epoch": 2.1672179063148986, + "grad_norm": 7.616401672363281, + "learning_rate": 3.776240440892973e-06, + 
"loss": 1.8757, + "step": 3522 + }, + { + "epoch": 2.1678332435966463, + "grad_norm": 6.198624610900879, + "learning_rate": 3.7710409215446986e-06, + "loss": 1.773, + "step": 3523 + }, + { + "epoch": 2.168448580878394, + "grad_norm": 5.552943706512451, + "learning_rate": 3.7658441523710467e-06, + "loss": 1.8592, + "step": 3524 + }, + { + "epoch": 2.1690639181601417, + "grad_norm": 6.639764785766602, + "learning_rate": 3.760650135666476e-06, + "loss": 1.7552, + "step": 3525 + }, + { + "epoch": 2.169679255441889, + "grad_norm": 6.116837978363037, + "learning_rate": 3.755458873724216e-06, + "loss": 1.6618, + "step": 3526 + }, + { + "epoch": 2.1702945927236366, + "grad_norm": 6.100852012634277, + "learning_rate": 3.750270368836283e-06, + "loss": 1.939, + "step": 3527 + }, + { + "epoch": 2.1709099300053842, + "grad_norm": 6.794033527374268, + "learning_rate": 3.7450846232934768e-06, + "loss": 1.7364, + "step": 3528 + }, + { + "epoch": 2.171525267287132, + "grad_norm": 5.016022682189941, + "learning_rate": 3.7399016393853772e-06, + "loss": 1.7088, + "step": 3529 + }, + { + "epoch": 2.172140604568879, + "grad_norm": 6.446207523345947, + "learning_rate": 3.7347214194003466e-06, + "loss": 1.6803, + "step": 3530 + }, + { + "epoch": 2.172755941850627, + "grad_norm": 6.21454381942749, + "learning_rate": 3.729543965625526e-06, + "loss": 1.8238, + "step": 3531 + }, + { + "epoch": 2.1733712791323745, + "grad_norm": 5.046460151672363, + "learning_rate": 3.7243692803468357e-06, + "loss": 1.7732, + "step": 3532 + }, + { + "epoch": 2.173986616414122, + "grad_norm": 6.583563327789307, + "learning_rate": 3.7191973658489735e-06, + "loss": 1.758, + "step": 3533 + }, + { + "epoch": 2.1746019536958694, + "grad_norm": 6.290155410766602, + "learning_rate": 3.7140282244154136e-06, + "loss": 1.8478, + "step": 3534 + }, + { + "epoch": 2.175217290977617, + "grad_norm": 5.470250606536865, + "learning_rate": 3.7088618583284054e-06, + "loss": 1.7686, + "step": 3535 + }, + { + "epoch": 2.1758326282593647, + "grad_norm": 6.562404632568359, + "learning_rate": 3.7036982698689726e-06, + "loss": 1.8, + "step": 3536 + }, + { + "epoch": 2.1764479655411124, + "grad_norm": 5.468561172485352, + "learning_rate": 3.6985374613169166e-06, + "loss": 1.8256, + "step": 3537 + }, + { + "epoch": 2.1770633028228596, + "grad_norm": 6.803021430969238, + "learning_rate": 3.6933794349508057e-06, + "loss": 1.845, + "step": 3538 + }, + { + "epoch": 2.1776786401046073, + "grad_norm": 7.819682598114014, + "learning_rate": 3.6882241930479824e-06, + "loss": 1.6526, + "step": 3539 + }, + { + "epoch": 2.178293977386355, + "grad_norm": 6.040180683135986, + "learning_rate": 3.6830717378845614e-06, + "loss": 1.9836, + "step": 3540 + }, + { + "epoch": 2.1789093146681022, + "grad_norm": 6.4542107582092285, + "learning_rate": 3.6779220717354204e-06, + "loss": 1.5045, + "step": 3541 + }, + { + "epoch": 2.17952465194985, + "grad_norm": 6.920939922332764, + "learning_rate": 3.6727751968742185e-06, + "loss": 1.8291, + "step": 3542 + }, + { + "epoch": 2.1801399892315976, + "grad_norm": 6.436679363250732, + "learning_rate": 3.6676311155733745e-06, + "loss": 1.7758, + "step": 3543 + }, + { + "epoch": 2.1807553265133452, + "grad_norm": 6.941107749938965, + "learning_rate": 3.662489830104068e-06, + "loss": 1.5626, + "step": 3544 + }, + { + "epoch": 2.1813706637950925, + "grad_norm": 7.556491851806641, + "learning_rate": 3.6573513427362485e-06, + "loss": 1.7638, + "step": 3545 + }, + { + "epoch": 2.18198600107684, + "grad_norm": 5.634767532348633, + "learning_rate": 
3.652215655738639e-06, + "loss": 1.6803, + "step": 3546 + }, + { + "epoch": 2.182601338358588, + "grad_norm": 7.94499397277832, + "learning_rate": 3.6470827713787195e-06, + "loss": 1.6748, + "step": 3547 + }, + { + "epoch": 2.1832166756403355, + "grad_norm": 6.499550819396973, + "learning_rate": 3.641952691922721e-06, + "loss": 1.8324, + "step": 3548 + }, + { + "epoch": 2.1838320129220827, + "grad_norm": 5.753168106079102, + "learning_rate": 3.6368254196356576e-06, + "loss": 1.6514, + "step": 3549 + }, + { + "epoch": 2.1844473502038304, + "grad_norm": 5.956106662750244, + "learning_rate": 3.63170095678129e-06, + "loss": 1.7004, + "step": 3550 + }, + { + "epoch": 2.185062687485578, + "grad_norm": 7.406048774719238, + "learning_rate": 3.6265793056221465e-06, + "loss": 1.8522, + "step": 3551 + }, + { + "epoch": 2.1856780247673258, + "grad_norm": 6.147319793701172, + "learning_rate": 3.6214604684194974e-06, + "loss": 1.5487, + "step": 3552 + }, + { + "epoch": 2.186293362049073, + "grad_norm": 6.3816328048706055, + "learning_rate": 3.6163444474333933e-06, + "loss": 1.5987, + "step": 3553 + }, + { + "epoch": 2.1869086993308207, + "grad_norm": 6.2878594398498535, + "learning_rate": 3.611231244922628e-06, + "loss": 1.6732, + "step": 3554 + }, + { + "epoch": 2.1875240366125683, + "grad_norm": 6.863286972045898, + "learning_rate": 3.606120863144753e-06, + "loss": 1.6638, + "step": 3555 + }, + { + "epoch": 2.188139373894316, + "grad_norm": 6.432553768157959, + "learning_rate": 3.6010133043560748e-06, + "loss": 1.8247, + "step": 3556 + }, + { + "epoch": 2.1887547111760632, + "grad_norm": 6.585569858551025, + "learning_rate": 3.5959085708116546e-06, + "loss": 1.7384, + "step": 3557 + }, + { + "epoch": 2.189370048457811, + "grad_norm": 6.46146297454834, + "learning_rate": 3.590806664765305e-06, + "loss": 1.8075, + "step": 3558 + }, + { + "epoch": 2.1899853857395586, + "grad_norm": 5.864133358001709, + "learning_rate": 3.5857075884695915e-06, + "loss": 1.7433, + "step": 3559 + }, + { + "epoch": 2.190600723021306, + "grad_norm": 5.76063871383667, + "learning_rate": 3.5806113441758272e-06, + "loss": 1.8489, + "step": 3560 + }, + { + "epoch": 2.1912160603030535, + "grad_norm": 5.8711652755737305, + "learning_rate": 3.5755179341340772e-06, + "loss": 1.6359, + "step": 3561 + }, + { + "epoch": 2.191831397584801, + "grad_norm": 5.699733257293701, + "learning_rate": 3.570427360593157e-06, + "loss": 1.7338, + "step": 3562 + }, + { + "epoch": 2.192446734866549, + "grad_norm": 7.245506763458252, + "learning_rate": 3.5653396258006266e-06, + "loss": 1.925, + "step": 3563 + }, + { + "epoch": 2.1930620721482965, + "grad_norm": 6.797809600830078, + "learning_rate": 3.5602547320027937e-06, + "loss": 1.9464, + "step": 3564 + }, + { + "epoch": 2.1936774094300437, + "grad_norm": 6.179542541503906, + "learning_rate": 3.555172681444712e-06, + "loss": 1.7246, + "step": 3565 + }, + { + "epoch": 2.1942927467117914, + "grad_norm": 5.481915473937988, + "learning_rate": 3.5500934763701743e-06, + "loss": 1.7539, + "step": 3566 + }, + { + "epoch": 2.194908083993539, + "grad_norm": 6.403146266937256, + "learning_rate": 3.5450171190217364e-06, + "loss": 1.5821, + "step": 3567 + }, + { + "epoch": 2.1955234212752863, + "grad_norm": 6.226749420166016, + "learning_rate": 3.539943611640669e-06, + "loss": 1.7547, + "step": 3568 + }, + { + "epoch": 2.196138758557034, + "grad_norm": 9.456087112426758, + "learning_rate": 3.5348729564669993e-06, + "loss": 2.0131, + "step": 3569 + }, + { + "epoch": 2.1967540958387817, + "grad_norm": 
7.174044132232666, + "learning_rate": 3.529805155739502e-06, + "loss": 1.8156, + "step": 3570 + }, + { + "epoch": 2.1973694331205293, + "grad_norm": 5.694392681121826, + "learning_rate": 3.524740211695683e-06, + "loss": 1.809, + "step": 3571 + }, + { + "epoch": 2.1979847704022766, + "grad_norm": 6.33433198928833, + "learning_rate": 3.5196781265717807e-06, + "loss": 1.8072, + "step": 3572 + }, + { + "epoch": 2.1986001076840243, + "grad_norm": 6.174037456512451, + "learning_rate": 3.5146189026027787e-06, + "loss": 1.7992, + "step": 3573 + }, + { + "epoch": 2.199215444965772, + "grad_norm": 8.35564136505127, + "learning_rate": 3.509562542022402e-06, + "loss": 1.7617, + "step": 3574 + }, + { + "epoch": 2.1998307822475196, + "grad_norm": 5.940443515777588, + "learning_rate": 3.504509047063108e-06, + "loss": 1.7662, + "step": 3575 + }, + { + "epoch": 2.200446119529267, + "grad_norm": 6.390726089477539, + "learning_rate": 3.4994584199560767e-06, + "loss": 1.6963, + "step": 3576 + }, + { + "epoch": 2.2010614568110145, + "grad_norm": 5.901885986328125, + "learning_rate": 3.4944106629312423e-06, + "loss": 1.7411, + "step": 3577 + }, + { + "epoch": 2.201676794092762, + "grad_norm": 9.327081680297852, + "learning_rate": 3.4893657782172573e-06, + "loss": 1.784, + "step": 3578 + }, + { + "epoch": 2.20229213137451, + "grad_norm": 6.029989719390869, + "learning_rate": 3.4843237680415153e-06, + "loss": 1.835, + "step": 3579 + }, + { + "epoch": 2.202907468656257, + "grad_norm": 6.3562140464782715, + "learning_rate": 3.479284634630126e-06, + "loss": 1.9905, + "step": 3580 + }, + { + "epoch": 2.2035228059380048, + "grad_norm": 7.316710948944092, + "learning_rate": 3.4742483802079473e-06, + "loss": 1.5314, + "step": 3581 + }, + { + "epoch": 2.2041381432197524, + "grad_norm": 6.60170316696167, + "learning_rate": 3.4692150069985563e-06, + "loss": 1.7767, + "step": 3582 + }, + { + "epoch": 2.2047534805015, + "grad_norm": 6.728901386260986, + "learning_rate": 3.4641845172242573e-06, + "loss": 1.7168, + "step": 3583 + }, + { + "epoch": 2.2053688177832473, + "grad_norm": 6.748176574707031, + "learning_rate": 3.4591569131060855e-06, + "loss": 1.6778, + "step": 3584 + }, + { + "epoch": 2.205984155064995, + "grad_norm": 7.140573978424072, + "learning_rate": 3.4541321968637995e-06, + "loss": 1.6806, + "step": 3585 + }, + { + "epoch": 2.2065994923467427, + "grad_norm": 6.539532661437988, + "learning_rate": 3.4491103707158833e-06, + "loss": 1.9815, + "step": 3586 + }, + { + "epoch": 2.20721482962849, + "grad_norm": 6.89892578125, + "learning_rate": 3.444091436879545e-06, + "loss": 1.8372, + "step": 3587 + }, + { + "epoch": 2.2078301669102376, + "grad_norm": 5.535845756530762, + "learning_rate": 3.439075397570716e-06, + "loss": 1.6449, + "step": 3588 + }, + { + "epoch": 2.2084455041919853, + "grad_norm": 8.870833396911621, + "learning_rate": 3.434062255004049e-06, + "loss": 1.6506, + "step": 3589 + }, + { + "epoch": 2.209060841473733, + "grad_norm": 5.792304039001465, + "learning_rate": 3.429052011392916e-06, + "loss": 1.8304, + "step": 3590 + }, + { + "epoch": 2.20967617875548, + "grad_norm": 7.25160551071167, + "learning_rate": 3.42404466894942e-06, + "loss": 1.5745, + "step": 3591 + }, + { + "epoch": 2.210291516037228, + "grad_norm": 6.2864532470703125, + "learning_rate": 3.4190402298843637e-06, + "loss": 1.7586, + "step": 3592 + }, + { + "epoch": 2.2109068533189755, + "grad_norm": 6.978004455566406, + "learning_rate": 3.414038696407285e-06, + "loss": 1.9302, + "step": 3593 + }, + { + "epoch": 2.211522190600723, + 
"grad_norm": 6.313823223114014, + "learning_rate": 3.409040070726427e-06, + "loss": 1.874, + "step": 3594 + }, + { + "epoch": 2.2121375278824704, + "grad_norm": 5.9382476806640625, + "learning_rate": 3.4040443550487645e-06, + "loss": 1.8486, + "step": 3595 + }, + { + "epoch": 2.212752865164218, + "grad_norm": 7.547611713409424, + "learning_rate": 3.3990515515799684e-06, + "loss": 1.8259, + "step": 3596 + }, + { + "epoch": 2.2133682024459658, + "grad_norm": 5.784145832061768, + "learning_rate": 3.3940616625244314e-06, + "loss": 1.6947, + "step": 3597 + }, + { + "epoch": 2.2139835397277134, + "grad_norm": 6.75522518157959, + "learning_rate": 3.389074690085269e-06, + "loss": 1.7459, + "step": 3598 + }, + { + "epoch": 2.2145988770094607, + "grad_norm": 6.079753875732422, + "learning_rate": 3.384090636464301e-06, + "loss": 1.779, + "step": 3599 + }, + { + "epoch": 2.2152142142912084, + "grad_norm": 5.456742763519287, + "learning_rate": 3.3791095038620515e-06, + "loss": 1.6551, + "step": 3600 + }, + { + "epoch": 2.215829551572956, + "grad_norm": 6.7210469245910645, + "learning_rate": 3.3741312944777616e-06, + "loss": 1.8151, + "step": 3601 + }, + { + "epoch": 2.2164448888547037, + "grad_norm": 5.989696025848389, + "learning_rate": 3.369156010509389e-06, + "loss": 1.8396, + "step": 3602 + }, + { + "epoch": 2.217060226136451, + "grad_norm": 6.40372371673584, + "learning_rate": 3.364183654153592e-06, + "loss": 1.8801, + "step": 3603 + }, + { + "epoch": 2.2176755634181986, + "grad_norm": 6.006094932556152, + "learning_rate": 3.359214227605728e-06, + "loss": 2.0045, + "step": 3604 + }, + { + "epoch": 2.2182909006999463, + "grad_norm": 6.012567043304443, + "learning_rate": 3.3542477330598812e-06, + "loss": 1.6968, + "step": 3605 + }, + { + "epoch": 2.2189062379816935, + "grad_norm": 6.145164489746094, + "learning_rate": 3.349284172708823e-06, + "loss": 1.7409, + "step": 3606 + }, + { + "epoch": 2.219521575263441, + "grad_norm": 6.608831405639648, + "learning_rate": 3.344323548744044e-06, + "loss": 1.886, + "step": 3607 + }, + { + "epoch": 2.220136912545189, + "grad_norm": 5.355971336364746, + "learning_rate": 3.339365863355718e-06, + "loss": 1.9366, + "step": 3608 + }, + { + "epoch": 2.2207522498269365, + "grad_norm": 5.926422595977783, + "learning_rate": 3.334411118732744e-06, + "loss": 1.7481, + "step": 3609 + }, + { + "epoch": 2.2213675871086838, + "grad_norm": 6.289839267730713, + "learning_rate": 3.329459317062712e-06, + "loss": 1.5998, + "step": 3610 + }, + { + "epoch": 2.2219829243904314, + "grad_norm": 7.545709609985352, + "learning_rate": 3.32451046053191e-06, + "loss": 1.7778, + "step": 3611 + }, + { + "epoch": 2.222598261672179, + "grad_norm": 6.085036277770996, + "learning_rate": 3.3195645513253316e-06, + "loss": 1.7954, + "step": 3612 + }, + { + "epoch": 2.223213598953927, + "grad_norm": 6.377877712249756, + "learning_rate": 3.3146215916266643e-06, + "loss": 1.7974, + "step": 3613 + }, + { + "epoch": 2.223828936235674, + "grad_norm": 5.7712788581848145, + "learning_rate": 3.309681583618298e-06, + "loss": 1.8366, + "step": 3614 + }, + { + "epoch": 2.2244442735174217, + "grad_norm": 6.167764186859131, + "learning_rate": 3.304744529481315e-06, + "loss": 1.8536, + "step": 3615 + }, + { + "epoch": 2.2250596107991694, + "grad_norm": 6.189918041229248, + "learning_rate": 3.299810431395497e-06, + "loss": 1.7302, + "step": 3616 + }, + { + "epoch": 2.225674948080917, + "grad_norm": 5.513488292694092, + "learning_rate": 3.294879291539317e-06, + "loss": 1.5158, + "step": 3617 + }, + { + "epoch": 
2.2262902853626643, + "grad_norm": 7.9205169677734375, + "learning_rate": 3.289951112089943e-06, + "loss": 1.8258, + "step": 3618 + }, + { + "epoch": 2.226905622644412, + "grad_norm": 6.432737827301025, + "learning_rate": 3.285025895223244e-06, + "loss": 1.8973, + "step": 3619 + }, + { + "epoch": 2.2275209599261596, + "grad_norm": 5.832976818084717, + "learning_rate": 3.2801036431137658e-06, + "loss": 1.6155, + "step": 3620 + }, + { + "epoch": 2.2281362972079073, + "grad_norm": 5.828737735748291, + "learning_rate": 3.2751843579347562e-06, + "loss": 2.0181, + "step": 3621 + }, + { + "epoch": 2.2287516344896545, + "grad_norm": 6.051152229309082, + "learning_rate": 3.2702680418581467e-06, + "loss": 1.9455, + "step": 3622 + }, + { + "epoch": 2.229366971771402, + "grad_norm": 8.533212661743164, + "learning_rate": 3.26535469705457e-06, + "loss": 1.7366, + "step": 3623 + }, + { + "epoch": 2.22998230905315, + "grad_norm": 6.595724582672119, + "learning_rate": 3.2604443256933307e-06, + "loss": 1.7451, + "step": 3624 + }, + { + "epoch": 2.230597646334897, + "grad_norm": 7.864799976348877, + "learning_rate": 3.2555369299424254e-06, + "loss": 1.7124, + "step": 3625 + }, + { + "epoch": 2.2312129836166448, + "grad_norm": 8.006080627441406, + "learning_rate": 3.2506325119685476e-06, + "loss": 1.7801, + "step": 3626 + }, + { + "epoch": 2.2318283208983924, + "grad_norm": 7.001082420349121, + "learning_rate": 3.2457310739370684e-06, + "loss": 2.0109, + "step": 3627 + }, + { + "epoch": 2.23244365818014, + "grad_norm": 5.0442585945129395, + "learning_rate": 3.2408326180120354e-06, + "loss": 1.7639, + "step": 3628 + }, + { + "epoch": 2.2330589954618874, + "grad_norm": 7.506595134735107, + "learning_rate": 3.235937146356187e-06, + "loss": 1.8398, + "step": 3629 + }, + { + "epoch": 2.233674332743635, + "grad_norm": 5.65180778503418, + "learning_rate": 3.231044661130951e-06, + "loss": 1.6252, + "step": 3630 + }, + { + "epoch": 2.2342896700253827, + "grad_norm": 5.887121200561523, + "learning_rate": 3.2261551644964305e-06, + "loss": 1.8705, + "step": 3631 + }, + { + "epoch": 2.2349050073071304, + "grad_norm": 7.835142612457275, + "learning_rate": 3.2212686586113973e-06, + "loss": 1.8733, + "step": 3632 + }, + { + "epoch": 2.2355203445888776, + "grad_norm": 6.161251068115234, + "learning_rate": 3.216385145633324e-06, + "loss": 1.6033, + "step": 3633 + }, + { + "epoch": 2.2361356818706253, + "grad_norm": 5.923214912414551, + "learning_rate": 3.211504627718347e-06, + "loss": 1.8485, + "step": 3634 + }, + { + "epoch": 2.236751019152373, + "grad_norm": 6.26141881942749, + "learning_rate": 3.2066271070212873e-06, + "loss": 1.8833, + "step": 3635 + }, + { + "epoch": 2.2373663564341206, + "grad_norm": 7.397851943969727, + "learning_rate": 3.201752585695638e-06, + "loss": 1.4471, + "step": 3636 + }, + { + "epoch": 2.237981693715868, + "grad_norm": 6.174269676208496, + "learning_rate": 3.196881065893571e-06, + "loss": 1.8734, + "step": 3637 + }, + { + "epoch": 2.2385970309976155, + "grad_norm": 7.373813629150391, + "learning_rate": 3.1920125497659327e-06, + "loss": 1.5465, + "step": 3638 + }, + { + "epoch": 2.239212368279363, + "grad_norm": 5.276606559753418, + "learning_rate": 3.1871470394622407e-06, + "loss": 1.6898, + "step": 3639 + }, + { + "epoch": 2.239827705561111, + "grad_norm": 6.629005432128906, + "learning_rate": 3.1822845371306897e-06, + "loss": 1.8113, + "step": 3640 + }, + { + "epoch": 2.240443042842858, + "grad_norm": 6.154078960418701, + "learning_rate": 3.1774250449181423e-06, + "loss": 1.8701, + "step": 
3641 + }, + { + "epoch": 2.241058380124606, + "grad_norm": 4.946253299713135, + "learning_rate": 3.1725685649701354e-06, + "loss": 1.6458, + "step": 3642 + }, + { + "epoch": 2.2416737174063535, + "grad_norm": 5.715054035186768, + "learning_rate": 3.167715099430874e-06, + "loss": 1.8525, + "step": 3643 + }, + { + "epoch": 2.2422890546881007, + "grad_norm": 7.052640914916992, + "learning_rate": 3.1628646504432304e-06, + "loss": 1.7703, + "step": 3644 + }, + { + "epoch": 2.2429043919698484, + "grad_norm": 5.21620512008667, + "learning_rate": 3.158017220148751e-06, + "loss": 1.8303, + "step": 3645 + }, + { + "epoch": 2.243519729251596, + "grad_norm": 6.243503093719482, + "learning_rate": 3.1531728106876404e-06, + "loss": 1.8953, + "step": 3646 + }, + { + "epoch": 2.2441350665333437, + "grad_norm": 6.015785217285156, + "learning_rate": 3.148331424198784e-06, + "loss": 1.9882, + "step": 3647 + }, + { + "epoch": 2.244750403815091, + "grad_norm": 6.24507999420166, + "learning_rate": 3.1434930628197145e-06, + "loss": 1.8344, + "step": 3648 + }, + { + "epoch": 2.2453657410968386, + "grad_norm": 7.677884101867676, + "learning_rate": 3.1386577286866414e-06, + "loss": 1.8544, + "step": 3649 + }, + { + "epoch": 2.2459810783785863, + "grad_norm": 5.427766799926758, + "learning_rate": 3.13382542393443e-06, + "loss": 1.6295, + "step": 3650 + }, + { + "epoch": 2.246596415660334, + "grad_norm": 5.684226036071777, + "learning_rate": 3.1289961506966217e-06, + "loss": 1.7688, + "step": 3651 + }, + { + "epoch": 2.247211752942081, + "grad_norm": 6.135219097137451, + "learning_rate": 3.1241699111054004e-06, + "loss": 1.7245, + "step": 3652 + }, + { + "epoch": 2.247827090223829, + "grad_norm": 6.711691379547119, + "learning_rate": 3.1193467072916195e-06, + "loss": 1.5502, + "step": 3653 + }, + { + "epoch": 2.2484424275055765, + "grad_norm": 7.4002366065979, + "learning_rate": 3.114526541384799e-06, + "loss": 1.7065, + "step": 3654 + }, + { + "epoch": 2.249057764787324, + "grad_norm": 8.1985445022583, + "learning_rate": 3.1097094155131122e-06, + "loss": 1.6704, + "step": 3655 + }, + { + "epoch": 2.2496731020690715, + "grad_norm": 5.932138919830322, + "learning_rate": 3.1048953318033826e-06, + "loss": 1.7557, + "step": 3656 + }, + { + "epoch": 2.250288439350819, + "grad_norm": 6.9598236083984375, + "learning_rate": 3.100084292381095e-06, + "loss": 1.8767, + "step": 3657 + }, + { + "epoch": 2.250903776632567, + "grad_norm": 8.547708511352539, + "learning_rate": 3.095276299370401e-06, + "loss": 1.7565, + "step": 3658 + }, + { + "epoch": 2.2515191139143145, + "grad_norm": 6.709585666656494, + "learning_rate": 3.0904713548940936e-06, + "loss": 1.8131, + "step": 3659 + }, + { + "epoch": 2.2521344511960617, + "grad_norm": 6.788082122802734, + "learning_rate": 3.085669461073626e-06, + "loss": 1.7839, + "step": 3660 + }, + { + "epoch": 2.2527497884778094, + "grad_norm": 6.1273088455200195, + "learning_rate": 3.0808706200291017e-06, + "loss": 1.6444, + "step": 3661 + }, + { + "epoch": 2.253365125759557, + "grad_norm": 6.626688480377197, + "learning_rate": 3.076074833879279e-06, + "loss": 1.7624, + "step": 3662 + }, + { + "epoch": 2.2539804630413043, + "grad_norm": 6.440664768218994, + "learning_rate": 3.0712821047415655e-06, + "loss": 1.5917, + "step": 3663 + }, + { + "epoch": 2.254595800323052, + "grad_norm": 8.34347152709961, + "learning_rate": 3.0664924347320203e-06, + "loss": 1.9212, + "step": 3664 + }, + { + "epoch": 2.2552111376047996, + "grad_norm": 6.346433162689209, + "learning_rate": 3.06170582596535e-06, + "loss": 
1.6575, + "step": 3665 + }, + { + "epoch": 2.2558264748865473, + "grad_norm": 6.335967063903809, + "learning_rate": 3.0569222805549114e-06, + "loss": 1.9098, + "step": 3666 + }, + { + "epoch": 2.256441812168295, + "grad_norm": 7.397572994232178, + "learning_rate": 3.052141800612709e-06, + "loss": 1.7021, + "step": 3667 + }, + { + "epoch": 2.257057149450042, + "grad_norm": 5.89297342300415, + "learning_rate": 3.047364388249393e-06, + "loss": 1.6917, + "step": 3668 + }, + { + "epoch": 2.25767248673179, + "grad_norm": 7.93205451965332, + "learning_rate": 3.0425900455742584e-06, + "loss": 1.6747, + "step": 3669 + }, + { + "epoch": 2.2582878240135376, + "grad_norm": 5.942864894866943, + "learning_rate": 3.037818774695247e-06, + "loss": 1.7254, + "step": 3670 + }, + { + "epoch": 2.258903161295285, + "grad_norm": 6.207610607147217, + "learning_rate": 3.0330505777189433e-06, + "loss": 1.9111, + "step": 3671 + }, + { + "epoch": 2.2595184985770325, + "grad_norm": 5.82026481628418, + "learning_rate": 3.0282854567505737e-06, + "loss": 1.8914, + "step": 3672 + }, + { + "epoch": 2.26013383585878, + "grad_norm": 6.619354248046875, + "learning_rate": 3.0235234138940073e-06, + "loss": 1.653, + "step": 3673 + }, + { + "epoch": 2.260749173140528, + "grad_norm": 6.292153835296631, + "learning_rate": 3.0187644512517523e-06, + "loss": 1.9105, + "step": 3674 + }, + { + "epoch": 2.261364510422275, + "grad_norm": 7.377420425415039, + "learning_rate": 3.0140085709249666e-06, + "loss": 1.8288, + "step": 3675 + }, + { + "epoch": 2.2619798477040227, + "grad_norm": 6.426965713500977, + "learning_rate": 3.009255775013431e-06, + "loss": 1.7651, + "step": 3676 + }, + { + "epoch": 2.2625951849857704, + "grad_norm": 5.71284818649292, + "learning_rate": 3.0045060656155766e-06, + "loss": 1.7866, + "step": 3677 + }, + { + "epoch": 2.263210522267518, + "grad_norm": 5.922414302825928, + "learning_rate": 2.9997594448284638e-06, + "loss": 1.7261, + "step": 3678 + }, + { + "epoch": 2.2638258595492653, + "grad_norm": 6.192420959472656, + "learning_rate": 2.9950159147478043e-06, + "loss": 1.7647, + "step": 3679 + }, + { + "epoch": 2.264441196831013, + "grad_norm": 6.754461765289307, + "learning_rate": 2.9902754774679233e-06, + "loss": 1.7745, + "step": 3680 + }, + { + "epoch": 2.2650565341127606, + "grad_norm": 6.319611549377441, + "learning_rate": 2.9855381350817937e-06, + "loss": 1.6683, + "step": 3681 + }, + { + "epoch": 2.265671871394508, + "grad_norm": 6.763113021850586, + "learning_rate": 2.9808038896810254e-06, + "loss": 1.962, + "step": 3682 + }, + { + "epoch": 2.2662872086762555, + "grad_norm": 5.713286399841309, + "learning_rate": 2.976072743355852e-06, + "loss": 1.7872, + "step": 3683 + }, + { + "epoch": 2.2669025459580032, + "grad_norm": 5.80593729019165, + "learning_rate": 2.9713446981951466e-06, + "loss": 1.6728, + "step": 3684 + }, + { + "epoch": 2.267517883239751, + "grad_norm": 6.149264812469482, + "learning_rate": 2.9666197562863973e-06, + "loss": 1.7782, + "step": 3685 + }, + { + "epoch": 2.2681332205214986, + "grad_norm": 7.4233503341674805, + "learning_rate": 2.961897919715746e-06, + "loss": 1.7242, + "step": 3686 + }, + { + "epoch": 2.268748557803246, + "grad_norm": 5.545602321624756, + "learning_rate": 2.957179190567947e-06, + "loss": 1.7393, + "step": 3687 + }, + { + "epoch": 2.2693638950849935, + "grad_norm": 8.051424026489258, + "learning_rate": 2.952463570926385e-06, + "loss": 1.756, + "step": 3688 + }, + { + "epoch": 2.269979232366741, + "grad_norm": 8.511178016662598, + "learning_rate": 
2.9477510628730755e-06, + "loss": 1.7671, + "step": 3689 + }, + { + "epoch": 2.2705945696484884, + "grad_norm": 6.2318549156188965, + "learning_rate": 2.9430416684886575e-06, + "loss": 1.547, + "step": 3690 + }, + { + "epoch": 2.271209906930236, + "grad_norm": 7.308364391326904, + "learning_rate": 2.938335389852397e-06, + "loss": 1.8241, + "step": 3691 + }, + { + "epoch": 2.2718252442119837, + "grad_norm": 6.385648250579834, + "learning_rate": 2.9336322290421815e-06, + "loss": 1.8609, + "step": 3692 + }, + { + "epoch": 2.2724405814937314, + "grad_norm": 6.321068286895752, + "learning_rate": 2.9289321881345257e-06, + "loss": 1.7146, + "step": 3693 + }, + { + "epoch": 2.2730559187754786, + "grad_norm": 6.648191452026367, + "learning_rate": 2.9242352692045638e-06, + "loss": 1.7273, + "step": 3694 + }, + { + "epoch": 2.2736712560572263, + "grad_norm": 6.506444931030273, + "learning_rate": 2.9195414743260544e-06, + "loss": 1.6886, + "step": 3695 + }, + { + "epoch": 2.274286593338974, + "grad_norm": 5.748836517333984, + "learning_rate": 2.9148508055713733e-06, + "loss": 1.6371, + "step": 3696 + }, + { + "epoch": 2.2749019306207217, + "grad_norm": 5.698553562164307, + "learning_rate": 2.9101632650115185e-06, + "loss": 1.7984, + "step": 3697 + }, + { + "epoch": 2.275517267902469, + "grad_norm": 5.5597100257873535, + "learning_rate": 2.9054788547161063e-06, + "loss": 1.731, + "step": 3698 + }, + { + "epoch": 2.2761326051842166, + "grad_norm": 7.217710018157959, + "learning_rate": 2.9007975767533714e-06, + "loss": 1.7623, + "step": 3699 + }, + { + "epoch": 2.2767479424659642, + "grad_norm": 6.321074962615967, + "learning_rate": 2.896119433190164e-06, + "loss": 1.8569, + "step": 3700 + }, + { + "epoch": 2.2773632797477115, + "grad_norm": 6.37348747253418, + "learning_rate": 2.8914444260919526e-06, + "loss": 1.8656, + "step": 3701 + }, + { + "epoch": 2.277978617029459, + "grad_norm": 6.9500908851623535, + "learning_rate": 2.8867725575228166e-06, + "loss": 1.7581, + "step": 3702 + }, + { + "epoch": 2.278593954311207, + "grad_norm": 5.929502010345459, + "learning_rate": 2.882103829545462e-06, + "loss": 1.7223, + "step": 3703 + }, + { + "epoch": 2.2792092915929545, + "grad_norm": 5.938424587249756, + "learning_rate": 2.877438244221189e-06, + "loss": 1.8017, + "step": 3704 + }, + { + "epoch": 2.279824628874702, + "grad_norm": 6.125020980834961, + "learning_rate": 2.872775803609924e-06, + "loss": 1.6885, + "step": 3705 + }, + { + "epoch": 2.2804399661564494, + "grad_norm": 6.000967502593994, + "learning_rate": 2.868116509770198e-06, + "loss": 1.8502, + "step": 3706 + }, + { + "epoch": 2.281055303438197, + "grad_norm": 7.100013732910156, + "learning_rate": 2.863460364759163e-06, + "loss": 1.7896, + "step": 3707 + }, + { + "epoch": 2.2816706407199447, + "grad_norm": 7.048813819885254, + "learning_rate": 2.8588073706325726e-06, + "loss": 1.8627, + "step": 3708 + }, + { + "epoch": 2.282285978001692, + "grad_norm": 6.2647833824157715, + "learning_rate": 2.854157529444781e-06, + "loss": 1.7778, + "step": 3709 + }, + { + "epoch": 2.2829013152834396, + "grad_norm": 7.2511210441589355, + "learning_rate": 2.8495108432487707e-06, + "loss": 1.825, + "step": 3710 + }, + { + "epoch": 2.2835166525651873, + "grad_norm": 7.265307903289795, + "learning_rate": 2.844867314096115e-06, + "loss": 1.5151, + "step": 3711 + }, + { + "epoch": 2.284131989846935, + "grad_norm": 7.44661283493042, + "learning_rate": 2.8402269440370025e-06, + "loss": 1.7188, + "step": 3712 + }, + { + "epoch": 2.2847473271286822, + "grad_norm": 
6.224918365478516, + "learning_rate": 2.835589735120213e-06, + "loss": 1.861, + "step": 3713 + }, + { + "epoch": 2.28536266441043, + "grad_norm": 6.208709239959717, + "learning_rate": 2.8309556893931522e-06, + "loss": 1.8374, + "step": 3714 + }, + { + "epoch": 2.2859780016921776, + "grad_norm": 7.478386878967285, + "learning_rate": 2.8263248089018116e-06, + "loss": 1.7284, + "step": 3715 + }, + { + "epoch": 2.2865933389739252, + "grad_norm": 6.710507869720459, + "learning_rate": 2.821697095690793e-06, + "loss": 1.7056, + "step": 3716 + }, + { + "epoch": 2.2872086762556725, + "grad_norm": 6.288590431213379, + "learning_rate": 2.8170725518032983e-06, + "loss": 1.7721, + "step": 3717 + }, + { + "epoch": 2.28782401353742, + "grad_norm": 9.303751945495605, + "learning_rate": 2.812451179281129e-06, + "loss": 1.926, + "step": 3718 + }, + { + "epoch": 2.288439350819168, + "grad_norm": 6.187589168548584, + "learning_rate": 2.8078329801646876e-06, + "loss": 1.747, + "step": 3719 + }, + { + "epoch": 2.2890546881009155, + "grad_norm": 5.700725078582764, + "learning_rate": 2.803217956492974e-06, + "loss": 1.6877, + "step": 3720 + }, + { + "epoch": 2.2896700253826627, + "grad_norm": 6.127201080322266, + "learning_rate": 2.798606110303589e-06, + "loss": 1.7653, + "step": 3721 + }, + { + "epoch": 2.2902853626644104, + "grad_norm": 8.023165702819824, + "learning_rate": 2.7939974436327277e-06, + "loss": 1.8238, + "step": 3722 + }, + { + "epoch": 2.290900699946158, + "grad_norm": 6.18065071105957, + "learning_rate": 2.789391958515183e-06, + "loss": 1.7571, + "step": 3723 + }, + { + "epoch": 2.2915160372279058, + "grad_norm": 7.043874740600586, + "learning_rate": 2.7847896569843422e-06, + "loss": 1.8179, + "step": 3724 + }, + { + "epoch": 2.292131374509653, + "grad_norm": 8.25831413269043, + "learning_rate": 2.7801905410721875e-06, + "loss": 2.0141, + "step": 3725 + }, + { + "epoch": 2.2927467117914007, + "grad_norm": 6.850857257843018, + "learning_rate": 2.7755946128092935e-06, + "loss": 1.718, + "step": 3726 + }, + { + "epoch": 2.2933620490731483, + "grad_norm": 5.67594575881958, + "learning_rate": 2.77100187422483e-06, + "loss": 1.9865, + "step": 3727 + }, + { + "epoch": 2.2939773863548956, + "grad_norm": 5.94866418838501, + "learning_rate": 2.766412327346556e-06, + "loss": 1.8358, + "step": 3728 + }, + { + "epoch": 2.2945927236366432, + "grad_norm": 6.939367294311523, + "learning_rate": 2.7618259742008226e-06, + "loss": 1.9153, + "step": 3729 + }, + { + "epoch": 2.295208060918391, + "grad_norm": 5.86646842956543, + "learning_rate": 2.7572428168125654e-06, + "loss": 1.7845, + "step": 3730 + }, + { + "epoch": 2.2958233982001386, + "grad_norm": 6.012625217437744, + "learning_rate": 2.7526628572053227e-06, + "loss": 1.5808, + "step": 3731 + }, + { + "epoch": 2.296438735481886, + "grad_norm": 6.366248607635498, + "learning_rate": 2.74808609740121e-06, + "loss": 1.594, + "step": 3732 + }, + { + "epoch": 2.2970540727636335, + "grad_norm": 8.346105575561523, + "learning_rate": 2.7435125394209283e-06, + "loss": 1.9464, + "step": 3733 + }, + { + "epoch": 2.297669410045381, + "grad_norm": 6.041011333465576, + "learning_rate": 2.7389421852837685e-06, + "loss": 1.7685, + "step": 3734 + }, + { + "epoch": 2.298284747327129, + "grad_norm": 7.01996374130249, + "learning_rate": 2.7343750370076127e-06, + "loss": 1.7615, + "step": 3735 + }, + { + "epoch": 2.298900084608876, + "grad_norm": 5.985525608062744, + "learning_rate": 2.729811096608922e-06, + "loss": 1.8342, + "step": 3736 + }, + { + "epoch": 2.2995154218906237, + 
"grad_norm": 5.67297887802124, + "learning_rate": 2.725250366102733e-06, + "loss": 1.7786, + "step": 3737 + }, + { + "epoch": 2.3001307591723714, + "grad_norm": 6.503062725067139, + "learning_rate": 2.7206928475026828e-06, + "loss": 1.585, + "step": 3738 + }, + { + "epoch": 2.300746096454119, + "grad_norm": 7.320706844329834, + "learning_rate": 2.7161385428209773e-06, + "loss": 1.9162, + "step": 3739 + }, + { + "epoch": 2.3013614337358663, + "grad_norm": 5.595752716064453, + "learning_rate": 2.7115874540684106e-06, + "loss": 1.9542, + "step": 3740 + }, + { + "epoch": 2.301976771017614, + "grad_norm": 6.344609260559082, + "learning_rate": 2.707039583254346e-06, + "loss": 1.7576, + "step": 3741 + }, + { + "epoch": 2.3025921082993617, + "grad_norm": 6.704031467437744, + "learning_rate": 2.7024949323867398e-06, + "loss": 1.8256, + "step": 3742 + }, + { + "epoch": 2.3032074455811093, + "grad_norm": 5.959109783172607, + "learning_rate": 2.6979535034721195e-06, + "loss": 1.9601, + "step": 3743 + }, + { + "epoch": 2.3038227828628566, + "grad_norm": 7.418654918670654, + "learning_rate": 2.6934152985155904e-06, + "loss": 1.8721, + "step": 3744 + }, + { + "epoch": 2.3044381201446043, + "grad_norm": 6.419052600860596, + "learning_rate": 2.6888803195208355e-06, + "loss": 1.7806, + "step": 3745 + }, + { + "epoch": 2.305053457426352, + "grad_norm": 5.873384952545166, + "learning_rate": 2.6843485684901118e-06, + "loss": 1.8668, + "step": 3746 + }, + { + "epoch": 2.305668794708099, + "grad_norm": 6.376941680908203, + "learning_rate": 2.679820047424253e-06, + "loss": 1.7871, + "step": 3747 + }, + { + "epoch": 2.306284131989847, + "grad_norm": 8.54140853881836, + "learning_rate": 2.6752947583226665e-06, + "loss": 1.6923, + "step": 3748 + }, + { + "epoch": 2.3068994692715945, + "grad_norm": 6.022964954376221, + "learning_rate": 2.6707727031833306e-06, + "loss": 1.727, + "step": 3749 + }, + { + "epoch": 2.307514806553342, + "grad_norm": 6.821130275726318, + "learning_rate": 2.6662538840027984e-06, + "loss": 1.8196, + "step": 3750 + }, + { + "epoch": 2.30813014383509, + "grad_norm": 7.283360958099365, + "learning_rate": 2.6617383027761944e-06, + "loss": 1.7976, + "step": 3751 + }, + { + "epoch": 2.308745481116837, + "grad_norm": 5.7682204246521, + "learning_rate": 2.6572259614972107e-06, + "loss": 1.8502, + "step": 3752 + }, + { + "epoch": 2.3093608183985848, + "grad_norm": 6.652133941650391, + "learning_rate": 2.6527168621581114e-06, + "loss": 1.7931, + "step": 3753 + }, + { + "epoch": 2.3099761556803324, + "grad_norm": 7.051815986633301, + "learning_rate": 2.648211006749728e-06, + "loss": 1.7101, + "step": 3754 + }, + { + "epoch": 2.3105914929620797, + "grad_norm": 7.458127498626709, + "learning_rate": 2.6437083972614575e-06, + "loss": 1.6203, + "step": 3755 + }, + { + "epoch": 2.3112068302438273, + "grad_norm": 5.850643157958984, + "learning_rate": 2.639209035681276e-06, + "loss": 1.8203, + "step": 3756 + }, + { + "epoch": 2.311822167525575, + "grad_norm": 6.622825622558594, + "learning_rate": 2.6347129239957058e-06, + "loss": 1.7708, + "step": 3757 + }, + { + "epoch": 2.3124375048073227, + "grad_norm": 6.224090099334717, + "learning_rate": 2.6302200641898444e-06, + "loss": 1.8812, + "step": 3758 + }, + { + "epoch": 2.31305284208907, + "grad_norm": 5.363020420074463, + "learning_rate": 2.625730458247362e-06, + "loss": 1.8411, + "step": 3759 + }, + { + "epoch": 2.3136681793708176, + "grad_norm": 5.361053466796875, + "learning_rate": 2.6212441081504812e-06, + "loss": 2.0094, + "step": 3760 + }, + { + "epoch": 
2.3142835166525653, + "grad_norm": 8.73542594909668, + "learning_rate": 2.616761015879986e-06, + "loss": 1.7944, + "step": 3761 + }, + { + "epoch": 2.314898853934313, + "grad_norm": 6.817964553833008, + "learning_rate": 2.6122811834152253e-06, + "loss": 1.7715, + "step": 3762 + }, + { + "epoch": 2.31551419121606, + "grad_norm": 5.750774383544922, + "learning_rate": 2.6078046127341138e-06, + "loss": 1.677, + "step": 3763 + }, + { + "epoch": 2.316129528497808, + "grad_norm": 6.230947494506836, + "learning_rate": 2.6033313058131238e-06, + "loss": 1.8602, + "step": 3764 + }, + { + "epoch": 2.3167448657795555, + "grad_norm": 6.308515548706055, + "learning_rate": 2.5988612646272735e-06, + "loss": 1.706, + "step": 3765 + }, + { + "epoch": 2.3173602030613027, + "grad_norm": 6.189061164855957, + "learning_rate": 2.5943944911501596e-06, + "loss": 1.8146, + "step": 3766 + }, + { + "epoch": 2.3179755403430504, + "grad_norm": 6.451207160949707, + "learning_rate": 2.5899309873539245e-06, + "loss": 1.6692, + "step": 3767 + }, + { + "epoch": 2.318590877624798, + "grad_norm": 8.471429824829102, + "learning_rate": 2.5854707552092705e-06, + "loss": 1.7836, + "step": 3768 + }, + { + "epoch": 2.3192062149065458, + "grad_norm": 6.858111381530762, + "learning_rate": 2.5810137966854466e-06, + "loss": 1.7395, + "step": 3769 + }, + { + "epoch": 2.3198215521882934, + "grad_norm": 6.74513053894043, + "learning_rate": 2.576560113750272e-06, + "loss": 1.7931, + "step": 3770 + }, + { + "epoch": 2.3204368894700407, + "grad_norm": 5.220582485198975, + "learning_rate": 2.5721097083701085e-06, + "loss": 1.6926, + "step": 3771 + }, + { + "epoch": 2.3210522267517883, + "grad_norm": 6.685003280639648, + "learning_rate": 2.567662582509873e-06, + "loss": 1.6478, + "step": 3772 + }, + { + "epoch": 2.321667564033536, + "grad_norm": 6.895858287811279, + "learning_rate": 2.5632187381330366e-06, + "loss": 1.7739, + "step": 3773 + }, + { + "epoch": 2.3222829013152833, + "grad_norm": 6.475818157196045, + "learning_rate": 2.5587781772016196e-06, + "loss": 1.561, + "step": 3774 + }, + { + "epoch": 2.322898238597031, + "grad_norm": 6.819135665893555, + "learning_rate": 2.5543409016761935e-06, + "loss": 1.7532, + "step": 3775 + }, + { + "epoch": 2.3235135758787786, + "grad_norm": 7.109718322753906, + "learning_rate": 2.549906913515878e-06, + "loss": 1.732, + "step": 3776 + }, + { + "epoch": 2.3241289131605263, + "grad_norm": 6.486110210418701, + "learning_rate": 2.545476214678344e-06, + "loss": 1.9557, + "step": 3777 + }, + { + "epoch": 2.3247442504422735, + "grad_norm": 5.904752254486084, + "learning_rate": 2.541048807119807e-06, + "loss": 1.5983, + "step": 3778 + }, + { + "epoch": 2.325359587724021, + "grad_norm": 7.526213645935059, + "learning_rate": 2.5366246927950287e-06, + "loss": 1.8004, + "step": 3779 + }, + { + "epoch": 2.325974925005769, + "grad_norm": 6.928223133087158, + "learning_rate": 2.5322038736573273e-06, + "loss": 1.7259, + "step": 3780 + }, + { + "epoch": 2.3265902622875165, + "grad_norm": 5.784457683563232, + "learning_rate": 2.52778635165855e-06, + "loss": 1.8171, + "step": 3781 + }, + { + "epoch": 2.3272055995692638, + "grad_norm": 5.650483131408691, + "learning_rate": 2.5233721287490977e-06, + "loss": 1.7714, + "step": 3782 + }, + { + "epoch": 2.3278209368510114, + "grad_norm": 6.473956108093262, + "learning_rate": 2.518961206877911e-06, + "loss": 1.7839, + "step": 3783 + }, + { + "epoch": 2.328436274132759, + "grad_norm": 7.030654430389404, + "learning_rate": 2.5145535879924832e-06, + "loss": 1.8086, + "step": 3784 
+ }, + { + "epoch": 2.3290516114145063, + "grad_norm": 6.655571937561035, + "learning_rate": 2.5101492740388324e-06, + "loss": 1.7645, + "step": 3785 + }, + { + "epoch": 2.329666948696254, + "grad_norm": 6.215628623962402, + "learning_rate": 2.5057482669615263e-06, + "loss": 1.7438, + "step": 3786 + }, + { + "epoch": 2.3302822859780017, + "grad_norm": 6.603691101074219, + "learning_rate": 2.5013505687036787e-06, + "loss": 1.6821, + "step": 3787 + }, + { + "epoch": 2.3308976232597494, + "grad_norm": 6.393417835235596, + "learning_rate": 2.4969561812069367e-06, + "loss": 1.7226, + "step": 3788 + }, + { + "epoch": 2.331512960541497, + "grad_norm": 7.320276260375977, + "learning_rate": 2.4925651064114788e-06, + "loss": 1.7468, + "step": 3789 + }, + { + "epoch": 2.3321282978232443, + "grad_norm": 6.0905842781066895, + "learning_rate": 2.4881773462560267e-06, + "loss": 1.9429, + "step": 3790 + }, + { + "epoch": 2.332743635104992, + "grad_norm": 8.111294746398926, + "learning_rate": 2.483792902677847e-06, + "loss": 1.8017, + "step": 3791 + }, + { + "epoch": 2.3333589723867396, + "grad_norm": 5.692609786987305, + "learning_rate": 2.4794117776127337e-06, + "loss": 1.5232, + "step": 3792 + }, + { + "epoch": 2.333974309668487, + "grad_norm": 5.399902820587158, + "learning_rate": 2.475033972995006e-06, + "loss": 1.7843, + "step": 3793 + }, + { + "epoch": 2.3345896469502345, + "grad_norm": 5.963212490081787, + "learning_rate": 2.4706594907575377e-06, + "loss": 1.6922, + "step": 3794 + }, + { + "epoch": 2.335204984231982, + "grad_norm": 6.127442836761475, + "learning_rate": 2.466288332831722e-06, + "loss": 1.6662, + "step": 3795 + }, + { + "epoch": 2.33582032151373, + "grad_norm": 7.2954840660095215, + "learning_rate": 2.461920501147491e-06, + "loss": 1.8237, + "step": 3796 + }, + { + "epoch": 2.336435658795477, + "grad_norm": 7.388814926147461, + "learning_rate": 2.4575559976332964e-06, + "loss": 1.6686, + "step": 3797 + }, + { + "epoch": 2.3370509960772248, + "grad_norm": 6.276309013366699, + "learning_rate": 2.4531948242161365e-06, + "loss": 1.8823, + "step": 3798 + }, + { + "epoch": 2.3376663333589724, + "grad_norm": 6.827302932739258, + "learning_rate": 2.4488369828215286e-06, + "loss": 1.89, + "step": 3799 + }, + { + "epoch": 2.33828167064072, + "grad_norm": 6.6297101974487305, + "learning_rate": 2.4444824753735243e-06, + "loss": 1.8633, + "step": 3800 + }, + { + "epoch": 2.3388970079224674, + "grad_norm": 7.052006244659424, + "learning_rate": 2.4401313037946993e-06, + "loss": 1.8805, + "step": 3801 + }, + { + "epoch": 2.339512345204215, + "grad_norm": 9.630632400512695, + "learning_rate": 2.4357834700061588e-06, + "loss": 1.9975, + "step": 3802 + }, + { + "epoch": 2.3401276824859627, + "grad_norm": 5.8609137535095215, + "learning_rate": 2.4314389759275334e-06, + "loss": 2.0529, + "step": 3803 + }, + { + "epoch": 2.34074301976771, + "grad_norm": 6.186786651611328, + "learning_rate": 2.4270978234769794e-06, + "loss": 1.7546, + "step": 3804 + }, + { + "epoch": 2.3413583570494576, + "grad_norm": 6.255424976348877, + "learning_rate": 2.4227600145711784e-06, + "loss": 1.8632, + "step": 3805 + }, + { + "epoch": 2.3419736943312053, + "grad_norm": 6.022165775299072, + "learning_rate": 2.4184255511253342e-06, + "loss": 2.0336, + "step": 3806 + }, + { + "epoch": 2.342589031612953, + "grad_norm": 6.864768028259277, + "learning_rate": 2.4140944350531714e-06, + "loss": 1.7331, + "step": 3807 + }, + { + "epoch": 2.3432043688947006, + "grad_norm": 5.97355842590332, + "learning_rate": 2.4097666682669496e-06, + 
"loss": 1.787, + "step": 3808 + }, + { + "epoch": 2.343819706176448, + "grad_norm": 5.743616104125977, + "learning_rate": 2.4054422526774314e-06, + "loss": 1.6589, + "step": 3809 + }, + { + "epoch": 2.3444350434581955, + "grad_norm": 5.520223140716553, + "learning_rate": 2.401121190193909e-06, + "loss": 1.879, + "step": 3810 + }, + { + "epoch": 2.345050380739943, + "grad_norm": 6.9200897216796875, + "learning_rate": 2.3968034827241926e-06, + "loss": 1.9578, + "step": 3811 + }, + { + "epoch": 2.3456657180216904, + "grad_norm": 6.864612102508545, + "learning_rate": 2.392489132174619e-06, + "loss": 1.5208, + "step": 3812 + }, + { + "epoch": 2.346281055303438, + "grad_norm": 7.895955562591553, + "learning_rate": 2.3881781404500284e-06, + "loss": 1.7099, + "step": 3813 + }, + { + "epoch": 2.346896392585186, + "grad_norm": 5.198067665100098, + "learning_rate": 2.383870509453785e-06, + "loss": 1.6383, + "step": 3814 + }, + { + "epoch": 2.3475117298669335, + "grad_norm": 6.294742584228516, + "learning_rate": 2.379566241087774e-06, + "loss": 1.7876, + "step": 3815 + }, + { + "epoch": 2.3481270671486807, + "grad_norm": 6.531567096710205, + "learning_rate": 2.3752653372523937e-06, + "loss": 1.721, + "step": 3816 + }, + { + "epoch": 2.3487424044304284, + "grad_norm": 6.795579433441162, + "learning_rate": 2.3709677998465476e-06, + "loss": 1.5957, + "step": 3817 + }, + { + "epoch": 2.349357741712176, + "grad_norm": 5.831236362457275, + "learning_rate": 2.366673630767661e-06, + "loss": 1.7262, + "step": 3818 + }, + { + "epoch": 2.3499730789939237, + "grad_norm": 7.038916110992432, + "learning_rate": 2.362382831911675e-06, + "loss": 2.0022, + "step": 3819 + }, + { + "epoch": 2.350588416275671, + "grad_norm": 5.7848801612854, + "learning_rate": 2.3580954051730396e-06, + "loss": 1.8742, + "step": 3820 + }, + { + "epoch": 2.3512037535574186, + "grad_norm": 9.13796329498291, + "learning_rate": 2.3538113524447083e-06, + "loss": 1.8449, + "step": 3821 + }, + { + "epoch": 2.3518190908391663, + "grad_norm": 5.5253143310546875, + "learning_rate": 2.349530675618158e-06, + "loss": 1.8027, + "step": 3822 + }, + { + "epoch": 2.352434428120914, + "grad_norm": 5.416574954986572, + "learning_rate": 2.345253376583366e-06, + "loss": 1.9211, + "step": 3823 + }, + { + "epoch": 2.353049765402661, + "grad_norm": 6.821833610534668, + "learning_rate": 2.3409794572288226e-06, + "loss": 1.6331, + "step": 3824 + }, + { + "epoch": 2.353665102684409, + "grad_norm": 6.470282077789307, + "learning_rate": 2.3367089194415226e-06, + "loss": 1.6863, + "step": 3825 + }, + { + "epoch": 2.3542804399661565, + "grad_norm": 6.352871894836426, + "learning_rate": 2.332441765106972e-06, + "loss": 1.8439, + "step": 3826 + }, + { + "epoch": 2.354895777247904, + "grad_norm": 7.408091068267822, + "learning_rate": 2.3281779961091777e-06, + "loss": 2.1232, + "step": 3827 + }, + { + "epoch": 2.3555111145296515, + "grad_norm": 7.6368021965026855, + "learning_rate": 2.3239176143306576e-06, + "loss": 1.6579, + "step": 3828 + }, + { + "epoch": 2.356126451811399, + "grad_norm": 6.235464096069336, + "learning_rate": 2.3196606216524286e-06, + "loss": 1.8371, + "step": 3829 + }, + { + "epoch": 2.356741789093147, + "grad_norm": 5.495739459991455, + "learning_rate": 2.315407019954016e-06, + "loss": 1.7017, + "step": 3830 + }, + { + "epoch": 2.357357126374894, + "grad_norm": 5.398624420166016, + "learning_rate": 2.311156811113444e-06, + "loss": 1.5815, + "step": 3831 + }, + { + "epoch": 2.3579724636566417, + "grad_norm": 5.687252998352051, + "learning_rate": 
2.306909997007243e-06, + "loss": 1.8501, + "step": 3832 + }, + { + "epoch": 2.3585878009383894, + "grad_norm": 6.640395641326904, + "learning_rate": 2.30266657951044e-06, + "loss": 1.8681, + "step": 3833 + }, + { + "epoch": 2.359203138220137, + "grad_norm": 6.705996513366699, + "learning_rate": 2.298426560496566e-06, + "loss": 1.8377, + "step": 3834 + }, + { + "epoch": 2.3598184755018843, + "grad_norm": 7.987049579620361, + "learning_rate": 2.294189941837647e-06, + "loss": 1.8528, + "step": 3835 + }, + { + "epoch": 2.360433812783632, + "grad_norm": 5.153289794921875, + "learning_rate": 2.2899567254042187e-06, + "loss": 1.7066, + "step": 3836 + }, + { + "epoch": 2.3610491500653796, + "grad_norm": 5.79223108291626, + "learning_rate": 2.2857269130652983e-06, + "loss": 1.7868, + "step": 3837 + }, + { + "epoch": 2.3616644873471273, + "grad_norm": 5.690798282623291, + "learning_rate": 2.281500506688412e-06, + "loss": 1.9008, + "step": 3838 + }, + { + "epoch": 2.3622798246288745, + "grad_norm": 6.2251458168029785, + "learning_rate": 2.277277508139576e-06, + "loss": 1.8033, + "step": 3839 + }, + { + "epoch": 2.362895161910622, + "grad_norm": 5.639513969421387, + "learning_rate": 2.2730579192833124e-06, + "loss": 1.7778, + "step": 3840 + }, + { + "epoch": 2.36351049919237, + "grad_norm": 6.252905368804932, + "learning_rate": 2.2688417419826212e-06, + "loss": 1.6499, + "step": 3841 + }, + { + "epoch": 2.3641258364741176, + "grad_norm": 6.102060794830322, + "learning_rate": 2.2646289780990073e-06, + "loss": 1.8105, + "step": 3842 + }, + { + "epoch": 2.364741173755865, + "grad_norm": 6.109442234039307, + "learning_rate": 2.2604196294924696e-06, + "loss": 1.9639, + "step": 3843 + }, + { + "epoch": 2.3653565110376125, + "grad_norm": 5.857573986053467, + "learning_rate": 2.2562136980214977e-06, + "loss": 1.8782, + "step": 3844 + }, + { + "epoch": 2.36597184831936, + "grad_norm": 6.468822002410889, + "learning_rate": 2.2520111855430625e-06, + "loss": 1.9219, + "step": 3845 + }, + { + "epoch": 2.366587185601108, + "grad_norm": 6.070867538452148, + "learning_rate": 2.2478120939126357e-06, + "loss": 1.905, + "step": 3846 + }, + { + "epoch": 2.367202522882855, + "grad_norm": 5.997870922088623, + "learning_rate": 2.2436164249841806e-06, + "loss": 1.9176, + "step": 3847 + }, + { + "epoch": 2.3678178601646027, + "grad_norm": 7.132257461547852, + "learning_rate": 2.2394241806101425e-06, + "loss": 1.633, + "step": 3848 + }, + { + "epoch": 2.3684331974463504, + "grad_norm": 5.999013900756836, + "learning_rate": 2.235235362641458e-06, + "loss": 1.7299, + "step": 3849 + }, + { + "epoch": 2.3690485347280976, + "grad_norm": 6.429450035095215, + "learning_rate": 2.23104997292755e-06, + "loss": 1.7339, + "step": 3850 + }, + { + "epoch": 2.3696638720098453, + "grad_norm": 5.886294841766357, + "learning_rate": 2.226868013316328e-06, + "loss": 2.0273, + "step": 3851 + }, + { + "epoch": 2.370279209291593, + "grad_norm": 6.059146404266357, + "learning_rate": 2.222689485654187e-06, + "loss": 1.8722, + "step": 3852 + }, + { + "epoch": 2.3708945465733406, + "grad_norm": 5.988180160522461, + "learning_rate": 2.218514391786006e-06, + "loss": 1.6321, + "step": 3853 + }, + { + "epoch": 2.3715098838550883, + "grad_norm": 7.237381935119629, + "learning_rate": 2.2143427335551505e-06, + "loss": 1.7375, + "step": 3854 + }, + { + "epoch": 2.3721252211368355, + "grad_norm": 6.233421325683594, + "learning_rate": 2.2101745128034657e-06, + "loss": 1.7939, + "step": 3855 + }, + { + "epoch": 2.3727405584185832, + "grad_norm": 
7.4597063064575195, + "learning_rate": 2.206009731371281e-06, + "loss": 1.8582, + "step": 3856 + }, + { + "epoch": 2.373355895700331, + "grad_norm": 6.687112331390381, + "learning_rate": 2.2018483910974077e-06, + "loss": 1.6898, + "step": 3857 + }, + { + "epoch": 2.373971232982078, + "grad_norm": 6.468547344207764, + "learning_rate": 2.1976904938191366e-06, + "loss": 1.6449, + "step": 3858 + }, + { + "epoch": 2.374586570263826, + "grad_norm": 5.048986911773682, + "learning_rate": 2.1935360413722397e-06, + "loss": 1.9457, + "step": 3859 + }, + { + "epoch": 2.3752019075455735, + "grad_norm": 5.848048210144043, + "learning_rate": 2.189385035590966e-06, + "loss": 1.7188, + "step": 3860 + }, + { + "epoch": 2.375817244827321, + "grad_norm": 5.771446228027344, + "learning_rate": 2.1852374783080443e-06, + "loss": 1.5754, + "step": 3861 + }, + { + "epoch": 2.3764325821090684, + "grad_norm": 6.357528209686279, + "learning_rate": 2.181093371354681e-06, + "loss": 1.8968, + "step": 3862 + }, + { + "epoch": 2.377047919390816, + "grad_norm": 5.246901988983154, + "learning_rate": 2.1769527165605563e-06, + "loss": 1.8523, + "step": 3863 + }, + { + "epoch": 2.3776632566725637, + "grad_norm": 5.976222515106201, + "learning_rate": 2.1728155157538367e-06, + "loss": 1.5917, + "step": 3864 + }, + { + "epoch": 2.3782785939543114, + "grad_norm": 5.98018217086792, + "learning_rate": 2.168681770761146e-06, + "loss": 1.8125, + "step": 3865 + }, + { + "epoch": 2.3788939312360586, + "grad_norm": 5.766759395599365, + "learning_rate": 2.1645514834075955e-06, + "loss": 1.8212, + "step": 3866 + }, + { + "epoch": 2.3795092685178063, + "grad_norm": 6.205371379852295, + "learning_rate": 2.160424655516764e-06, + "loss": 1.813, + "step": 3867 + }, + { + "epoch": 2.380124605799554, + "grad_norm": 6.291733264923096, + "learning_rate": 2.1563012889107125e-06, + "loss": 1.66, + "step": 3868 + }, + { + "epoch": 2.380739943081301, + "grad_norm": 6.449747085571289, + "learning_rate": 2.152181385409958e-06, + "loss": 1.6595, + "step": 3869 + }, + { + "epoch": 2.381355280363049, + "grad_norm": 6.19244909286499, + "learning_rate": 2.148064946833498e-06, + "loss": 1.7315, + "step": 3870 + }, + { + "epoch": 2.3819706176447966, + "grad_norm": 6.6307597160339355, + "learning_rate": 2.1439519749988045e-06, + "loss": 1.9727, + "step": 3871 + }, + { + "epoch": 2.3825859549265442, + "grad_norm": 7.1495466232299805, + "learning_rate": 2.139842471721809e-06, + "loss": 2.0217, + "step": 3872 + }, + { + "epoch": 2.383201292208292, + "grad_norm": 7.43621826171875, + "learning_rate": 2.135736438816921e-06, + "loss": 2.031, + "step": 3873 + }, + { + "epoch": 2.383816629490039, + "grad_norm": 5.78187370300293, + "learning_rate": 2.1316338780970026e-06, + "loss": 1.7181, + "step": 3874 + }, + { + "epoch": 2.384431966771787, + "grad_norm": 5.630723476409912, + "learning_rate": 2.1275347913734023e-06, + "loss": 1.8001, + "step": 3875 + }, + { + "epoch": 2.3850473040535345, + "grad_norm": 6.5691022872924805, + "learning_rate": 2.123439180455924e-06, + "loss": 1.8104, + "step": 3876 + }, + { + "epoch": 2.3856626413352817, + "grad_norm": 6.988039016723633, + "learning_rate": 2.1193470471528364e-06, + "loss": 1.6988, + "step": 3877 + }, + { + "epoch": 2.3862779786170294, + "grad_norm": 6.226778030395508, + "learning_rate": 2.115258393270876e-06, + "loss": 1.7802, + "step": 3878 + }, + { + "epoch": 2.386893315898777, + "grad_norm": 7.566223621368408, + "learning_rate": 2.1111732206152424e-06, + "loss": 1.8738, + "step": 3879 + }, + { + "epoch": 
2.3875086531805247, + "grad_norm": 6.730465412139893, + "learning_rate": 2.1070915309895967e-06, + "loss": 1.8576, + "step": 3880 + }, + { + "epoch": 2.388123990462272, + "grad_norm": 8.035234451293945, + "learning_rate": 2.103013326196064e-06, + "loss": 1.6956, + "step": 3881 + }, + { + "epoch": 2.3887393277440196, + "grad_norm": 6.703423500061035, + "learning_rate": 2.0989386080352304e-06, + "loss": 1.8563, + "step": 3882 + }, + { + "epoch": 2.3893546650257673, + "grad_norm": 6.480556964874268, + "learning_rate": 2.0948673783061424e-06, + "loss": 1.6263, + "step": 3883 + }, + { + "epoch": 2.389970002307515, + "grad_norm": 8.01681900024414, + "learning_rate": 2.0907996388063056e-06, + "loss": 1.7935, + "step": 3884 + }, + { + "epoch": 2.3905853395892622, + "grad_norm": 6.662353038787842, + "learning_rate": 2.086735391331686e-06, + "loss": 1.9042, + "step": 3885 + }, + { + "epoch": 2.39120067687101, + "grad_norm": 5.407891273498535, + "learning_rate": 2.082674637676706e-06, + "loss": 1.9048, + "step": 3886 + }, + { + "epoch": 2.3918160141527576, + "grad_norm": 6.3840718269348145, + "learning_rate": 2.0786173796342468e-06, + "loss": 1.9369, + "step": 3887 + }, + { + "epoch": 2.392431351434505, + "grad_norm": 4.803826808929443, + "learning_rate": 2.0745636189956465e-06, + "loss": 1.8093, + "step": 3888 + }, + { + "epoch": 2.3930466887162525, + "grad_norm": 5.879518985748291, + "learning_rate": 2.0705133575506976e-06, + "loss": 1.6966, + "step": 3889 + }, + { + "epoch": 2.393662025998, + "grad_norm": 6.618448257446289, + "learning_rate": 2.0664665970876496e-06, + "loss": 1.6871, + "step": 3890 + }, + { + "epoch": 2.394277363279748, + "grad_norm": 6.575707912445068, + "learning_rate": 2.0624233393932024e-06, + "loss": 1.8116, + "step": 3891 + }, + { + "epoch": 2.3948927005614955, + "grad_norm": 7.48482084274292, + "learning_rate": 2.058383586252519e-06, + "loss": 1.9381, + "step": 3892 + }, + { + "epoch": 2.3955080378432427, + "grad_norm": 7.564310550689697, + "learning_rate": 2.0543473394492017e-06, + "loss": 1.7756, + "step": 3893 + }, + { + "epoch": 2.3961233751249904, + "grad_norm": 6.897456645965576, + "learning_rate": 2.050314600765313e-06, + "loss": 1.6777, + "step": 3894 + }, + { + "epoch": 2.396738712406738, + "grad_norm": 5.86969518661499, + "learning_rate": 2.0462853719813624e-06, + "loss": 1.8074, + "step": 3895 + }, + { + "epoch": 2.3973540496884853, + "grad_norm": 6.516570091247559, + "learning_rate": 2.0422596548763173e-06, + "loss": 1.6752, + "step": 3896 + }, + { + "epoch": 2.397969386970233, + "grad_norm": 6.31430721282959, + "learning_rate": 2.038237451227589e-06, + "loss": 1.6775, + "step": 3897 + }, + { + "epoch": 2.3985847242519807, + "grad_norm": 7.033838272094727, + "learning_rate": 2.034218762811032e-06, + "loss": 1.5318, + "step": 3898 + }, + { + "epoch": 2.3992000615337283, + "grad_norm": 6.915124893188477, + "learning_rate": 2.03020359140096e-06, + "loss": 1.7282, + "step": 3899 + }, + { + "epoch": 2.3998153988154756, + "grad_norm": 5.415728569030762, + "learning_rate": 2.026191938770128e-06, + "loss": 2.0203, + "step": 3900 + }, + { + "epoch": 2.4004307360972232, + "grad_norm": 12.032731056213379, + "learning_rate": 2.0221838066897404e-06, + "loss": 1.7904, + "step": 3901 + }, + { + "epoch": 2.401046073378971, + "grad_norm": 5.962379455566406, + "learning_rate": 2.018179196929436e-06, + "loss": 1.6708, + "step": 3902 + }, + { + "epoch": 2.4016614106607186, + "grad_norm": 5.8238115310668945, + "learning_rate": 2.0141781112573155e-06, + "loss": 1.835, + "step": 3903 
+ }, + { + "epoch": 2.402276747942466, + "grad_norm": 6.458102226257324, + "learning_rate": 2.0101805514399143e-06, + "loss": 1.7639, + "step": 3904 + }, + { + "epoch": 2.4028920852242135, + "grad_norm": 5.033464431762695, + "learning_rate": 2.0061865192422115e-06, + "loss": 1.8887, + "step": 3905 + }, + { + "epoch": 2.403507422505961, + "grad_norm": 6.53121280670166, + "learning_rate": 2.002196016427629e-06, + "loss": 1.7561, + "step": 3906 + }, + { + "epoch": 2.4041227597877084, + "grad_norm": 7.234846591949463, + "learning_rate": 1.9982090447580305e-06, + "loss": 1.9489, + "step": 3907 + }, + { + "epoch": 2.404738097069456, + "grad_norm": 8.920746803283691, + "learning_rate": 1.9942256059937225e-06, + "loss": 2.0007, + "step": 3908 + }, + { + "epoch": 2.4053534343512037, + "grad_norm": 6.770198345184326, + "learning_rate": 1.9902457018934496e-06, + "loss": 1.6753, + "step": 3909 + }, + { + "epoch": 2.4059687716329514, + "grad_norm": 8.049036026000977, + "learning_rate": 1.9862693342143966e-06, + "loss": 1.9862, + "step": 3910 + }, + { + "epoch": 2.406584108914699, + "grad_norm": 6.544292449951172, + "learning_rate": 1.9822965047121854e-06, + "loss": 1.7908, + "step": 3911 + }, + { + "epoch": 2.4071994461964463, + "grad_norm": 7.758020877838135, + "learning_rate": 1.9783272151408805e-06, + "loss": 1.5886, + "step": 3912 + }, + { + "epoch": 2.407814783478194, + "grad_norm": 6.1880059242248535, + "learning_rate": 1.9743614672529763e-06, + "loss": 1.7598, + "step": 3913 + }, + { + "epoch": 2.4084301207599417, + "grad_norm": 7.211880207061768, + "learning_rate": 1.9703992627994097e-06, + "loss": 1.8665, + "step": 3914 + }, + { + "epoch": 2.409045458041689, + "grad_norm": 5.789796352386475, + "learning_rate": 1.9664406035295493e-06, + "loss": 1.9423, + "step": 3915 + }, + { + "epoch": 2.4096607953234366, + "grad_norm": 6.388701915740967, + "learning_rate": 1.9624854911912006e-06, + "loss": 1.7612, + "step": 3916 + }, + { + "epoch": 2.4102761326051843, + "grad_norm": 5.608272552490234, + "learning_rate": 1.9585339275306025e-06, + "loss": 1.8455, + "step": 3917 + }, + { + "epoch": 2.410891469886932, + "grad_norm": 5.272069454193115, + "learning_rate": 1.954585914292426e-06, + "loss": 1.8519, + "step": 3918 + }, + { + "epoch": 2.411506807168679, + "grad_norm": 6.665712833404541, + "learning_rate": 1.950641453219775e-06, + "loss": 1.7566, + "step": 3919 + }, + { + "epoch": 2.412122144450427, + "grad_norm": 5.712517738342285, + "learning_rate": 1.9467005460541877e-06, + "loss": 1.8316, + "step": 3920 + }, + { + "epoch": 2.4127374817321745, + "grad_norm": 7.117879390716553, + "learning_rate": 1.942763194535633e-06, + "loss": 1.9722, + "step": 3921 + }, + { + "epoch": 2.413352819013922, + "grad_norm": 6.0778021812438965, + "learning_rate": 1.9388294004025023e-06, + "loss": 1.6944, + "step": 3922 + }, + { + "epoch": 2.4139681562956694, + "grad_norm": 5.861502647399902, + "learning_rate": 1.934899165391623e-06, + "loss": 1.6656, + "step": 3923 + }, + { + "epoch": 2.414583493577417, + "grad_norm": 6.507849216461182, + "learning_rate": 1.930972491238253e-06, + "loss": 1.8158, + "step": 3924 + }, + { + "epoch": 2.4151988308591648, + "grad_norm": 5.417990684509277, + "learning_rate": 1.927049379676078e-06, + "loss": 1.7744, + "step": 3925 + }, + { + "epoch": 2.415814168140912, + "grad_norm": 6.015923500061035, + "learning_rate": 1.9231298324371982e-06, + "loss": 1.65, + "step": 3926 + }, + { + "epoch": 2.4164295054226597, + "grad_norm": 6.269348621368408, + "learning_rate": 1.9192138512521585e-06, + 
"loss": 1.8214, + "step": 3927 + }, + { + "epoch": 2.4170448427044073, + "grad_norm": 6.467380046844482, + "learning_rate": 1.915301437849917e-06, + "loss": 1.7631, + "step": 3928 + }, + { + "epoch": 2.417660179986155, + "grad_norm": 7.180147171020508, + "learning_rate": 1.911392593957865e-06, + "loss": 1.6514, + "step": 3929 + }, + { + "epoch": 2.4182755172679027, + "grad_norm": 6.196415424346924, + "learning_rate": 1.907487321301802e-06, + "loss": 1.7991, + "step": 3930 + }, + { + "epoch": 2.41889085454965, + "grad_norm": 5.849812984466553, + "learning_rate": 1.9035856216059724e-06, + "loss": 1.5571, + "step": 3931 + }, + { + "epoch": 2.4195061918313976, + "grad_norm": 5.849486827850342, + "learning_rate": 1.899687496593029e-06, + "loss": 1.7422, + "step": 3932 + }, + { + "epoch": 2.4201215291131453, + "grad_norm": 5.54823112487793, + "learning_rate": 1.8957929479840497e-06, + "loss": 1.721, + "step": 3933 + }, + { + "epoch": 2.4207368663948925, + "grad_norm": 6.892828941345215, + "learning_rate": 1.8919019774985336e-06, + "loss": 1.765, + "step": 3934 + }, + { + "epoch": 2.42135220367664, + "grad_norm": 6.099164009094238, + "learning_rate": 1.8880145868543997e-06, + "loss": 1.8186, + "step": 3935 + }, + { + "epoch": 2.421967540958388, + "grad_norm": 8.31813907623291, + "learning_rate": 1.8841307777679874e-06, + "loss": 1.7429, + "step": 3936 + }, + { + "epoch": 2.4225828782401355, + "grad_norm": 6.027548313140869, + "learning_rate": 1.8802505519540525e-06, + "loss": 2.1488, + "step": 3937 + }, + { + "epoch": 2.4231982155218827, + "grad_norm": 7.048625469207764, + "learning_rate": 1.8763739111257718e-06, + "loss": 1.9509, + "step": 3938 + }, + { + "epoch": 2.4238135528036304, + "grad_norm": 6.1500701904296875, + "learning_rate": 1.8725008569947366e-06, + "loss": 1.7993, + "step": 3939 + }, + { + "epoch": 2.424428890085378, + "grad_norm": 6.025331020355225, + "learning_rate": 1.8686313912709564e-06, + "loss": 1.8498, + "step": 3940 + }, + { + "epoch": 2.4250442273671258, + "grad_norm": 6.6982855796813965, + "learning_rate": 1.8647655156628552e-06, + "loss": 1.7755, + "step": 3941 + }, + { + "epoch": 2.425659564648873, + "grad_norm": 6.244536876678467, + "learning_rate": 1.8609032318772735e-06, + "loss": 1.7909, + "step": 3942 + }, + { + "epoch": 2.4262749019306207, + "grad_norm": 6.8660125732421875, + "learning_rate": 1.8570445416194639e-06, + "loss": 1.6811, + "step": 3943 + }, + { + "epoch": 2.4268902392123683, + "grad_norm": 5.991332054138184, + "learning_rate": 1.8531894465930911e-06, + "loss": 1.8691, + "step": 3944 + }, + { + "epoch": 2.427505576494116, + "grad_norm": 6.613326072692871, + "learning_rate": 1.8493379485002427e-06, + "loss": 1.8487, + "step": 3945 + }, + { + "epoch": 2.4281209137758633, + "grad_norm": 6.799866199493408, + "learning_rate": 1.8454900490414019e-06, + "loss": 1.8682, + "step": 3946 + }, + { + "epoch": 2.428736251057611, + "grad_norm": 4.950675010681152, + "learning_rate": 1.8416457499154727e-06, + "loss": 2.075, + "step": 3947 + }, + { + "epoch": 2.4293515883393586, + "grad_norm": 6.441240310668945, + "learning_rate": 1.8378050528197722e-06, + "loss": 1.7146, + "step": 3948 + }, + { + "epoch": 2.4299669256211063, + "grad_norm": 5.463958740234375, + "learning_rate": 1.8339679594500225e-06, + "loss": 1.7994, + "step": 3949 + }, + { + "epoch": 2.4305822629028535, + "grad_norm": 6.645144462585449, + "learning_rate": 1.8301344715003522e-06, + "loss": 1.7843, + "step": 3950 + }, + { + "epoch": 2.431197600184601, + "grad_norm": 7.644711017608643, + 
"learning_rate": 1.8263045906633004e-06, + "loss": 1.8694, + "step": 3951 + }, + { + "epoch": 2.431812937466349, + "grad_norm": 5.476267337799072, + "learning_rate": 1.8224783186298177e-06, + "loss": 1.8599, + "step": 3952 + }, + { + "epoch": 2.432428274748096, + "grad_norm": 5.6869940757751465, + "learning_rate": 1.818655657089261e-06, + "loss": 1.7443, + "step": 3953 + }, + { + "epoch": 2.4330436120298438, + "grad_norm": 6.0985541343688965, + "learning_rate": 1.81483660772938e-06, + "loss": 1.8488, + "step": 3954 + }, + { + "epoch": 2.4336589493115914, + "grad_norm": 6.234722137451172, + "learning_rate": 1.811021172236348e-06, + "loss": 1.9475, + "step": 3955 + }, + { + "epoch": 2.434274286593339, + "grad_norm": 6.399834632873535, + "learning_rate": 1.807209352294732e-06, + "loss": 1.5639, + "step": 3956 + }, + { + "epoch": 2.4348896238750863, + "grad_norm": 6.112963676452637, + "learning_rate": 1.8034011495875082e-06, + "loss": 1.6626, + "step": 3957 + }, + { + "epoch": 2.435504961156834, + "grad_norm": 5.041589736938477, + "learning_rate": 1.7995965657960446e-06, + "loss": 1.7106, + "step": 3958 + }, + { + "epoch": 2.4361202984385817, + "grad_norm": 6.203758239746094, + "learning_rate": 1.7957956026001256e-06, + "loss": 1.545, + "step": 3959 + }, + { + "epoch": 2.4367356357203294, + "grad_norm": 6.551009654998779, + "learning_rate": 1.7919982616779308e-06, + "loss": 1.8236, + "step": 3960 + }, + { + "epoch": 2.4373509730020766, + "grad_norm": 6.164464950561523, + "learning_rate": 1.7882045447060393e-06, + "loss": 1.7427, + "step": 3961 + }, + { + "epoch": 2.4379663102838243, + "grad_norm": 5.7443084716796875, + "learning_rate": 1.7844144533594321e-06, + "loss": 1.7365, + "step": 3962 + }, + { + "epoch": 2.438581647565572, + "grad_norm": 7.053566932678223, + "learning_rate": 1.7806279893114874e-06, + "loss": 1.7977, + "step": 3963 + }, + { + "epoch": 2.4391969848473196, + "grad_norm": 7.247527599334717, + "learning_rate": 1.7768451542339848e-06, + "loss": 1.5989, + "step": 3964 + }, + { + "epoch": 2.439812322129067, + "grad_norm": 7.302156448364258, + "learning_rate": 1.7730659497970991e-06, + "loss": 1.6009, + "step": 3965 + }, + { + "epoch": 2.4404276594108145, + "grad_norm": 7.225002288818359, + "learning_rate": 1.7692903776694037e-06, + "loss": 1.579, + "step": 3966 + }, + { + "epoch": 2.441042996692562, + "grad_norm": 5.954132080078125, + "learning_rate": 1.7655184395178683e-06, + "loss": 1.8004, + "step": 3967 + }, + { + "epoch": 2.44165833397431, + "grad_norm": 6.415463447570801, + "learning_rate": 1.7617501370078538e-06, + "loss": 1.8644, + "step": 3968 + }, + { + "epoch": 2.442273671256057, + "grad_norm": 6.976324081420898, + "learning_rate": 1.7579854718031285e-06, + "loss": 1.92, + "step": 3969 + }, + { + "epoch": 2.4428890085378048, + "grad_norm": 7.014214992523193, + "learning_rate": 1.7542244455658374e-06, + "loss": 1.7537, + "step": 3970 + }, + { + "epoch": 2.4435043458195524, + "grad_norm": 6.68618631362915, + "learning_rate": 1.750467059956531e-06, + "loss": 1.8456, + "step": 3971 + }, + { + "epoch": 2.4441196831012997, + "grad_norm": 6.570301532745361, + "learning_rate": 1.7467133166341454e-06, + "loss": 1.8296, + "step": 3972 + }, + { + "epoch": 2.4447350203830474, + "grad_norm": 6.101358890533447, + "learning_rate": 1.7429632172560207e-06, + "loss": 1.6664, + "step": 3973 + }, + { + "epoch": 2.445350357664795, + "grad_norm": 5.881378650665283, + "learning_rate": 1.73921676347787e-06, + "loss": 1.8611, + "step": 3974 + }, + { + "epoch": 2.4459656949465427, + 
"grad_norm": 5.546141624450684, + "learning_rate": 1.7354739569538081e-06, + "loss": 1.7079, + "step": 3975 + }, + { + "epoch": 2.4465810322282904, + "grad_norm": 6.262315273284912, + "learning_rate": 1.731734799336342e-06, + "loss": 1.9231, + "step": 3976 + }, + { + "epoch": 2.4471963695100376, + "grad_norm": 5.520010471343994, + "learning_rate": 1.7279992922763644e-06, + "loss": 1.6246, + "step": 3977 + }, + { + "epoch": 2.4478117067917853, + "grad_norm": 5.7122416496276855, + "learning_rate": 1.7242674374231483e-06, + "loss": 1.8265, + "step": 3978 + }, + { + "epoch": 2.448427044073533, + "grad_norm": 5.976895809173584, + "learning_rate": 1.7205392364243623e-06, + "loss": 1.7179, + "step": 3979 + }, + { + "epoch": 2.44904238135528, + "grad_norm": 7.668937683105469, + "learning_rate": 1.716814690926064e-06, + "loss": 1.8286, + "step": 3980 + }, + { + "epoch": 2.449657718637028, + "grad_norm": 5.872436046600342, + "learning_rate": 1.7130938025726963e-06, + "loss": 1.7424, + "step": 3981 + }, + { + "epoch": 2.4502730559187755, + "grad_norm": 5.808379650115967, + "learning_rate": 1.7093765730070743e-06, + "loss": 1.6359, + "step": 3982 + }, + { + "epoch": 2.450888393200523, + "grad_norm": 6.606114864349365, + "learning_rate": 1.705663003870418e-06, + "loss": 1.9599, + "step": 3983 + }, + { + "epoch": 2.4515037304822704, + "grad_norm": 6.847923755645752, + "learning_rate": 1.701953096802318e-06, + "loss": 1.8116, + "step": 3984 + }, + { + "epoch": 2.452119067764018, + "grad_norm": 5.52977991104126, + "learning_rate": 1.6982468534407527e-06, + "loss": 1.8226, + "step": 3985 + }, + { + "epoch": 2.452734405045766, + "grad_norm": 6.235499382019043, + "learning_rate": 1.6945442754220753e-06, + "loss": 1.8511, + "step": 3986 + }, + { + "epoch": 2.4533497423275135, + "grad_norm": 8.320939064025879, + "learning_rate": 1.6908453643810342e-06, + "loss": 1.6469, + "step": 3987 + }, + { + "epoch": 2.4539650796092607, + "grad_norm": 7.975067138671875, + "learning_rate": 1.687150121950749e-06, + "loss": 1.7707, + "step": 3988 + }, + { + "epoch": 2.4545804168910084, + "grad_norm": 6.397903919219971, + "learning_rate": 1.6834585497627233e-06, + "loss": 1.8065, + "step": 3989 + }, + { + "epoch": 2.455195754172756, + "grad_norm": 7.109915256500244, + "learning_rate": 1.679770649446837e-06, + "loss": 1.8692, + "step": 3990 + }, + { + "epoch": 2.4558110914545033, + "grad_norm": 5.923258304595947, + "learning_rate": 1.6760864226313534e-06, + "loss": 1.8543, + "step": 3991 + }, + { + "epoch": 2.456426428736251, + "grad_norm": 7.58585262298584, + "learning_rate": 1.6724058709429058e-06, + "loss": 1.7857, + "step": 3992 + }, + { + "epoch": 2.4570417660179986, + "grad_norm": 7.2270097732543945, + "learning_rate": 1.6687289960065213e-06, + "loss": 1.8776, + "step": 3993 + }, + { + "epoch": 2.4576571032997463, + "grad_norm": 6.597254753112793, + "learning_rate": 1.665055799445583e-06, + "loss": 1.7991, + "step": 3994 + }, + { + "epoch": 2.458272440581494, + "grad_norm": 7.485265731811523, + "learning_rate": 1.6613862828818628e-06, + "loss": 1.7277, + "step": 3995 + }, + { + "epoch": 2.458887777863241, + "grad_norm": 5.942963600158691, + "learning_rate": 1.6577204479355026e-06, + "loss": 1.7297, + "step": 3996 + }, + { + "epoch": 2.459503115144989, + "grad_norm": 5.747087001800537, + "learning_rate": 1.6540582962250284e-06, + "loss": 1.6573, + "step": 3997 + }, + { + "epoch": 2.4601184524267365, + "grad_norm": 6.805125713348389, + "learning_rate": 1.6503998293673252e-06, + "loss": 1.899, + "step": 3998 + }, + { + 
"epoch": 2.4607337897084838, + "grad_norm": 7.090007305145264, + "learning_rate": 1.6467450489776581e-06, + "loss": 1.6989, + "step": 3999 + }, + { + "epoch": 2.4613491269902315, + "grad_norm": 7.009407043457031, + "learning_rate": 1.6430939566696713e-06, + "loss": 1.9889, + "step": 4000 + }, + { + "epoch": 2.461964464271979, + "grad_norm": 5.9119343757629395, + "learning_rate": 1.6394465540553727e-06, + "loss": 1.8651, + "step": 4001 + }, + { + "epoch": 2.462579801553727, + "grad_norm": 7.638962745666504, + "learning_rate": 1.6358028427451379e-06, + "loss": 1.7117, + "step": 4002 + }, + { + "epoch": 2.463195138835474, + "grad_norm": 5.7242231369018555, + "learning_rate": 1.6321628243477194e-06, + "loss": 1.8024, + "step": 4003 + }, + { + "epoch": 2.4638104761172217, + "grad_norm": 7.404674530029297, + "learning_rate": 1.628526500470241e-06, + "loss": 1.7605, + "step": 4004 + }, + { + "epoch": 2.4644258133989694, + "grad_norm": 5.902932167053223, + "learning_rate": 1.6248938727181917e-06, + "loss": 1.7134, + "step": 4005 + }, + { + "epoch": 2.465041150680717, + "grad_norm": 6.0384111404418945, + "learning_rate": 1.6212649426954208e-06, + "loss": 1.8474, + "step": 4006 + }, + { + "epoch": 2.4656564879624643, + "grad_norm": 5.564664363861084, + "learning_rate": 1.617639712004162e-06, + "loss": 1.8519, + "step": 4007 + }, + { + "epoch": 2.466271825244212, + "grad_norm": 7.603436470031738, + "learning_rate": 1.614018182245003e-06, + "loss": 1.6931, + "step": 4008 + }, + { + "epoch": 2.4668871625259596, + "grad_norm": 8.214187622070312, + "learning_rate": 1.6104003550169034e-06, + "loss": 1.7549, + "step": 4009 + }, + { + "epoch": 2.467502499807707, + "grad_norm": 7.571796417236328, + "learning_rate": 1.6067862319171778e-06, + "loss": 1.7217, + "step": 4010 + }, + { + "epoch": 2.4681178370894545, + "grad_norm": 5.566411972045898, + "learning_rate": 1.6031758145415222e-06, + "loss": 1.7415, + "step": 4011 + }, + { + "epoch": 2.468733174371202, + "grad_norm": 7.007628440856934, + "learning_rate": 1.5995691044839845e-06, + "loss": 2.0297, + "step": 4012 + }, + { + "epoch": 2.46934851165295, + "grad_norm": 7.095859050750732, + "learning_rate": 1.5959661033369777e-06, + "loss": 1.7709, + "step": 4013 + }, + { + "epoch": 2.4699638489346976, + "grad_norm": 6.659950256347656, + "learning_rate": 1.5923668126912806e-06, + "loss": 1.8592, + "step": 4014 + }, + { + "epoch": 2.470579186216445, + "grad_norm": 6.212173938751221, + "learning_rate": 1.5887712341360294e-06, + "loss": 1.8245, + "step": 4015 + }, + { + "epoch": 2.4711945234981925, + "grad_norm": 5.446131229400635, + "learning_rate": 1.585179369258726e-06, + "loss": 1.8303, + "step": 4016 + }, + { + "epoch": 2.47180986077994, + "grad_norm": 7.090719223022461, + "learning_rate": 1.5815912196452265e-06, + "loss": 1.8486, + "step": 4017 + }, + { + "epoch": 2.4724251980616874, + "grad_norm": 7.158158302307129, + "learning_rate": 1.5780067868797545e-06, + "loss": 1.5797, + "step": 4018 + }, + { + "epoch": 2.473040535343435, + "grad_norm": 5.819427490234375, + "learning_rate": 1.5744260725448845e-06, + "loss": 1.8374, + "step": 4019 + }, + { + "epoch": 2.4736558726251827, + "grad_norm": 5.414078235626221, + "learning_rate": 1.5708490782215535e-06, + "loss": 1.7753, + "step": 4020 + }, + { + "epoch": 2.4742712099069304, + "grad_norm": 6.275355815887451, + "learning_rate": 1.5672758054890624e-06, + "loss": 1.7044, + "step": 4021 + }, + { + "epoch": 2.4748865471886776, + "grad_norm": 5.433122158050537, + "learning_rate": 1.5637062559250537e-06, + "loss": 
1.9757, + "step": 4022 + }, + { + "epoch": 2.4755018844704253, + "grad_norm": 5.347390651702881, + "learning_rate": 1.560140431105539e-06, + "loss": 1.7825, + "step": 4023 + }, + { + "epoch": 2.476117221752173, + "grad_norm": 6.807344436645508, + "learning_rate": 1.5565783326048777e-06, + "loss": 1.9655, + "step": 4024 + }, + { + "epoch": 2.4767325590339206, + "grad_norm": 6.883098125457764, + "learning_rate": 1.5530199619957943e-06, + "loss": 1.7573, + "step": 4025 + }, + { + "epoch": 2.477347896315668, + "grad_norm": 7.492992401123047, + "learning_rate": 1.5494653208493527e-06, + "loss": 1.8956, + "step": 4026 + }, + { + "epoch": 2.4779632335974155, + "grad_norm": 5.662383079528809, + "learning_rate": 1.5459144107349788e-06, + "loss": 1.7546, + "step": 4027 + }, + { + "epoch": 2.4785785708791632, + "grad_norm": 6.910243988037109, + "learning_rate": 1.5423672332204552e-06, + "loss": 1.7656, + "step": 4028 + }, + { + "epoch": 2.4791939081609105, + "grad_norm": 7.09584379196167, + "learning_rate": 1.5388237898719105e-06, + "loss": 1.9401, + "step": 4029 + }, + { + "epoch": 2.479809245442658, + "grad_norm": 5.930286407470703, + "learning_rate": 1.535284082253823e-06, + "loss": 1.9026, + "step": 4030 + }, + { + "epoch": 2.480424582724406, + "grad_norm": 6.667765140533447, + "learning_rate": 1.531748111929021e-06, + "loss": 1.6366, + "step": 4031 + }, + { + "epoch": 2.4810399200061535, + "grad_norm": 7.182280540466309, + "learning_rate": 1.5282158804586934e-06, + "loss": 1.8079, + "step": 4032 + }, + { + "epoch": 2.481655257287901, + "grad_norm": 5.90341329574585, + "learning_rate": 1.5246873894023706e-06, + "loss": 1.8439, + "step": 4033 + }, + { + "epoch": 2.4822705945696484, + "grad_norm": 6.508640766143799, + "learning_rate": 1.5211626403179236e-06, + "loss": 1.6256, + "step": 4034 + }, + { + "epoch": 2.482885931851396, + "grad_norm": 5.922172546386719, + "learning_rate": 1.5176416347615886e-06, + "loss": 1.7657, + "step": 4035 + }, + { + "epoch": 2.4835012691331437, + "grad_norm": 6.803853988647461, + "learning_rate": 1.5141243742879363e-06, + "loss": 1.8789, + "step": 4036 + }, + { + "epoch": 2.484116606414891, + "grad_norm": 6.094958782196045, + "learning_rate": 1.510610860449887e-06, + "loss": 1.9184, + "step": 4037 + }, + { + "epoch": 2.4847319436966386, + "grad_norm": 7.1039652824401855, + "learning_rate": 1.5071010947987085e-06, + "loss": 1.7703, + "step": 4038 + }, + { + "epoch": 2.4853472809783863, + "grad_norm": 8.314252853393555, + "learning_rate": 1.5035950788840125e-06, + "loss": 1.6429, + "step": 4039 + }, + { + "epoch": 2.485962618260134, + "grad_norm": 6.098066806793213, + "learning_rate": 1.5000928142537552e-06, + "loss": 1.7168, + "step": 4040 + }, + { + "epoch": 2.486577955541881, + "grad_norm": 7.199650764465332, + "learning_rate": 1.4965943024542361e-06, + "loss": 1.5259, + "step": 4041 + }, + { + "epoch": 2.487193292823629, + "grad_norm": 6.601684093475342, + "learning_rate": 1.4930995450300977e-06, + "loss": 1.8413, + "step": 4042 + }, + { + "epoch": 2.4878086301053766, + "grad_norm": 7.748021125793457, + "learning_rate": 1.4896085435243279e-06, + "loss": 1.7446, + "step": 4043 + }, + { + "epoch": 2.4884239673871242, + "grad_norm": 5.8311848640441895, + "learning_rate": 1.4861212994782514e-06, + "loss": 1.7172, + "step": 4044 + }, + { + "epoch": 2.4890393046688715, + "grad_norm": 6.982073783874512, + "learning_rate": 1.4826378144315368e-06, + "loss": 1.788, + "step": 4045 + }, + { + "epoch": 2.489654641950619, + "grad_norm": 5.256682395935059, + "learning_rate": 
1.4791580899221936e-06, + "loss": 1.8183, + "step": 4046 + }, + { + "epoch": 2.490269979232367, + "grad_norm": 6.061534404754639, + "learning_rate": 1.4756821274865696e-06, + "loss": 1.7179, + "step": 4047 + }, + { + "epoch": 2.4908853165141145, + "grad_norm": 5.396890163421631, + "learning_rate": 1.4722099286593494e-06, + "loss": 1.7904, + "step": 4048 + }, + { + "epoch": 2.4915006537958617, + "grad_norm": 7.624649524688721, + "learning_rate": 1.4687414949735646e-06, + "loss": 1.6813, + "step": 4049 + }, + { + "epoch": 2.4921159910776094, + "grad_norm": 6.9591965675354, + "learning_rate": 1.4652768279605734e-06, + "loss": 1.8074, + "step": 4050 + }, + { + "epoch": 2.492731328359357, + "grad_norm": 5.602869033813477, + "learning_rate": 1.4618159291500777e-06, + "loss": 1.843, + "step": 4051 + }, + { + "epoch": 2.4933466656411047, + "grad_norm": 6.445555686950684, + "learning_rate": 1.45835880007011e-06, + "loss": 1.8259, + "step": 4052 + }, + { + "epoch": 2.493962002922852, + "grad_norm": 6.570901393890381, + "learning_rate": 1.454905442247051e-06, + "loss": 1.7535, + "step": 4053 + }, + { + "epoch": 2.4945773402045996, + "grad_norm": 7.277472496032715, + "learning_rate": 1.4514558572055993e-06, + "loss": 1.7409, + "step": 4054 + }, + { + "epoch": 2.4951926774863473, + "grad_norm": 6.873239517211914, + "learning_rate": 1.4480100464687973e-06, + "loss": 1.8648, + "step": 4055 + }, + { + "epoch": 2.4958080147680946, + "grad_norm": 7.202642917633057, + "learning_rate": 1.444568011558023e-06, + "loss": 1.6347, + "step": 4056 + }, + { + "epoch": 2.4964233520498422, + "grad_norm": 6.610955715179443, + "learning_rate": 1.441129753992986e-06, + "loss": 1.7818, + "step": 4057 + }, + { + "epoch": 2.49703868933159, + "grad_norm": 5.525655269622803, + "learning_rate": 1.4376952752917206e-06, + "loss": 1.7608, + "step": 4058 + }, + { + "epoch": 2.4976540266133376, + "grad_norm": 6.251297950744629, + "learning_rate": 1.4342645769705977e-06, + "loss": 1.6618, + "step": 4059 + }, + { + "epoch": 2.498269363895085, + "grad_norm": 5.992161750793457, + "learning_rate": 1.4308376605443253e-06, + "loss": 1.8559, + "step": 4060 + }, + { + "epoch": 2.4988847011768325, + "grad_norm": 6.296798229217529, + "learning_rate": 1.4274145275259333e-06, + "loss": 1.6667, + "step": 4061 + }, + { + "epoch": 2.49950003845858, + "grad_norm": 5.5973052978515625, + "learning_rate": 1.4239951794267848e-06, + "loss": 1.6366, + "step": 4062 + }, + { + "epoch": 2.500115375740328, + "grad_norm": 6.590826988220215, + "learning_rate": 1.4205796177565688e-06, + "loss": 1.9246, + "step": 4063 + }, + { + "epoch": 2.500730713022075, + "grad_norm": 5.3784308433532715, + "learning_rate": 1.4171678440233072e-06, + "loss": 1.7734, + "step": 4064 + }, + { + "epoch": 2.5013460503038227, + "grad_norm": 5.764796733856201, + "learning_rate": 1.4137598597333458e-06, + "loss": 1.8255, + "step": 4065 + }, + { + "epoch": 2.5019613875855704, + "grad_norm": 7.340298175811768, + "learning_rate": 1.4103556663913586e-06, + "loss": 1.6583, + "step": 4066 + }, + { + "epoch": 2.5025767248673176, + "grad_norm": 5.363152503967285, + "learning_rate": 1.406955265500346e-06, + "loss": 1.7724, + "step": 4067 + }, + { + "epoch": 2.5031920621490653, + "grad_norm": 8.327474594116211, + "learning_rate": 1.4035586585616323e-06, + "loss": 1.8017, + "step": 4068 + }, + { + "epoch": 2.503807399430813, + "grad_norm": 5.712497711181641, + "learning_rate": 1.4001658470748703e-06, + "loss": 1.9487, + "step": 4069 + }, + { + "epoch": 2.5044227367125607, + "grad_norm": 
6.899074554443359, + "learning_rate": 1.3967768325380327e-06, + "loss": 1.6422, + "step": 4070 + }, + { + "epoch": 2.5050380739943083, + "grad_norm": 5.434085845947266, + "learning_rate": 1.3933916164474193e-06, + "loss": 1.8379, + "step": 4071 + }, + { + "epoch": 2.5056534112760556, + "grad_norm": 6.081887722015381, + "learning_rate": 1.3900102002976512e-06, + "loss": 1.8731, + "step": 4072 + }, + { + "epoch": 2.5062687485578032, + "grad_norm": 6.443476676940918, + "learning_rate": 1.386632585581672e-06, + "loss": 1.8575, + "step": 4073 + }, + { + "epoch": 2.506884085839551, + "grad_norm": 5.832664489746094, + "learning_rate": 1.3832587737907476e-06, + "loss": 1.762, + "step": 4074 + }, + { + "epoch": 2.507499423121298, + "grad_norm": 7.333042621612549, + "learning_rate": 1.3798887664144634e-06, + "loss": 1.9359, + "step": 4075 + }, + { + "epoch": 2.508114760403046, + "grad_norm": 6.306722640991211, + "learning_rate": 1.3765225649407232e-06, + "loss": 1.7315, + "step": 4076 + }, + { + "epoch": 2.5087300976847935, + "grad_norm": 5.731489658355713, + "learning_rate": 1.3731601708557608e-06, + "loss": 1.8368, + "step": 4077 + }, + { + "epoch": 2.509345434966541, + "grad_norm": 6.045281410217285, + "learning_rate": 1.3698015856441138e-06, + "loss": 1.8142, + "step": 4078 + }, + { + "epoch": 2.509960772248289, + "grad_norm": 7.192347049713135, + "learning_rate": 1.3664468107886496e-06, + "loss": 1.7677, + "step": 4079 + }, + { + "epoch": 2.510576109530036, + "grad_norm": 6.25596284866333, + "learning_rate": 1.3630958477705448e-06, + "loss": 1.899, + "step": 4080 + }, + { + "epoch": 2.5111914468117837, + "grad_norm": 6.6646294593811035, + "learning_rate": 1.3597486980693065e-06, + "loss": 1.7926, + "step": 4081 + }, + { + "epoch": 2.5118067840935314, + "grad_norm": 5.974234104156494, + "learning_rate": 1.3564053631627406e-06, + "loss": 1.7439, + "step": 4082 + }, + { + "epoch": 2.5124221213752786, + "grad_norm": 6.50294303894043, + "learning_rate": 1.3530658445269784e-06, + "loss": 1.7802, + "step": 4083 + }, + { + "epoch": 2.5130374586570263, + "grad_norm": 7.915854454040527, + "learning_rate": 1.3497301436364696e-06, + "loss": 1.9039, + "step": 4084 + }, + { + "epoch": 2.513652795938774, + "grad_norm": 6.785024642944336, + "learning_rate": 1.3463982619639704e-06, + "loss": 1.7626, + "step": 4085 + }, + { + "epoch": 2.5142681332205212, + "grad_norm": 6.2841105461120605, + "learning_rate": 1.3430702009805586e-06, + "loss": 1.6137, + "step": 4086 + }, + { + "epoch": 2.514883470502269, + "grad_norm": 6.649860858917236, + "learning_rate": 1.339745962155613e-06, + "loss": 1.9555, + "step": 4087 + }, + { + "epoch": 2.5154988077840166, + "grad_norm": 5.377979278564453, + "learning_rate": 1.33642554695684e-06, + "loss": 1.8402, + "step": 4088 + }, + { + "epoch": 2.5161141450657643, + "grad_norm": 6.1735100746154785, + "learning_rate": 1.3331089568502465e-06, + "loss": 1.7618, + "step": 4089 + }, + { + "epoch": 2.516729482347512, + "grad_norm": 7.199767112731934, + "learning_rate": 1.329796193300157e-06, + "loss": 1.6284, + "step": 4090 + }, + { + "epoch": 2.517344819629259, + "grad_norm": 6.607974529266357, + "learning_rate": 1.3264872577692022e-06, + "loss": 1.8803, + "step": 4091 + }, + { + "epoch": 2.517960156911007, + "grad_norm": 7.4250712394714355, + "learning_rate": 1.323182151718324e-06, + "loss": 1.7262, + "step": 4092 + }, + { + "epoch": 2.5185754941927545, + "grad_norm": 7.862689971923828, + "learning_rate": 1.319880876606776e-06, + "loss": 1.8068, + "step": 4093 + }, + { + "epoch": 
2.5191908314745017, + "grad_norm": 7.042996406555176, + "learning_rate": 1.316583433892118e-06, + "loss": 1.7381, + "step": 4094 + }, + { + "epoch": 2.5198061687562494, + "grad_norm": 5.479574203491211, + "learning_rate": 1.3132898250302173e-06, + "loss": 1.626, + "step": 4095 + }, + { + "epoch": 2.520421506037997, + "grad_norm": 6.142526149749756, + "learning_rate": 1.3100000514752497e-06, + "loss": 1.755, + "step": 4096 + }, + { + "epoch": 2.5210368433197448, + "grad_norm": 7.062384605407715, + "learning_rate": 1.3067141146796968e-06, + "loss": 1.8686, + "step": 4097 + }, + { + "epoch": 2.5216521806014924, + "grad_norm": 7.666657447814941, + "learning_rate": 1.303432016094348e-06, + "loss": 1.8958, + "step": 4098 + }, + { + "epoch": 2.5222675178832397, + "grad_norm": 7.044920444488525, + "learning_rate": 1.3001537571682965e-06, + "loss": 1.9807, + "step": 4099 + }, + { + "epoch": 2.5228828551649873, + "grad_norm": 7.197801113128662, + "learning_rate": 1.2968793393489398e-06, + "loss": 1.6958, + "step": 4100 + }, + { + "epoch": 2.523498192446735, + "grad_norm": 7.051377773284912, + "learning_rate": 1.2936087640819805e-06, + "loss": 1.6357, + "step": 4101 + }, + { + "epoch": 2.5241135297284822, + "grad_norm": 5.801903247833252, + "learning_rate": 1.2903420328114247e-06, + "loss": 1.7488, + "step": 4102 + }, + { + "epoch": 2.52472886701023, + "grad_norm": 5.933209419250488, + "learning_rate": 1.2870791469795818e-06, + "loss": 1.7206, + "step": 4103 + }, + { + "epoch": 2.5253442042919776, + "grad_norm": 6.531556606292725, + "learning_rate": 1.2838201080270585e-06, + "loss": 1.7765, + "step": 4104 + }, + { + "epoch": 2.5259595415737253, + "grad_norm": 6.3857421875, + "learning_rate": 1.280564917392776e-06, + "loss": 1.6379, + "step": 4105 + }, + { + "epoch": 2.5265748788554725, + "grad_norm": 6.634007453918457, + "learning_rate": 1.2773135765139389e-06, + "loss": 1.8558, + "step": 4106 + }, + { + "epoch": 2.52719021613722, + "grad_norm": 7.17467737197876, + "learning_rate": 1.2740660868260634e-06, + "loss": 1.753, + "step": 4107 + }, + { + "epoch": 2.527805553418968, + "grad_norm": 5.519010066986084, + "learning_rate": 1.2708224497629617e-06, + "loss": 1.7169, + "step": 4108 + }, + { + "epoch": 2.5284208907007155, + "grad_norm": 7.232912063598633, + "learning_rate": 1.2675826667567504e-06, + "loss": 1.6572, + "step": 4109 + }, + { + "epoch": 2.5290362279824627, + "grad_norm": 6.507946014404297, + "learning_rate": 1.2643467392378383e-06, + "loss": 1.846, + "step": 4110 + }, + { + "epoch": 2.5296515652642104, + "grad_norm": 6.512472152709961, + "learning_rate": 1.2611146686349284e-06, + "loss": 1.7548, + "step": 4111 + }, + { + "epoch": 2.530266902545958, + "grad_norm": 6.9173688888549805, + "learning_rate": 1.2578864563750326e-06, + "loss": 1.8942, + "step": 4112 + }, + { + "epoch": 2.5308822398277053, + "grad_norm": 5.3936614990234375, + "learning_rate": 1.2546621038834506e-06, + "loss": 1.696, + "step": 4113 + }, + { + "epoch": 2.531497577109453, + "grad_norm": 5.950006008148193, + "learning_rate": 1.2514416125837825e-06, + "loss": 1.8186, + "step": 4114 + }, + { + "epoch": 2.5321129143912007, + "grad_norm": 5.9526519775390625, + "learning_rate": 1.2482249838979143e-06, + "loss": 1.9573, + "step": 4115 + }, + { + "epoch": 2.5327282516729483, + "grad_norm": 6.135133743286133, + "learning_rate": 1.2450122192460412e-06, + "loss": 1.7878, + "step": 4116 + }, + { + "epoch": 2.533343588954696, + "grad_norm": 5.614802837371826, + "learning_rate": 1.2418033200466418e-06, + "loss": 1.7601, + "step": 
4117 + }, + { + "epoch": 2.5339589262364433, + "grad_norm": 5.9602508544921875, + "learning_rate": 1.2385982877164905e-06, + "loss": 1.7252, + "step": 4118 + }, + { + "epoch": 2.534574263518191, + "grad_norm": 6.721235275268555, + "learning_rate": 1.2353971236706564e-06, + "loss": 1.8162, + "step": 4119 + }, + { + "epoch": 2.5351896007999386, + "grad_norm": 6.526131629943848, + "learning_rate": 1.232199829322498e-06, + "loss": 1.8636, + "step": 4120 + }, + { + "epoch": 2.535804938081686, + "grad_norm": 6.227874279022217, + "learning_rate": 1.2290064060836671e-06, + "loss": 1.9588, + "step": 4121 + }, + { + "epoch": 2.5364202753634335, + "grad_norm": 5.758062362670898, + "learning_rate": 1.2258168553641058e-06, + "loss": 1.934, + "step": 4122 + }, + { + "epoch": 2.537035612645181, + "grad_norm": 7.232816696166992, + "learning_rate": 1.2226311785720468e-06, + "loss": 1.8241, + "step": 4123 + }, + { + "epoch": 2.537650949926929, + "grad_norm": 6.333841800689697, + "learning_rate": 1.2194493771140115e-06, + "loss": 1.884, + "step": 4124 + }, + { + "epoch": 2.5382662872086765, + "grad_norm": 8.433918952941895, + "learning_rate": 1.2162714523948104e-06, + "loss": 1.4225, + "step": 4125 + }, + { + "epoch": 2.5388816244904238, + "grad_norm": 7.255174160003662, + "learning_rate": 1.2130974058175438e-06, + "loss": 1.6662, + "step": 4126 + }, + { + "epoch": 2.5394969617721714, + "grad_norm": 7.263703346252441, + "learning_rate": 1.209927238783598e-06, + "loss": 1.753, + "step": 4127 + }, + { + "epoch": 2.540112299053919, + "grad_norm": 6.831658840179443, + "learning_rate": 1.206760952692647e-06, + "loss": 1.7191, + "step": 4128 + }, + { + "epoch": 2.5407276363356663, + "grad_norm": 6.586546421051025, + "learning_rate": 1.2035985489426526e-06, + "loss": 1.7241, + "step": 4129 + }, + { + "epoch": 2.541342973617414, + "grad_norm": 6.865005970001221, + "learning_rate": 1.2004400289298602e-06, + "loss": 1.6373, + "step": 4130 + }, + { + "epoch": 2.5419583108991617, + "grad_norm": 6.933180332183838, + "learning_rate": 1.1972853940488017e-06, + "loss": 1.909, + "step": 4131 + }, + { + "epoch": 2.542573648180909, + "grad_norm": 5.051992893218994, + "learning_rate": 1.194134645692292e-06, + "loss": 1.8889, + "step": 4132 + }, + { + "epoch": 2.5431889854626566, + "grad_norm": 5.827881813049316, + "learning_rate": 1.1909877852514351e-06, + "loss": 1.8537, + "step": 4133 + }, + { + "epoch": 2.5438043227444043, + "grad_norm": 7.358208656311035, + "learning_rate": 1.1878448141156162e-06, + "loss": 1.7321, + "step": 4134 + }, + { + "epoch": 2.544419660026152, + "grad_norm": 6.752117156982422, + "learning_rate": 1.1847057336724965e-06, + "loss": 1.796, + "step": 4135 + }, + { + "epoch": 2.5450349973078996, + "grad_norm": 6.5308685302734375, + "learning_rate": 1.181570545308025e-06, + "loss": 1.7602, + "step": 4136 + }, + { + "epoch": 2.545650334589647, + "grad_norm": 6.010915279388428, + "learning_rate": 1.1784392504064378e-06, + "loss": 1.7477, + "step": 4137 + }, + { + "epoch": 2.5462656718713945, + "grad_norm": 7.357639312744141, + "learning_rate": 1.1753118503502447e-06, + "loss": 1.691, + "step": 4138 + }, + { + "epoch": 2.546881009153142, + "grad_norm": 5.617232322692871, + "learning_rate": 1.1721883465202333e-06, + "loss": 1.8284, + "step": 4139 + }, + { + "epoch": 2.5474963464348894, + "grad_norm": 6.320753574371338, + "learning_rate": 1.1690687402954792e-06, + "loss": 1.7748, + "step": 4140 + }, + { + "epoch": 2.548111683716637, + "grad_norm": 6.926965713500977, + "learning_rate": 1.1659530330533331e-06, + 
"loss": 1.6283, + "step": 4141 + }, + { + "epoch": 2.5487270209983848, + "grad_norm": 6.189023017883301, + "learning_rate": 1.1628412261694256e-06, + "loss": 1.7729, + "step": 4142 + }, + { + "epoch": 2.5493423582801324, + "grad_norm": 5.72829532623291, + "learning_rate": 1.1597333210176587e-06, + "loss": 1.819, + "step": 4143 + }, + { + "epoch": 2.54995769556188, + "grad_norm": 9.32161808013916, + "learning_rate": 1.1566293189702215e-06, + "loss": 1.781, + "step": 4144 + }, + { + "epoch": 2.5505730328436274, + "grad_norm": 7.289495468139648, + "learning_rate": 1.1535292213975747e-06, + "loss": 1.5886, + "step": 4145 + }, + { + "epoch": 2.551188370125375, + "grad_norm": 5.744442939758301, + "learning_rate": 1.1504330296684552e-06, + "loss": 1.8103, + "step": 4146 + }, + { + "epoch": 2.5518037074071227, + "grad_norm": 6.212698936462402, + "learning_rate": 1.1473407451498752e-06, + "loss": 1.8051, + "step": 4147 + }, + { + "epoch": 2.55241904468887, + "grad_norm": 5.673927307128906, + "learning_rate": 1.144252369207125e-06, + "loss": 1.602, + "step": 4148 + }, + { + "epoch": 2.5530343819706176, + "grad_norm": 6.507323741912842, + "learning_rate": 1.1411679032037636e-06, + "loss": 1.7538, + "step": 4149 + }, + { + "epoch": 2.5536497192523653, + "grad_norm": 5.215931415557861, + "learning_rate": 1.1380873485016297e-06, + "loss": 1.741, + "step": 4150 + }, + { + "epoch": 2.5542650565341125, + "grad_norm": 7.122503757476807, + "learning_rate": 1.1350107064608317e-06, + "loss": 1.6085, + "step": 4151 + }, + { + "epoch": 2.55488039381586, + "grad_norm": 6.917306900024414, + "learning_rate": 1.1319379784397499e-06, + "loss": 1.7817, + "step": 4152 + }, + { + "epoch": 2.555495731097608, + "grad_norm": 7.950695991516113, + "learning_rate": 1.128869165795039e-06, + "loss": 1.7332, + "step": 4153 + }, + { + "epoch": 2.5561110683793555, + "grad_norm": 5.63156270980835, + "learning_rate": 1.1258042698816218e-06, + "loss": 1.8125, + "step": 4154 + }, + { + "epoch": 2.556726405661103, + "grad_norm": 6.009623050689697, + "learning_rate": 1.122743292052697e-06, + "loss": 1.8075, + "step": 4155 + }, + { + "epoch": 2.5573417429428504, + "grad_norm": 6.992203712463379, + "learning_rate": 1.119686233659727e-06, + "loss": 1.9523, + "step": 4156 + }, + { + "epoch": 2.557957080224598, + "grad_norm": 6.242934703826904, + "learning_rate": 1.1166330960524464e-06, + "loss": 1.55, + "step": 4157 + }, + { + "epoch": 2.558572417506346, + "grad_norm": 6.48717737197876, + "learning_rate": 1.1135838805788646e-06, + "loss": 1.6441, + "step": 4158 + }, + { + "epoch": 2.559187754788093, + "grad_norm": 8.113075256347656, + "learning_rate": 1.1105385885852483e-06, + "loss": 1.6688, + "step": 4159 + }, + { + "epoch": 2.5598030920698407, + "grad_norm": 6.7107110023498535, + "learning_rate": 1.1074972214161384e-06, + "loss": 1.6608, + "step": 4160 + }, + { + "epoch": 2.5604184293515884, + "grad_norm": 6.798482894897461, + "learning_rate": 1.1044597804143441e-06, + "loss": 1.8122, + "step": 4161 + }, + { + "epoch": 2.561033766633336, + "grad_norm": 4.968344211578369, + "learning_rate": 1.1014262669209397e-06, + "loss": 1.801, + "step": 4162 + }, + { + "epoch": 2.5616491039150837, + "grad_norm": 5.332957744598389, + "learning_rate": 1.0983966822752624e-06, + "loss": 1.6309, + "step": 4163 + }, + { + "epoch": 2.562264441196831, + "grad_norm": 6.047231197357178, + "learning_rate": 1.0953710278149155e-06, + "loss": 1.6842, + "step": 4164 + }, + { + "epoch": 2.5628797784785786, + "grad_norm": 7.621622085571289, + "learning_rate": 
1.0923493048757728e-06, + "loss": 1.6186, + "step": 4165 + }, + { + "epoch": 2.5634951157603263, + "grad_norm": 8.029854774475098, + "learning_rate": 1.089331514791969e-06, + "loss": 1.8926, + "step": 4166 + }, + { + "epoch": 2.5641104530420735, + "grad_norm": 6.61060094833374, + "learning_rate": 1.0863176588958957e-06, + "loss": 1.9157, + "step": 4167 + }, + { + "epoch": 2.564725790323821, + "grad_norm": 6.966330528259277, + "learning_rate": 1.083307738518219e-06, + "loss": 1.6553, + "step": 4168 + }, + { + "epoch": 2.565341127605569, + "grad_norm": 5.811124801635742, + "learning_rate": 1.0803017549878592e-06, + "loss": 1.7612, + "step": 4169 + }, + { + "epoch": 2.565956464887316, + "grad_norm": 6.4157185554504395, + "learning_rate": 1.0772997096320049e-06, + "loss": 1.8346, + "step": 4170 + }, + { + "epoch": 2.5665718021690638, + "grad_norm": 6.470607757568359, + "learning_rate": 1.0743016037760946e-06, + "loss": 1.8401, + "step": 4171 + }, + { + "epoch": 2.5671871394508115, + "grad_norm": 6.488245964050293, + "learning_rate": 1.0713074387438405e-06, + "loss": 1.6528, + "step": 4172 + }, + { + "epoch": 2.567802476732559, + "grad_norm": 7.467713356018066, + "learning_rate": 1.0683172158572097e-06, + "loss": 1.7491, + "step": 4173 + }, + { + "epoch": 2.568417814014307, + "grad_norm": 5.319357872009277, + "learning_rate": 1.065330936436426e-06, + "loss": 1.841, + "step": 4174 + }, + { + "epoch": 2.569033151296054, + "grad_norm": 6.01048469543457, + "learning_rate": 1.0623486017999762e-06, + "loss": 1.8831, + "step": 4175 + }, + { + "epoch": 2.5696484885778017, + "grad_norm": 8.029783248901367, + "learning_rate": 1.0593702132646023e-06, + "loss": 1.7042, + "step": 4176 + }, + { + "epoch": 2.5702638258595494, + "grad_norm": 5.201828479766846, + "learning_rate": 1.0563957721453067e-06, + "loss": 1.8278, + "step": 4177 + }, + { + "epoch": 2.5708791631412966, + "grad_norm": 6.103088855743408, + "learning_rate": 1.0534252797553468e-06, + "loss": 1.5824, + "step": 4178 + }, + { + "epoch": 2.5714945004230443, + "grad_norm": 6.424221992492676, + "learning_rate": 1.0504587374062392e-06, + "loss": 1.8579, + "step": 4179 + }, + { + "epoch": 2.572109837704792, + "grad_norm": 7.792397499084473, + "learning_rate": 1.0474961464077527e-06, + "loss": 1.6918, + "step": 4180 + }, + { + "epoch": 2.5727251749865396, + "grad_norm": 6.5456061363220215, + "learning_rate": 1.0445375080679122e-06, + "loss": 1.817, + "step": 4181 + }, + { + "epoch": 2.5733405122682873, + "grad_norm": 6.511673450469971, + "learning_rate": 1.0415828236930059e-06, + "loss": 1.8218, + "step": 4182 + }, + { + "epoch": 2.5739558495500345, + "grad_norm": 5.406862258911133, + "learning_rate": 1.0386320945875627e-06, + "loss": 1.9012, + "step": 4183 + }, + { + "epoch": 2.574571186831782, + "grad_norm": 6.190816402435303, + "learning_rate": 1.0356853220543727e-06, + "loss": 1.856, + "step": 4184 + }, + { + "epoch": 2.57518652411353, + "grad_norm": 6.068096160888672, + "learning_rate": 1.0327425073944785e-06, + "loss": 1.6763, + "step": 4185 + }, + { + "epoch": 2.575801861395277, + "grad_norm": 7.489601135253906, + "learning_rate": 1.0298036519071785e-06, + "loss": 1.8137, + "step": 4186 + }, + { + "epoch": 2.576417198677025, + "grad_norm": 6.367162227630615, + "learning_rate": 1.0268687568900159e-06, + "loss": 1.6202, + "step": 4187 + }, + { + "epoch": 2.5770325359587725, + "grad_norm": 5.794342994689941, + "learning_rate": 1.0239378236387864e-06, + "loss": 1.7442, + "step": 4188 + }, + { + "epoch": 2.5776478732405197, + "grad_norm": 
5.626938343048096, + "learning_rate": 1.0210108534475438e-06, + "loss": 1.7282, + "step": 4189 + }, + { + "epoch": 2.5782632105222674, + "grad_norm": 7.206967353820801, + "learning_rate": 1.0180878476085886e-06, + "loss": 1.7145, + "step": 4190 + }, + { + "epoch": 2.578878547804015, + "grad_norm": 5.568320274353027, + "learning_rate": 1.0151688074124645e-06, + "loss": 1.6553, + "step": 4191 + }, + { + "epoch": 2.5794938850857627, + "grad_norm": 5.7400126457214355, + "learning_rate": 1.01225373414797e-06, + "loss": 1.871, + "step": 4192 + }, + { + "epoch": 2.5801092223675104, + "grad_norm": 6.79650354385376, + "learning_rate": 1.0093426291021557e-06, + "loss": 1.8234, + "step": 4193 + }, + { + "epoch": 2.5807245596492576, + "grad_norm": 5.838551044464111, + "learning_rate": 1.006435493560315e-06, + "loss": 1.9553, + "step": 4194 + }, + { + "epoch": 2.5813398969310053, + "grad_norm": 5.939212799072266, + "learning_rate": 1.0035323288059861e-06, + "loss": 1.8022, + "step": 4195 + }, + { + "epoch": 2.581955234212753, + "grad_norm": 6.8916850090026855, + "learning_rate": 1.000633136120962e-06, + "loss": 1.9687, + "step": 4196 + }, + { + "epoch": 2.5825705714945, + "grad_norm": 6.291812419891357, + "learning_rate": 9.977379167852763e-07, + "loss": 1.8698, + "step": 4197 + }, + { + "epoch": 2.583185908776248, + "grad_norm": 6.67784309387207, + "learning_rate": 9.94846672077212e-07, + "loss": 1.8424, + "step": 4198 + }, + { + "epoch": 2.5838012460579955, + "grad_norm": 6.0853471755981445, + "learning_rate": 9.919594032732893e-07, + "loss": 1.8225, + "step": 4199 + }, + { + "epoch": 2.5844165833397432, + "grad_norm": 6.006263732910156, + "learning_rate": 9.89076111648284e-07, + "loss": 1.8751, + "step": 4200 + }, + { + "epoch": 2.585031920621491, + "grad_norm": 6.8698530197143555, + "learning_rate": 9.861967984752086e-07, + "loss": 1.624, + "step": 4201 + }, + { + "epoch": 2.585647257903238, + "grad_norm": 5.597245216369629, + "learning_rate": 9.83321465025322e-07, + "loss": 1.633, + "step": 4202 + }, + { + "epoch": 2.586262595184986, + "grad_norm": 5.9872026443481445, + "learning_rate": 9.804501125681243e-07, + "loss": 1.805, + "step": 4203 + }, + { + "epoch": 2.5868779324667335, + "grad_norm": 5.77107048034668, + "learning_rate": 9.77582742371359e-07, + "loss": 1.5129, + "step": 4204 + }, + { + "epoch": 2.5874932697484807, + "grad_norm": 5.787739276885986, + "learning_rate": 9.747193557010114e-07, + "loss": 1.7413, + "step": 4205 + }, + { + "epoch": 2.5881086070302284, + "grad_norm": 7.320919036865234, + "learning_rate": 9.71859953821307e-07, + "loss": 1.8537, + "step": 4206 + }, + { + "epoch": 2.588723944311976, + "grad_norm": 6.407787799835205, + "learning_rate": 9.690045379947134e-07, + "loss": 1.7515, + "step": 4207 + }, + { + "epoch": 2.5893392815937233, + "grad_norm": 6.668667316436768, + "learning_rate": 9.661531094819366e-07, + "loss": 1.944, + "step": 4208 + }, + { + "epoch": 2.589954618875471, + "grad_norm": 5.278316974639893, + "learning_rate": 9.633056695419229e-07, + "loss": 1.765, + "step": 4209 + }, + { + "epoch": 2.5905699561572186, + "grad_norm": 8.620208740234375, + "learning_rate": 9.604622194318625e-07, + "loss": 1.7903, + "step": 4210 + }, + { + "epoch": 2.5911852934389663, + "grad_norm": 7.895908355712891, + "learning_rate": 9.576227604071731e-07, + "loss": 1.6449, + "step": 4211 + }, + { + "epoch": 2.591800630720714, + "grad_norm": 7.571836471557617, + "learning_rate": 9.547872937215187e-07, + "loss": 2.0668, + "step": 4212 + }, + { + "epoch": 2.592415968002461, + 
"grad_norm": 7.039986610412598, + "learning_rate": 9.519558206267964e-07, + "loss": 1.667, + "step": 4213 + }, + { + "epoch": 2.593031305284209, + "grad_norm": 6.147831439971924, + "learning_rate": 9.491283423731501e-07, + "loss": 1.7639, + "step": 4214 + }, + { + "epoch": 2.5936466425659566, + "grad_norm": 6.8246564865112305, + "learning_rate": 9.463048602089431e-07, + "loss": 1.886, + "step": 4215 + }, + { + "epoch": 2.594261979847704, + "grad_norm": 7.2729997634887695, + "learning_rate": 9.434853753807838e-07, + "loss": 1.6939, + "step": 4216 + }, + { + "epoch": 2.5948773171294515, + "grad_norm": 5.566476345062256, + "learning_rate": 9.406698891335209e-07, + "loss": 1.7426, + "step": 4217 + }, + { + "epoch": 2.595492654411199, + "grad_norm": 6.9734601974487305, + "learning_rate": 9.3785840271023e-07, + "loss": 1.8328, + "step": 4218 + }, + { + "epoch": 2.596107991692947, + "grad_norm": 5.574838638305664, + "learning_rate": 9.350509173522193e-07, + "loss": 1.9735, + "step": 4219 + }, + { + "epoch": 2.5967233289746945, + "grad_norm": 6.480015277862549, + "learning_rate": 9.322474342990339e-07, + "loss": 1.8045, + "step": 4220 + }, + { + "epoch": 2.5973386662564417, + "grad_norm": 7.92214298248291, + "learning_rate": 9.294479547884561e-07, + "loss": 1.9924, + "step": 4221 + }, + { + "epoch": 2.5979540035381894, + "grad_norm": 7.472610950469971, + "learning_rate": 9.266524800564947e-07, + "loss": 1.6978, + "step": 4222 + }, + { + "epoch": 2.598569340819937, + "grad_norm": 6.016477108001709, + "learning_rate": 9.23861011337387e-07, + "loss": 1.7565, + "step": 4223 + }, + { + "epoch": 2.5991846781016843, + "grad_norm": 6.158195495605469, + "learning_rate": 9.210735498636125e-07, + "loss": 1.771, + "step": 4224 + }, + { + "epoch": 2.599800015383432, + "grad_norm": 6.462884902954102, + "learning_rate": 9.182900968658736e-07, + "loss": 1.9665, + "step": 4225 + }, + { + "epoch": 2.6004153526651796, + "grad_norm": 9.081789016723633, + "learning_rate": 9.155106535731029e-07, + "loss": 1.7369, + "step": 4226 + }, + { + "epoch": 2.6010306899469273, + "grad_norm": 7.202375411987305, + "learning_rate": 9.127352212124663e-07, + "loss": 1.9308, + "step": 4227 + }, + { + "epoch": 2.6016460272286746, + "grad_norm": 7.235840797424316, + "learning_rate": 9.09963801009357e-07, + "loss": 1.8912, + "step": 4228 + }, + { + "epoch": 2.6022613645104222, + "grad_norm": 6.252805233001709, + "learning_rate": 9.071963941873952e-07, + "loss": 1.8542, + "step": 4229 + }, + { + "epoch": 2.60287670179217, + "grad_norm": 5.60051965713501, + "learning_rate": 9.044330019684311e-07, + "loss": 1.7402, + "step": 4230 + }, + { + "epoch": 2.6034920390739176, + "grad_norm": 7.1512451171875, + "learning_rate": 9.016736255725434e-07, + "loss": 1.7233, + "step": 4231 + }, + { + "epoch": 2.604107376355665, + "grad_norm": 7.647735118865967, + "learning_rate": 8.98918266218034e-07, + "loss": 1.8184, + "step": 4232 + }, + { + "epoch": 2.6047227136374125, + "grad_norm": 5.607512950897217, + "learning_rate": 8.961669251214356e-07, + "loss": 1.747, + "step": 4233 + }, + { + "epoch": 2.60533805091916, + "grad_norm": 6.181026935577393, + "learning_rate": 8.934196034975028e-07, + "loss": 1.9267, + "step": 4234 + }, + { + "epoch": 2.6059533882009074, + "grad_norm": 8.626343727111816, + "learning_rate": 8.906763025592191e-07, + "loss": 1.6852, + "step": 4235 + }, + { + "epoch": 2.606568725482655, + "grad_norm": 5.8370256423950195, + "learning_rate": 8.879370235177909e-07, + "loss": 1.7892, + "step": 4236 + }, + { + "epoch": 2.6071840627644027, + 
"grad_norm": 6.202864646911621, + "learning_rate": 8.852017675826463e-07, + "loss": 1.7956, + "step": 4237 + }, + { + "epoch": 2.6077994000461504, + "grad_norm": 7.0579633712768555, + "learning_rate": 8.824705359614461e-07, + "loss": 1.7669, + "step": 4238 + }, + { + "epoch": 2.608414737327898, + "grad_norm": 5.604804039001465, + "learning_rate": 8.797433298600622e-07, + "loss": 2.0392, + "step": 4239 + }, + { + "epoch": 2.6090300746096453, + "grad_norm": 7.693241596221924, + "learning_rate": 8.77020150482597e-07, + "loss": 1.8698, + "step": 4240 + }, + { + "epoch": 2.609645411891393, + "grad_norm": 6.17128849029541, + "learning_rate": 8.743009990313723e-07, + "loss": 1.8311, + "step": 4241 + }, + { + "epoch": 2.6102607491731407, + "grad_norm": 6.8051557540893555, + "learning_rate": 8.715858767069374e-07, + "loss": 1.9344, + "step": 4242 + }, + { + "epoch": 2.610876086454888, + "grad_norm": 6.061171054840088, + "learning_rate": 8.688747847080514e-07, + "loss": 1.9173, + "step": 4243 + }, + { + "epoch": 2.6114914237366356, + "grad_norm": 6.012004375457764, + "learning_rate": 8.661677242317024e-07, + "loss": 1.7577, + "step": 4244 + }, + { + "epoch": 2.6121067610183832, + "grad_norm": 6.577425479888916, + "learning_rate": 8.634646964730975e-07, + "loss": 1.905, + "step": 4245 + }, + { + "epoch": 2.612722098300131, + "grad_norm": 6.5390472412109375, + "learning_rate": 8.607657026256633e-07, + "loss": 1.6936, + "step": 4246 + }, + { + "epoch": 2.6133374355818786, + "grad_norm": 5.40130615234375, + "learning_rate": 8.580707438810398e-07, + "loss": 1.8893, + "step": 4247 + }, + { + "epoch": 2.613952772863626, + "grad_norm": 7.455695152282715, + "learning_rate": 8.55379821429092e-07, + "loss": 1.6529, + "step": 4248 + }, + { + "epoch": 2.6145681101453735, + "grad_norm": 6.760003566741943, + "learning_rate": 8.526929364579018e-07, + "loss": 1.8257, + "step": 4249 + }, + { + "epoch": 2.615183447427121, + "grad_norm": 6.151179790496826, + "learning_rate": 8.500100901537678e-07, + "loss": 1.9744, + "step": 4250 + }, + { + "epoch": 2.6157987847088684, + "grad_norm": 6.62774133682251, + "learning_rate": 8.473312837012027e-07, + "loss": 1.8122, + "step": 4251 + }, + { + "epoch": 2.616414121990616, + "grad_norm": 6.376801013946533, + "learning_rate": 8.446565182829403e-07, + "loss": 1.7669, + "step": 4252 + }, + { + "epoch": 2.6170294592723637, + "grad_norm": 6.820733070373535, + "learning_rate": 8.419857950799259e-07, + "loss": 1.6908, + "step": 4253 + }, + { + "epoch": 2.617644796554111, + "grad_norm": 5.486757755279541, + "learning_rate": 8.393191152713231e-07, + "loss": 1.8397, + "step": 4254 + }, + { + "epoch": 2.6182601338358586, + "grad_norm": 5.772697448730469, + "learning_rate": 8.366564800345089e-07, + "loss": 1.7965, + "step": 4255 + }, + { + "epoch": 2.6188754711176063, + "grad_norm": 5.888457298278809, + "learning_rate": 8.339978905450752e-07, + "loss": 1.8645, + "step": 4256 + }, + { + "epoch": 2.619490808399354, + "grad_norm": 5.750851154327393, + "learning_rate": 8.313433479768262e-07, + "loss": 2.0146, + "step": 4257 + }, + { + "epoch": 2.6201061456811017, + "grad_norm": 5.920788288116455, + "learning_rate": 8.286928535017824e-07, + "loss": 1.5907, + "step": 4258 + }, + { + "epoch": 2.620721482962849, + "grad_norm": 6.548920631408691, + "learning_rate": 8.260464082901732e-07, + "loss": 1.9728, + "step": 4259 + }, + { + "epoch": 2.6213368202445966, + "grad_norm": 6.509479522705078, + "learning_rate": 8.234040135104437e-07, + "loss": 1.7242, + "step": 4260 + }, + { + "epoch": 
2.6219521575263443, + "grad_norm": 5.9722819328308105, + "learning_rate": 8.207656703292477e-07, + "loss": 1.7237, + "step": 4261 + }, + { + "epoch": 2.6225674948080915, + "grad_norm": 6.7902913093566895, + "learning_rate": 8.181313799114509e-07, + "loss": 1.8345, + "step": 4262 + }, + { + "epoch": 2.623182832089839, + "grad_norm": 6.082269191741943, + "learning_rate": 8.155011434201332e-07, + "loss": 1.8501, + "step": 4263 + }, + { + "epoch": 2.623798169371587, + "grad_norm": 7.628478527069092, + "learning_rate": 8.128749620165788e-07, + "loss": 1.651, + "step": 4264 + }, + { + "epoch": 2.6244135066533345, + "grad_norm": 7.207968711853027, + "learning_rate": 8.102528368602835e-07, + "loss": 2.0424, + "step": 4265 + }, + { + "epoch": 2.625028843935082, + "grad_norm": 6.245079040527344, + "learning_rate": 8.076347691089592e-07, + "loss": 1.8737, + "step": 4266 + }, + { + "epoch": 2.6256441812168294, + "grad_norm": 7.93409538269043, + "learning_rate": 8.050207599185134e-07, + "loss": 1.8236, + "step": 4267 + }, + { + "epoch": 2.626259518498577, + "grad_norm": 6.677114963531494, + "learning_rate": 8.024108104430717e-07, + "loss": 1.8245, + "step": 4268 + }, + { + "epoch": 2.6268748557803248, + "grad_norm": 6.860392093658447, + "learning_rate": 7.998049218349624e-07, + "loss": 1.757, + "step": 4269 + }, + { + "epoch": 2.627490193062072, + "grad_norm": 6.117133140563965, + "learning_rate": 7.972030952447274e-07, + "loss": 1.7674, + "step": 4270 + }, + { + "epoch": 2.6281055303438197, + "grad_norm": 6.134293556213379, + "learning_rate": 7.946053318211045e-07, + "loss": 1.6951, + "step": 4271 + }, + { + "epoch": 2.6287208676255673, + "grad_norm": 6.184544563293457, + "learning_rate": 7.920116327110449e-07, + "loss": 1.6561, + "step": 4272 + }, + { + "epoch": 2.6293362049073146, + "grad_norm": 7.049651145935059, + "learning_rate": 7.894219990597063e-07, + "loss": 1.728, + "step": 4273 + }, + { + "epoch": 2.6299515421890622, + "grad_norm": 7.190492153167725, + "learning_rate": 7.868364320104482e-07, + "loss": 1.6147, + "step": 4274 + }, + { + "epoch": 2.63056687947081, + "grad_norm": 6.168651580810547, + "learning_rate": 7.842549327048366e-07, + "loss": 1.9453, + "step": 4275 + }, + { + "epoch": 2.6311822167525576, + "grad_norm": 6.174099445343018, + "learning_rate": 7.816775022826339e-07, + "loss": 1.702, + "step": 4276 + }, + { + "epoch": 2.6317975540343053, + "grad_norm": 6.510141372680664, + "learning_rate": 7.791041418818213e-07, + "loss": 1.7665, + "step": 4277 + }, + { + "epoch": 2.6324128913160525, + "grad_norm": 6.2254228591918945, + "learning_rate": 7.765348526385685e-07, + "loss": 1.7662, + "step": 4278 + }, + { + "epoch": 2.6330282285978, + "grad_norm": 7.173844814300537, + "learning_rate": 7.739696356872562e-07, + "loss": 1.904, + "step": 4279 + }, + { + "epoch": 2.633643565879548, + "grad_norm": 5.771015644073486, + "learning_rate": 7.714084921604636e-07, + "loss": 1.9442, + "step": 4280 + }, + { + "epoch": 2.634258903161295, + "grad_norm": 5.204777240753174, + "learning_rate": 7.688514231889721e-07, + "loss": 1.8528, + "step": 4281 + }, + { + "epoch": 2.6348742404430427, + "grad_norm": 6.775053024291992, + "learning_rate": 7.66298429901765e-07, + "loss": 1.83, + "step": 4282 + }, + { + "epoch": 2.6354895777247904, + "grad_norm": 6.577121734619141, + "learning_rate": 7.637495134260242e-07, + "loss": 1.8004, + "step": 4283 + }, + { + "epoch": 2.636104915006538, + "grad_norm": 7.259607791900635, + "learning_rate": 7.612046748871327e-07, + "loss": 1.872, + "step": 4284 + }, + { + 
"epoch": 2.6367202522882858, + "grad_norm": 6.054201126098633, + "learning_rate": 7.58663915408675e-07, + "loss": 1.8645, + "step": 4285 + }, + { + "epoch": 2.637335589570033, + "grad_norm": 6.716041088104248, + "learning_rate": 7.561272361124306e-07, + "loss": 1.9639, + "step": 4286 + }, + { + "epoch": 2.6379509268517807, + "grad_norm": 5.4171905517578125, + "learning_rate": 7.535946381183812e-07, + "loss": 1.8679, + "step": 4287 + }, + { + "epoch": 2.6385662641335283, + "grad_norm": 6.711225509643555, + "learning_rate": 7.510661225447057e-07, + "loss": 1.6901, + "step": 4288 + }, + { + "epoch": 2.6391816014152756, + "grad_norm": 7.435968399047852, + "learning_rate": 7.48541690507778e-07, + "loss": 1.8846, + "step": 4289 + }, + { + "epoch": 2.6397969386970233, + "grad_norm": 5.864813327789307, + "learning_rate": 7.460213431221719e-07, + "loss": 1.7608, + "step": 4290 + }, + { + "epoch": 2.640412275978771, + "grad_norm": 5.909487247467041, + "learning_rate": 7.435050815006562e-07, + "loss": 1.8795, + "step": 4291 + }, + { + "epoch": 2.641027613260518, + "grad_norm": 6.186119079589844, + "learning_rate": 7.409929067541977e-07, + "loss": 1.7261, + "step": 4292 + }, + { + "epoch": 2.641642950542266, + "grad_norm": 9.145379066467285, + "learning_rate": 7.384848199919558e-07, + "loss": 1.8866, + "step": 4293 + }, + { + "epoch": 2.6422582878240135, + "grad_norm": 7.17814302444458, + "learning_rate": 7.359808223212905e-07, + "loss": 1.7643, + "step": 4294 + }, + { + "epoch": 2.642873625105761, + "grad_norm": 6.265758991241455, + "learning_rate": 7.334809148477484e-07, + "loss": 1.6897, + "step": 4295 + }, + { + "epoch": 2.643488962387509, + "grad_norm": 6.261251926422119, + "learning_rate": 7.309850986750766e-07, + "loss": 1.8918, + "step": 4296 + }, + { + "epoch": 2.644104299669256, + "grad_norm": 6.927257061004639, + "learning_rate": 7.284933749052103e-07, + "loss": 1.5625, + "step": 4297 + }, + { + "epoch": 2.6447196369510038, + "grad_norm": 7.575664043426514, + "learning_rate": 7.260057446382862e-07, + "loss": 1.7219, + "step": 4298 + }, + { + "epoch": 2.6453349742327514, + "grad_norm": 7.540083885192871, + "learning_rate": 7.23522208972628e-07, + "loss": 1.4142, + "step": 4299 + }, + { + "epoch": 2.6459503115144987, + "grad_norm": 7.002643585205078, + "learning_rate": 7.210427690047484e-07, + "loss": 1.7019, + "step": 4300 + }, + { + "epoch": 2.6465656487962463, + "grad_norm": 5.48466157913208, + "learning_rate": 7.185674258293584e-07, + "loss": 1.8223, + "step": 4301 + }, + { + "epoch": 2.647180986077994, + "grad_norm": 6.054097652435303, + "learning_rate": 7.160961805393574e-07, + "loss": 1.8064, + "step": 4302 + }, + { + "epoch": 2.6477963233597417, + "grad_norm": 7.095670223236084, + "learning_rate": 7.136290342258378e-07, + "loss": 1.8942, + "step": 4303 + }, + { + "epoch": 2.6484116606414894, + "grad_norm": 6.822598457336426, + "learning_rate": 7.11165987978073e-07, + "loss": 1.9118, + "step": 4304 + }, + { + "epoch": 2.6490269979232366, + "grad_norm": 6.867780685424805, + "learning_rate": 7.087070428835397e-07, + "loss": 1.6477, + "step": 4305 + }, + { + "epoch": 2.6496423352049843, + "grad_norm": 5.868463516235352, + "learning_rate": 7.062522000278959e-07, + "loss": 1.8325, + "step": 4306 + }, + { + "epoch": 2.650257672486732, + "grad_norm": 7.023681163787842, + "learning_rate": 7.038014604949883e-07, + "loss": 1.678, + "step": 4307 + }, + { + "epoch": 2.650873009768479, + "grad_norm": 6.7221198081970215, + "learning_rate": 7.013548253668556e-07, + "loss": 1.849, + "step": 4308 + }, 
+ { + "epoch": 2.651488347050227, + "grad_norm": 7.389273643493652, + "learning_rate": 6.989122957237204e-07, + "loss": 1.8442, + "step": 4309 + }, + { + "epoch": 2.6521036843319745, + "grad_norm": 7.581671237945557, + "learning_rate": 6.964738726439957e-07, + "loss": 1.6567, + "step": 4310 + }, + { + "epoch": 2.6527190216137218, + "grad_norm": 5.327346324920654, + "learning_rate": 6.940395572042791e-07, + "loss": 1.8192, + "step": 4311 + }, + { + "epoch": 2.6533343588954694, + "grad_norm": 6.076255798339844, + "learning_rate": 6.916093504793564e-07, + "loss": 1.6587, + "step": 4312 + }, + { + "epoch": 2.653949696177217, + "grad_norm": 7.301446914672852, + "learning_rate": 6.891832535421994e-07, + "loss": 2.0271, + "step": 4313 + }, + { + "epoch": 2.6545650334589648, + "grad_norm": 7.914007663726807, + "learning_rate": 6.867612674639634e-07, + "loss": 1.9134, + "step": 4314 + }, + { + "epoch": 2.6551803707407124, + "grad_norm": 6.371395111083984, + "learning_rate": 6.843433933139909e-07, + "loss": 1.78, + "step": 4315 + }, + { + "epoch": 2.6557957080224597, + "grad_norm": 6.42514181137085, + "learning_rate": 6.819296321598067e-07, + "loss": 1.7406, + "step": 4316 + }, + { + "epoch": 2.6564110453042074, + "grad_norm": 6.4979400634765625, + "learning_rate": 6.79519985067123e-07, + "loss": 1.8206, + "step": 4317 + }, + { + "epoch": 2.657026382585955, + "grad_norm": 6.017232418060303, + "learning_rate": 6.771144530998308e-07, + "loss": 1.7972, + "step": 4318 + }, + { + "epoch": 2.6576417198677023, + "grad_norm": 7.042778491973877, + "learning_rate": 6.747130373200095e-07, + "loss": 1.8683, + "step": 4319 + }, + { + "epoch": 2.65825705714945, + "grad_norm": 5.435171604156494, + "learning_rate": 6.723157387879164e-07, + "loss": 1.6744, + "step": 4320 + }, + { + "epoch": 2.6588723944311976, + "grad_norm": 5.309959411621094, + "learning_rate": 6.699225585619928e-07, + "loss": 1.7582, + "step": 4321 + }, + { + "epoch": 2.6594877317129453, + "grad_norm": 5.9647440910339355, + "learning_rate": 6.675334976988656e-07, + "loss": 1.6913, + "step": 4322 + }, + { + "epoch": 2.660103068994693, + "grad_norm": 6.629495143890381, + "learning_rate": 6.651485572533379e-07, + "loss": 1.7824, + "step": 4323 + }, + { + "epoch": 2.66071840627644, + "grad_norm": 6.376944065093994, + "learning_rate": 6.627677382783926e-07, + "loss": 1.7821, + "step": 4324 + }, + { + "epoch": 2.661333743558188, + "grad_norm": 5.4804205894470215, + "learning_rate": 6.603910418251958e-07, + "loss": 1.7727, + "step": 4325 + }, + { + "epoch": 2.6619490808399355, + "grad_norm": 7.765096187591553, + "learning_rate": 6.580184689430958e-07, + "loss": 1.8295, + "step": 4326 + }, + { + "epoch": 2.6625644181216828, + "grad_norm": 6.491728782653809, + "learning_rate": 6.556500206796179e-07, + "loss": 1.7319, + "step": 4327 + }, + { + "epoch": 2.6631797554034304, + "grad_norm": 6.785852432250977, + "learning_rate": 6.532856980804602e-07, + "loss": 1.9261, + "step": 4328 + }, + { + "epoch": 2.663795092685178, + "grad_norm": 6.097753524780273, + "learning_rate": 6.509255021895111e-07, + "loss": 1.9105, + "step": 4329 + }, + { + "epoch": 2.664410429966926, + "grad_norm": 6.057290077209473, + "learning_rate": 6.48569434048828e-07, + "loss": 1.92, + "step": 4330 + }, + { + "epoch": 2.665025767248673, + "grad_norm": 8.148725509643555, + "learning_rate": 6.462174946986511e-07, + "loss": 1.717, + "step": 4331 + }, + { + "epoch": 2.6656411045304207, + "grad_norm": 6.567966461181641, + "learning_rate": 6.438696851773906e-07, + "loss": 2.0712, + "step": 4332 
+ }, + { + "epoch": 2.6662564418121684, + "grad_norm": 5.712863445281982, + "learning_rate": 6.415260065216422e-07, + "loss": 1.6564, + "step": 4333 + }, + { + "epoch": 2.666871779093916, + "grad_norm": 6.704305171966553, + "learning_rate": 6.391864597661723e-07, + "loss": 1.9258, + "step": 4334 + }, + { + "epoch": 2.6674871163756633, + "grad_norm": 6.13756799697876, + "learning_rate": 6.368510459439248e-07, + "loss": 1.6446, + "step": 4335 + }, + { + "epoch": 2.668102453657411, + "grad_norm": 7.9060750007629395, + "learning_rate": 6.34519766086017e-07, + "loss": 1.5896, + "step": 4336 + }, + { + "epoch": 2.6687177909391586, + "grad_norm": 8.71943473815918, + "learning_rate": 6.321926212217432e-07, + "loss": 1.7601, + "step": 4337 + }, + { + "epoch": 2.669333128220906, + "grad_norm": 6.175576686859131, + "learning_rate": 6.298696123785719e-07, + "loss": 1.8432, + "step": 4338 + }, + { + "epoch": 2.6699484655026535, + "grad_norm": 7.038666725158691, + "learning_rate": 6.275507405821435e-07, + "loss": 1.7734, + "step": 4339 + }, + { + "epoch": 2.670563802784401, + "grad_norm": 6.203588008880615, + "learning_rate": 6.252360068562735e-07, + "loss": 1.8614, + "step": 4340 + }, + { + "epoch": 2.671179140066149, + "grad_norm": 6.999863147735596, + "learning_rate": 6.2292541222295e-07, + "loss": 1.6648, + "step": 4341 + }, + { + "epoch": 2.6717944773478965, + "grad_norm": 6.28922700881958, + "learning_rate": 6.206189577023347e-07, + "loss": 1.6776, + "step": 4342 + }, + { + "epoch": 2.6724098146296438, + "grad_norm": 6.516401767730713, + "learning_rate": 6.183166443127587e-07, + "loss": 1.8244, + "step": 4343 + }, + { + "epoch": 2.6730251519113915, + "grad_norm": 6.19266414642334, + "learning_rate": 6.160184730707264e-07, + "loss": 1.8671, + "step": 4344 + }, + { + "epoch": 2.673640489193139, + "grad_norm": 5.3545732498168945, + "learning_rate": 6.137244449909153e-07, + "loss": 1.8233, + "step": 4345 + }, + { + "epoch": 2.6742558264748864, + "grad_norm": 6.897907733917236, + "learning_rate": 6.114345610861672e-07, + "loss": 1.9606, + "step": 4346 + }, + { + "epoch": 2.674871163756634, + "grad_norm": 5.9486260414123535, + "learning_rate": 6.091488223675058e-07, + "loss": 1.6592, + "step": 4347 + }, + { + "epoch": 2.6754865010383817, + "grad_norm": 7.822542190551758, + "learning_rate": 6.068672298441114e-07, + "loss": 1.8572, + "step": 4348 + }, + { + "epoch": 2.6761018383201294, + "grad_norm": 7.222146034240723, + "learning_rate": 6.045897845233395e-07, + "loss": 1.7731, + "step": 4349 + }, + { + "epoch": 2.676717175601877, + "grad_norm": 5.77739953994751, + "learning_rate": 6.023164874107202e-07, + "loss": 1.7312, + "step": 4350 + }, + { + "epoch": 2.6773325128836243, + "grad_norm": 6.659698009490967, + "learning_rate": 6.000473395099438e-07, + "loss": 1.8077, + "step": 4351 + }, + { + "epoch": 2.677947850165372, + "grad_norm": 6.583667278289795, + "learning_rate": 5.977823418228701e-07, + "loss": 1.8324, + "step": 4352 + }, + { + "epoch": 2.6785631874471196, + "grad_norm": 5.130437850952148, + "learning_rate": 5.95521495349527e-07, + "loss": 1.8575, + "step": 4353 + }, + { + "epoch": 2.679178524728867, + "grad_norm": 5.9315643310546875, + "learning_rate": 5.932648010881159e-07, + "loss": 1.844, + "step": 4354 + }, + { + "epoch": 2.6797938620106145, + "grad_norm": 6.762899875640869, + "learning_rate": 5.910122600349966e-07, + "loss": 1.6119, + "step": 4355 + }, + { + "epoch": 2.680409199292362, + "grad_norm": 7.00297737121582, + "learning_rate": 5.887638731846967e-07, + "loss": 1.9281, + "step": 
4356 + }, + { + "epoch": 2.6810245365741094, + "grad_norm": 6.152677059173584, + "learning_rate": 5.865196415299135e-07, + "loss": 1.8087, + "step": 4357 + }, + { + "epoch": 2.681639873855857, + "grad_norm": 8.291614532470703, + "learning_rate": 5.842795660615064e-07, + "loss": 1.6566, + "step": 4358 + }, + { + "epoch": 2.682255211137605, + "grad_norm": 6.967887878417969, + "learning_rate": 5.820436477685021e-07, + "loss": 1.8781, + "step": 4359 + }, + { + "epoch": 2.6828705484193525, + "grad_norm": 6.332274436950684, + "learning_rate": 5.798118876380876e-07, + "loss": 1.9416, + "step": 4360 + }, + { + "epoch": 2.6834858857011, + "grad_norm": 6.37031364440918, + "learning_rate": 5.775842866556191e-07, + "loss": 1.9911, + "step": 4361 + }, + { + "epoch": 2.6841012229828474, + "grad_norm": 7.661913871765137, + "learning_rate": 5.753608458046145e-07, + "loss": 1.8507, + "step": 4362 + }, + { + "epoch": 2.684716560264595, + "grad_norm": 6.28947114944458, + "learning_rate": 5.73141566066755e-07, + "loss": 1.7187, + "step": 4363 + }, + { + "epoch": 2.6853318975463427, + "grad_norm": 5.666639804840088, + "learning_rate": 5.709264484218835e-07, + "loss": 1.8231, + "step": 4364 + }, + { + "epoch": 2.68594723482809, + "grad_norm": 6.97182559967041, + "learning_rate": 5.687154938480055e-07, + "loss": 1.6661, + "step": 4365 + }, + { + "epoch": 2.6865625721098376, + "grad_norm": 5.723969459533691, + "learning_rate": 5.665087033212902e-07, + "loss": 1.7437, + "step": 4366 + }, + { + "epoch": 2.6871779093915853, + "grad_norm": 6.024521827697754, + "learning_rate": 5.64306077816068e-07, + "loss": 1.9389, + "step": 4367 + }, + { + "epoch": 2.687793246673333, + "grad_norm": 5.670562744140625, + "learning_rate": 5.62107618304828e-07, + "loss": 1.8684, + "step": 4368 + }, + { + "epoch": 2.6884085839550806, + "grad_norm": 5.853975296020508, + "learning_rate": 5.599133257582212e-07, + "loss": 1.6767, + "step": 4369 + }, + { + "epoch": 2.689023921236828, + "grad_norm": 5.4195637702941895, + "learning_rate": 5.577232011450595e-07, + "loss": 1.7652, + "step": 4370 + }, + { + "epoch": 2.6896392585185755, + "grad_norm": 7.387396335601807, + "learning_rate": 5.555372454323182e-07, + "loss": 1.6403, + "step": 4371 + }, + { + "epoch": 2.6902545958003232, + "grad_norm": 5.024430274963379, + "learning_rate": 5.533554595851231e-07, + "loss": 1.7411, + "step": 4372 + }, + { + "epoch": 2.6908699330820705, + "grad_norm": 6.326539039611816, + "learning_rate": 5.511778445667648e-07, + "loss": 1.5012, + "step": 4373 + }, + { + "epoch": 2.691485270363818, + "grad_norm": 5.254311561584473, + "learning_rate": 5.490044013386919e-07, + "loss": 1.7571, + "step": 4374 + }, + { + "epoch": 2.692100607645566, + "grad_norm": 5.486382007598877, + "learning_rate": 5.468351308605135e-07, + "loss": 1.9042, + "step": 4375 + }, + { + "epoch": 2.692715944927313, + "grad_norm": 6.92351770401001, + "learning_rate": 5.446700340899891e-07, + "loss": 1.8899, + "step": 4376 + }, + { + "epoch": 2.6933312822090607, + "grad_norm": 6.441606044769287, + "learning_rate": 5.425091119830417e-07, + "loss": 1.8524, + "step": 4377 + }, + { + "epoch": 2.6939466194908084, + "grad_norm": 5.663888931274414, + "learning_rate": 5.403523654937504e-07, + "loss": 1.9419, + "step": 4378 + }, + { + "epoch": 2.694561956772556, + "grad_norm": 6.5376410484313965, + "learning_rate": 5.3819979557435e-07, + "loss": 1.5236, + "step": 4379 + }, + { + "epoch": 2.6951772940543037, + "grad_norm": 7.125683307647705, + "learning_rate": 5.360514031752284e-07, + "loss": 1.8262, + "step": 
4380 + }, + { + "epoch": 2.695792631336051, + "grad_norm": 4.89567232131958, + "learning_rate": 5.339071892449321e-07, + "loss": 1.796, + "step": 4381 + }, + { + "epoch": 2.6964079686177986, + "grad_norm": 6.341275215148926, + "learning_rate": 5.317671547301651e-07, + "loss": 1.7251, + "step": 4382 + }, + { + "epoch": 2.6970233058995463, + "grad_norm": 6.209078788757324, + "learning_rate": 5.296313005757814e-07, + "loss": 1.921, + "step": 4383 + }, + { + "epoch": 2.6976386431812935, + "grad_norm": 5.45692253112793, + "learning_rate": 5.27499627724789e-07, + "loss": 1.6319, + "step": 4384 + }, + { + "epoch": 2.698253980463041, + "grad_norm": 6.191665172576904, + "learning_rate": 5.253721371183563e-07, + "loss": 1.9512, + "step": 4385 + }, + { + "epoch": 2.698869317744789, + "grad_norm": 6.488903045654297, + "learning_rate": 5.232488296957983e-07, + "loss": 1.8328, + "step": 4386 + }, + { + "epoch": 2.6994846550265366, + "grad_norm": 6.506208419799805, + "learning_rate": 5.211297063945875e-07, + "loss": 1.8233, + "step": 4387 + }, + { + "epoch": 2.7000999923082842, + "grad_norm": 6.95944356918335, + "learning_rate": 5.190147681503421e-07, + "loss": 1.6297, + "step": 4388 + }, + { + "epoch": 2.7007153295900315, + "grad_norm": 6.933115005493164, + "learning_rate": 5.169040158968431e-07, + "loss": 1.8388, + "step": 4389 + }, + { + "epoch": 2.701330666871779, + "grad_norm": 7.4567365646362305, + "learning_rate": 5.14797450566017e-07, + "loss": 1.8658, + "step": 4390 + }, + { + "epoch": 2.701946004153527, + "grad_norm": 6.165879249572754, + "learning_rate": 5.126950730879399e-07, + "loss": 1.952, + "step": 4391 + }, + { + "epoch": 2.702561341435274, + "grad_norm": 6.176125526428223, + "learning_rate": 5.105968843908449e-07, + "loss": 1.7757, + "step": 4392 + }, + { + "epoch": 2.7031766787170217, + "grad_norm": 6.212268829345703, + "learning_rate": 5.085028854011098e-07, + "loss": 1.8998, + "step": 4393 + }, + { + "epoch": 2.7037920159987694, + "grad_norm": 9.149742126464844, + "learning_rate": 5.064130770432651e-07, + "loss": 1.7228, + "step": 4394 + }, + { + "epoch": 2.7044073532805166, + "grad_norm": 7.668326377868652, + "learning_rate": 5.043274602399939e-07, + "loss": 1.6896, + "step": 4395 + }, + { + "epoch": 2.7050226905622643, + "grad_norm": 6.622905254364014, + "learning_rate": 5.022460359121228e-07, + "loss": 1.6688, + "step": 4396 + }, + { + "epoch": 2.705638027844012, + "grad_norm": 5.296874046325684, + "learning_rate": 5.001688049786324e-07, + "loss": 1.9427, + "step": 4397 + }, + { + "epoch": 2.7062533651257596, + "grad_norm": 6.725078582763672, + "learning_rate": 4.980957683566468e-07, + "loss": 1.8126, + "step": 4398 + }, + { + "epoch": 2.7068687024075073, + "grad_norm": 5.979475021362305, + "learning_rate": 4.96026926961447e-07, + "loss": 1.7855, + "step": 4399 + }, + { + "epoch": 2.7074840396892546, + "grad_norm": 6.2777533531188965, + "learning_rate": 4.939622817064516e-07, + "loss": 1.6778, + "step": 4400 + }, + { + "epoch": 2.7080993769710022, + "grad_norm": 6.269004821777344, + "learning_rate": 4.919018335032322e-07, + "loss": 1.6609, + "step": 4401 + }, + { + "epoch": 2.70871471425275, + "grad_norm": 6.488715171813965, + "learning_rate": 4.898455832615057e-07, + "loss": 1.5954, + "step": 4402 + }, + { + "epoch": 2.709330051534497, + "grad_norm": 6.032607078552246, + "learning_rate": 4.877935318891381e-07, + "loss": 1.8254, + "step": 4403 + }, + { + "epoch": 2.709945388816245, + "grad_norm": 5.702697277069092, + "learning_rate": 4.857456802921368e-07, + "loss": 1.7404, + 
"step": 4404 + }, + { + "epoch": 2.7105607260979925, + "grad_norm": 6.400503158569336, + "learning_rate": 4.837020293746575e-07, + "loss": 1.8071, + "step": 4405 + }, + { + "epoch": 2.71117606337974, + "grad_norm": 7.0312042236328125, + "learning_rate": 4.81662580039004e-07, + "loss": 1.747, + "step": 4406 + }, + { + "epoch": 2.711791400661488, + "grad_norm": 7.444106578826904, + "learning_rate": 4.796273331856227e-07, + "loss": 1.8554, + "step": 4407 + }, + { + "epoch": 2.712406737943235, + "grad_norm": 6.569211483001709, + "learning_rate": 4.775962897131004e-07, + "loss": 1.7513, + "step": 4408 + }, + { + "epoch": 2.7130220752249827, + "grad_norm": 5.83116340637207, + "learning_rate": 4.755694505181729e-07, + "loss": 1.9127, + "step": 4409 + }, + { + "epoch": 2.7136374125067304, + "grad_norm": 6.297616958618164, + "learning_rate": 4.735468164957213e-07, + "loss": 1.6697, + "step": 4410 + }, + { + "epoch": 2.7142527497884776, + "grad_norm": 6.0088982582092285, + "learning_rate": 4.715283885387678e-07, + "loss": 1.6779, + "step": 4411 + }, + { + "epoch": 2.7148680870702253, + "grad_norm": 6.047365188598633, + "learning_rate": 4.695141675384729e-07, + "loss": 1.8881, + "step": 4412 + }, + { + "epoch": 2.715483424351973, + "grad_norm": 5.807113170623779, + "learning_rate": 4.6750415438414745e-07, + "loss": 1.6672, + "step": 4413 + }, + { + "epoch": 2.71609876163372, + "grad_norm": 6.102477550506592, + "learning_rate": 4.654983499632393e-07, + "loss": 1.8719, + "step": 4414 + }, + { + "epoch": 2.716714098915468, + "grad_norm": 6.738120079040527, + "learning_rate": 4.634967551613423e-07, + "loss": 1.6547, + "step": 4415 + }, + { + "epoch": 2.7173294361972156, + "grad_norm": 5.66143798828125, + "learning_rate": 4.614993708621862e-07, + "loss": 1.8012, + "step": 4416 + }, + { + "epoch": 2.7179447734789632, + "grad_norm": 5.869951248168945, + "learning_rate": 4.5950619794764674e-07, + "loss": 1.8653, + "step": 4417 + }, + { + "epoch": 2.718560110760711, + "grad_norm": 7.043490886688232, + "learning_rate": 4.5751723729773766e-07, + "loss": 1.8141, + "step": 4418 + }, + { + "epoch": 2.719175448042458, + "grad_norm": 7.703203201293945, + "learning_rate": 4.555324897906133e-07, + "loss": 1.6654, + "step": 4419 + }, + { + "epoch": 2.719790785324206, + "grad_norm": 6.845963478088379, + "learning_rate": 4.535519563025692e-07, + "loss": 1.8836, + "step": 4420 + }, + { + "epoch": 2.7204061226059535, + "grad_norm": 6.8815436363220215, + "learning_rate": 4.5157563770803826e-07, + "loss": 1.8488, + "step": 4421 + }, + { + "epoch": 2.7210214598877007, + "grad_norm": 6.4598774909973145, + "learning_rate": 4.4960353487959354e-07, + "loss": 1.8732, + "step": 4422 + }, + { + "epoch": 2.7216367971694484, + "grad_norm": 6.271141529083252, + "learning_rate": 4.476356486879474e-07, + "loss": 2.0151, + "step": 4423 + }, + { + "epoch": 2.722252134451196, + "grad_norm": 7.343278884887695, + "learning_rate": 4.456719800019482e-07, + "loss": 1.9489, + "step": 4424 + }, + { + "epoch": 2.7228674717329437, + "grad_norm": 5.756350994110107, + "learning_rate": 4.437125296885847e-07, + "loss": 1.9109, + "step": 4425 + }, + { + "epoch": 2.7234828090146914, + "grad_norm": 7.252222537994385, + "learning_rate": 4.4175729861298144e-07, + "loss": 1.6647, + "step": 4426 + }, + { + "epoch": 2.7240981462964386, + "grad_norm": 7.559205055236816, + "learning_rate": 4.398062876384046e-07, + "loss": 1.6325, + "step": 4427 + }, + { + "epoch": 2.7247134835781863, + "grad_norm": 6.360380172729492, + "learning_rate": 4.378594976262496e-07, + 
"loss": 1.8668, + "step": 4428 + }, + { + "epoch": 2.725328820859934, + "grad_norm": 7.158263683319092, + "learning_rate": 4.3591692943605323e-07, + "loss": 1.8758, + "step": 4429 + }, + { + "epoch": 2.7259441581416812, + "grad_norm": 6.634328842163086, + "learning_rate": 4.339785839254851e-07, + "loss": 1.7559, + "step": 4430 + }, + { + "epoch": 2.726559495423429, + "grad_norm": 6.3224358558654785, + "learning_rate": 4.320444619503583e-07, + "loss": 1.8675, + "step": 4431 + }, + { + "epoch": 2.7271748327051766, + "grad_norm": 5.865268707275391, + "learning_rate": 4.3011456436460985e-07, + "loss": 2.0199, + "step": 4432 + }, + { + "epoch": 2.727790169986924, + "grad_norm": 6.673404216766357, + "learning_rate": 4.28188892020317e-07, + "loss": 1.6597, + "step": 4433 + }, + { + "epoch": 2.7284055072686715, + "grad_norm": 5.153737545013428, + "learning_rate": 4.262674457676952e-07, + "loss": 1.8426, + "step": 4434 + }, + { + "epoch": 2.729020844550419, + "grad_norm": 10.12548828125, + "learning_rate": 4.243502264550903e-07, + "loss": 1.6469, + "step": 4435 + }, + { + "epoch": 2.729636181832167, + "grad_norm": 6.472949504852295, + "learning_rate": 4.224372349289796e-07, + "loss": 1.9109, + "step": 4436 + }, + { + "epoch": 2.7302515191139145, + "grad_norm": 5.6896467208862305, + "learning_rate": 4.205284720339764e-07, + "loss": 1.7845, + "step": 4437 + }, + { + "epoch": 2.7308668563956617, + "grad_norm": 5.3740315437316895, + "learning_rate": 4.186239386128288e-07, + "loss": 1.8261, + "step": 4438 + }, + { + "epoch": 2.7314821936774094, + "grad_norm": 5.778078079223633, + "learning_rate": 4.1672363550641415e-07, + "loss": 1.9476, + "step": 4439 + }, + { + "epoch": 2.732097530959157, + "grad_norm": 5.563880443572998, + "learning_rate": 4.1482756355374463e-07, + "loss": 1.6353, + "step": 4440 + }, + { + "epoch": 2.7327128682409043, + "grad_norm": 5.875546932220459, + "learning_rate": 4.129357235919618e-07, + "loss": 1.7873, + "step": 4441 + }, + { + "epoch": 2.733328205522652, + "grad_norm": 5.526837348937988, + "learning_rate": 4.110481164563407e-07, + "loss": 1.7846, + "step": 4442 + }, + { + "epoch": 2.7339435428043997, + "grad_norm": 6.560861110687256, + "learning_rate": 4.0916474298028694e-07, + "loss": 1.8861, + "step": 4443 + }, + { + "epoch": 2.7345588800861473, + "grad_norm": 6.67637825012207, + "learning_rate": 4.072856039953366e-07, + "loss": 1.6077, + "step": 4444 + }, + { + "epoch": 2.735174217367895, + "grad_norm": 6.130690097808838, + "learning_rate": 4.0541070033115804e-07, + "loss": 1.6958, + "step": 4445 + }, + { + "epoch": 2.7357895546496422, + "grad_norm": 5.9361748695373535, + "learning_rate": 4.0354003281554586e-07, + "loss": 1.7902, + "step": 4446 + }, + { + "epoch": 2.73640489193139, + "grad_norm": 8.047869682312012, + "learning_rate": 4.016736022744272e-07, + "loss": 1.7568, + "step": 4447 + }, + { + "epoch": 2.7370202292131376, + "grad_norm": 7.714299201965332, + "learning_rate": 3.998114095318584e-07, + "loss": 1.6283, + "step": 4448 + }, + { + "epoch": 2.737635566494885, + "grad_norm": 6.3952202796936035, + "learning_rate": 3.9795345541002395e-07, + "loss": 1.9272, + "step": 4449 + }, + { + "epoch": 2.7382509037766325, + "grad_norm": 7.280806064605713, + "learning_rate": 3.960997407292366e-07, + "loss": 1.8924, + "step": 4450 + }, + { + "epoch": 2.73886624105838, + "grad_norm": 7.1218085289001465, + "learning_rate": 3.942502663079395e-07, + "loss": 1.7735, + "step": 4451 + }, + { + "epoch": 2.739481578340128, + "grad_norm": 6.618092060089111, + "learning_rate": 
3.924050329627005e-07, + "loss": 1.98, + "step": 4452 + }, + { + "epoch": 2.7400969156218755, + "grad_norm": 7.055030822753906, + "learning_rate": 3.9056404150821793e-07, + "loss": 1.5609, + "step": 4453 + }, + { + "epoch": 2.7407122529036227, + "grad_norm": 6.028870582580566, + "learning_rate": 3.887272927573127e-07, + "loss": 1.876, + "step": 4454 + }, + { + "epoch": 2.7413275901853704, + "grad_norm": 5.759708404541016, + "learning_rate": 3.8689478752094167e-07, + "loss": 1.7654, + "step": 4455 + }, + { + "epoch": 2.741942927467118, + "grad_norm": 7.273279190063477, + "learning_rate": 3.850665266081766e-07, + "loss": 1.864, + "step": 4456 + }, + { + "epoch": 2.7425582647488653, + "grad_norm": 5.9201555252075195, + "learning_rate": 3.8324251082622276e-07, + "loss": 1.8352, + "step": 4457 + }, + { + "epoch": 2.743173602030613, + "grad_norm": 6.731646537780762, + "learning_rate": 3.814227409804094e-07, + "loss": 1.9026, + "step": 4458 + }, + { + "epoch": 2.7437889393123607, + "grad_norm": 6.536458492279053, + "learning_rate": 3.7960721787419164e-07, + "loss": 1.7827, + "step": 4459 + }, + { + "epoch": 2.744404276594108, + "grad_norm": 6.127935409545898, + "learning_rate": 3.777959423091482e-07, + "loss": 1.7874, + "step": 4460 + }, + { + "epoch": 2.7450196138758556, + "grad_norm": 6.845181465148926, + "learning_rate": 3.759889150849805e-07, + "loss": 1.751, + "step": 4461 + }, + { + "epoch": 2.7456349511576033, + "grad_norm": 6.6616010665893555, + "learning_rate": 3.7418613699952255e-07, + "loss": 1.7484, + "step": 4462 + }, + { + "epoch": 2.746250288439351, + "grad_norm": 5.400940418243408, + "learning_rate": 3.7238760884872216e-07, + "loss": 1.6719, + "step": 4463 + }, + { + "epoch": 2.7468656257210986, + "grad_norm": 7.300556659698486, + "learning_rate": 3.7059333142665854e-07, + "loss": 1.6633, + "step": 4464 + }, + { + "epoch": 2.747480963002846, + "grad_norm": 6.134734630584717, + "learning_rate": 3.688033055255269e-07, + "loss": 1.9422, + "step": 4465 + }, + { + "epoch": 2.7480963002845935, + "grad_norm": 6.7610979080200195, + "learning_rate": 3.670175319356528e-07, + "loss": 1.7677, + "step": 4466 + }, + { + "epoch": 2.748711637566341, + "grad_norm": 5.697208404541016, + "learning_rate": 3.6523601144548003e-07, + "loss": 1.8154, + "step": 4467 + }, + { + "epoch": 2.7493269748480884, + "grad_norm": 5.679961204528809, + "learning_rate": 3.634587448415738e-07, + "loss": 1.8395, + "step": 4468 + }, + { + "epoch": 2.749942312129836, + "grad_norm": 6.787731647491455, + "learning_rate": 3.616857329086254e-07, + "loss": 1.6623, + "step": 4469 + }, + { + "epoch": 2.7505576494115838, + "grad_norm": 6.951547622680664, + "learning_rate": 3.599169764294419e-07, + "loss": 1.6132, + "step": 4470 + }, + { + "epoch": 2.7511729866933314, + "grad_norm": 6.554044723510742, + "learning_rate": 3.5815247618495753e-07, + "loss": 1.6884, + "step": 4471 + }, + { + "epoch": 2.751788323975079, + "grad_norm": 6.424379348754883, + "learning_rate": 3.5639223295422245e-07, + "loss": 1.8124, + "step": 4472 + }, + { + "epoch": 2.7524036612568263, + "grad_norm": 5.94189977645874, + "learning_rate": 3.546362475144105e-07, + "loss": 1.8271, + "step": 4473 + }, + { + "epoch": 2.753018998538574, + "grad_norm": 7.937407493591309, + "learning_rate": 3.5288452064081267e-07, + "loss": 1.9294, + "step": 4474 + }, + { + "epoch": 2.7536343358203217, + "grad_norm": 7.247797012329102, + "learning_rate": 3.5113705310684363e-07, + "loss": 1.8296, + "step": 4475 + }, + { + "epoch": 2.754249673102069, + "grad_norm": 
6.5120768547058105, + "learning_rate": 3.49393845684034e-07, + "loss": 1.622, + "step": 4476 + }, + { + "epoch": 2.7548650103838166, + "grad_norm": 5.791457176208496, + "learning_rate": 3.476548991420359e-07, + "loss": 1.8591, + "step": 4477 + }, + { + "epoch": 2.7554803476655643, + "grad_norm": 9.234393119812012, + "learning_rate": 3.4592021424861735e-07, + "loss": 1.8251, + "step": 4478 + }, + { + "epoch": 2.7560956849473115, + "grad_norm": 6.959224700927734, + "learning_rate": 3.441897917696679e-07, + "loss": 1.6946, + "step": 4479 + }, + { + "epoch": 2.756711022229059, + "grad_norm": 5.597395896911621, + "learning_rate": 3.424636324691932e-07, + "loss": 1.6208, + "step": 4480 + }, + { + "epoch": 2.757326359510807, + "grad_norm": 6.205461502075195, + "learning_rate": 3.4074173710931804e-07, + "loss": 1.7594, + "step": 4481 + }, + { + "epoch": 2.7579416967925545, + "grad_norm": 6.8126678466796875, + "learning_rate": 3.3902410645028197e-07, + "loss": 1.7231, + "step": 4482 + }, + { + "epoch": 2.758557034074302, + "grad_norm": 6.411548614501953, + "learning_rate": 3.373107412504473e-07, + "loss": 1.933, + "step": 4483 + }, + { + "epoch": 2.7591723713560494, + "grad_norm": 5.3708271980285645, + "learning_rate": 3.3560164226628666e-07, + "loss": 1.8677, + "step": 4484 + }, + { + "epoch": 2.759787708637797, + "grad_norm": 6.937884330749512, + "learning_rate": 3.3389681025239095e-07, + "loss": 1.9156, + "step": 4485 + }, + { + "epoch": 2.7604030459195448, + "grad_norm": 6.299782752990723, + "learning_rate": 3.3219624596146806e-07, + "loss": 1.8669, + "step": 4486 + }, + { + "epoch": 2.761018383201292, + "grad_norm": 6.735232353210449, + "learning_rate": 3.30499950144344e-07, + "loss": 1.9037, + "step": 4487 + }, + { + "epoch": 2.7616337204830397, + "grad_norm": 6.460624694824219, + "learning_rate": 3.288079235499575e-07, + "loss": 1.864, + "step": 4488 + }, + { + "epoch": 2.7622490577647874, + "grad_norm": 6.402883052825928, + "learning_rate": 3.2712016692535877e-07, + "loss": 1.6306, + "step": 4489 + }, + { + "epoch": 2.762864395046535, + "grad_norm": 7.489129066467285, + "learning_rate": 3.2543668101571946e-07, + "loss": 1.7294, + "step": 4490 + }, + { + "epoch": 2.7634797323282827, + "grad_norm": 6.731449127197266, + "learning_rate": 3.2375746656432284e-07, + "loss": 1.8683, + "step": 4491 + }, + { + "epoch": 2.76409506961003, + "grad_norm": 7.001004219055176, + "learning_rate": 3.2208252431256694e-07, + "loss": 1.8202, + "step": 4492 + }, + { + "epoch": 2.7647104068917776, + "grad_norm": 5.787037372589111, + "learning_rate": 3.2041185499995797e-07, + "loss": 1.788, + "step": 4493 + }, + { + "epoch": 2.7653257441735253, + "grad_norm": 6.0310750007629395, + "learning_rate": 3.187454593641248e-07, + "loss": 1.82, + "step": 4494 + }, + { + "epoch": 2.7659410814552725, + "grad_norm": 5.244279384613037, + "learning_rate": 3.170833381408045e-07, + "loss": 1.8066, + "step": 4495 + }, + { + "epoch": 2.76655641873702, + "grad_norm": 6.822119235992432, + "learning_rate": 3.1542549206384666e-07, + "loss": 1.4519, + "step": 4496 + }, + { + "epoch": 2.767171756018768, + "grad_norm": 6.313270568847656, + "learning_rate": 3.1377192186521357e-07, + "loss": 1.6385, + "step": 4497 + }, + { + "epoch": 2.767787093300515, + "grad_norm": 6.014891147613525, + "learning_rate": 3.1212262827498117e-07, + "loss": 1.999, + "step": 4498 + }, + { + "epoch": 2.7684024305822628, + "grad_norm": 6.683852195739746, + "learning_rate": 3.10477612021336e-07, + "loss": 1.846, + "step": 4499 + }, + { + "epoch": 
2.7690177678640104, + "grad_norm": 6.010873317718506, + "learning_rate": 3.088368738305747e-07, + "loss": 1.696, + "step": 4500 + }, + { + "epoch": 2.769633105145758, + "grad_norm": 5.198248863220215, + "learning_rate": 3.07200414427109e-07, + "loss": 1.8032, + "step": 4501 + }, + { + "epoch": 2.770248442427506, + "grad_norm": 6.820478439331055, + "learning_rate": 3.0556823453345765e-07, + "loss": 1.7419, + "step": 4502 + }, + { + "epoch": 2.770863779709253, + "grad_norm": 5.9583916664123535, + "learning_rate": 3.03940334870253e-07, + "loss": 1.6776, + "step": 4503 + }, + { + "epoch": 2.7714791169910007, + "grad_norm": 7.065822601318359, + "learning_rate": 3.023167161562335e-07, + "loss": 1.7789, + "step": 4504 + }, + { + "epoch": 2.7720944542727484, + "grad_norm": 6.148684501647949, + "learning_rate": 3.0069737910825235e-07, + "loss": 1.8289, + "step": 4505 + }, + { + "epoch": 2.7727097915544956, + "grad_norm": 6.6742706298828125, + "learning_rate": 2.9908232444126993e-07, + "loss": 1.81, + "step": 4506 + }, + { + "epoch": 2.7733251288362433, + "grad_norm": 6.747837066650391, + "learning_rate": 2.974715528683547e-07, + "loss": 1.7474, + "step": 4507 + }, + { + "epoch": 2.773940466117991, + "grad_norm": 6.288238048553467, + "learning_rate": 2.958650651006856e-07, + "loss": 1.6611, + "step": 4508 + }, + { + "epoch": 2.7745558033997386, + "grad_norm": 5.994616508483887, + "learning_rate": 2.942628618475507e-07, + "loss": 1.7095, + "step": 4509 + }, + { + "epoch": 2.7751711406814863, + "grad_norm": 7.1365227699279785, + "learning_rate": 2.9266494381634427e-07, + "loss": 1.6855, + "step": 4510 + }, + { + "epoch": 2.7757864779632335, + "grad_norm": 6.523978233337402, + "learning_rate": 2.910713117125719e-07, + "loss": 1.8428, + "step": 4511 + }, + { + "epoch": 2.776401815244981, + "grad_norm": 6.415614128112793, + "learning_rate": 2.894819662398463e-07, + "loss": 1.8994, + "step": 4512 + }, + { + "epoch": 2.777017152526729, + "grad_norm": 6.01107931137085, + "learning_rate": 2.878969080998817e-07, + "loss": 1.8611, + "step": 4513 + }, + { + "epoch": 2.777632489808476, + "grad_norm": 6.176811695098877, + "learning_rate": 2.8631613799250615e-07, + "loss": 1.9254, + "step": 4514 + }, + { + "epoch": 2.7782478270902238, + "grad_norm": 7.679055213928223, + "learning_rate": 2.8473965661565353e-07, + "loss": 1.7208, + "step": 4515 + }, + { + "epoch": 2.7788631643719715, + "grad_norm": 6.030190467834473, + "learning_rate": 2.8316746466536373e-07, + "loss": 2.089, + "step": 4516 + }, + { + "epoch": 2.7794785016537187, + "grad_norm": 5.8066229820251465, + "learning_rate": 2.815995628357771e-07, + "loss": 1.6466, + "step": 4517 + }, + { + "epoch": 2.7800938389354664, + "grad_norm": 7.334859371185303, + "learning_rate": 2.8003595181914866e-07, + "loss": 1.8366, + "step": 4518 + }, + { + "epoch": 2.780709176217214, + "grad_norm": 8.693629264831543, + "learning_rate": 2.784766323058352e-07, + "loss": 1.6808, + "step": 4519 + }, + { + "epoch": 2.7813245134989617, + "grad_norm": 5.372369289398193, + "learning_rate": 2.769216049842982e-07, + "loss": 1.7149, + "step": 4520 + }, + { + "epoch": 2.7819398507807094, + "grad_norm": 6.98655891418457, + "learning_rate": 2.753708705411018e-07, + "loss": 1.8706, + "step": 4521 + }, + { + "epoch": 2.7825551880624566, + "grad_norm": 5.821020126342773, + "learning_rate": 2.738244296609216e-07, + "loss": 1.9199, + "step": 4522 + }, + { + "epoch": 2.7831705253442043, + "grad_norm": 6.069058895111084, + "learning_rate": 2.7228228302653037e-07, + "loss": 1.7857, + "step": 4523 + 
}, + { + "epoch": 2.783785862625952, + "grad_norm": 5.64937162399292, + "learning_rate": 2.707444313188101e-07, + "loss": 1.9216, + "step": 4524 + }, + { + "epoch": 2.784401199907699, + "grad_norm": 6.1329145431518555, + "learning_rate": 2.6921087521674214e-07, + "loss": 1.8491, + "step": 4525 + }, + { + "epoch": 2.785016537189447, + "grad_norm": 5.442582130432129, + "learning_rate": 2.676816153974149e-07, + "loss": 1.9217, + "step": 4526 + }, + { + "epoch": 2.7856318744711945, + "grad_norm": 7.0707173347473145, + "learning_rate": 2.661566525360193e-07, + "loss": 1.7895, + "step": 4527 + }, + { + "epoch": 2.786247211752942, + "grad_norm": 6.0508880615234375, + "learning_rate": 2.646359873058457e-07, + "loss": 1.6585, + "step": 4528 + }, + { + "epoch": 2.78686254903469, + "grad_norm": 5.65071964263916, + "learning_rate": 2.631196203782915e-07, + "loss": 1.821, + "step": 4529 + }, + { + "epoch": 2.787477886316437, + "grad_norm": 7.268174171447754, + "learning_rate": 2.616075524228545e-07, + "loss": 1.8366, + "step": 4530 + }, + { + "epoch": 2.788093223598185, + "grad_norm": 7.841803073883057, + "learning_rate": 2.600997841071329e-07, + "loss": 1.9035, + "step": 4531 + }, + { + "epoch": 2.7887085608799325, + "grad_norm": 5.887470245361328, + "learning_rate": 2.5859631609682966e-07, + "loss": 1.9382, + "step": 4532 + }, + { + "epoch": 2.7893238981616797, + "grad_norm": 5.766026973724365, + "learning_rate": 2.570971490557461e-07, + "loss": 1.8501, + "step": 4533 + }, + { + "epoch": 2.7899392354434274, + "grad_norm": 5.538708686828613, + "learning_rate": 2.556022836457861e-07, + "loss": 1.7697, + "step": 4534 + }, + { + "epoch": 2.790554572725175, + "grad_norm": 5.756974220275879, + "learning_rate": 2.54111720526955e-07, + "loss": 1.8638, + "step": 4535 + }, + { + "epoch": 2.7911699100069223, + "grad_norm": 5.282552719116211, + "learning_rate": 2.526254603573586e-07, + "loss": 1.7387, + "step": 4536 + }, + { + "epoch": 2.79178524728867, + "grad_norm": 5.481592655181885, + "learning_rate": 2.511435037931986e-07, + "loss": 1.9747, + "step": 4537 + }, + { + "epoch": 2.7924005845704176, + "grad_norm": 6.47569465637207, + "learning_rate": 2.4966585148878287e-07, + "loss": 1.609, + "step": 4538 + }, + { + "epoch": 2.7930159218521653, + "grad_norm": 6.860456943511963, + "learning_rate": 2.4819250409651605e-07, + "loss": 1.8199, + "step": 4539 + }, + { + "epoch": 2.793631259133913, + "grad_norm": 8.161920547485352, + "learning_rate": 2.4672346226690123e-07, + "loss": 1.6729, + "step": 4540 + }, + { + "epoch": 2.79424659641566, + "grad_norm": 6.647477626800537, + "learning_rate": 2.452587266485418e-07, + "loss": 1.9423, + "step": 4541 + }, + { + "epoch": 2.794861933697408, + "grad_norm": 6.743884563446045, + "learning_rate": 2.4379829788813815e-07, + "loss": 1.7377, + "step": 4542 + }, + { + "epoch": 2.7954772709791555, + "grad_norm": 7.348205089569092, + "learning_rate": 2.423421766304934e-07, + "loss": 1.6739, + "step": 4543 + }, + { + "epoch": 2.796092608260903, + "grad_norm": 8.462030410766602, + "learning_rate": 2.4089036351850447e-07, + "loss": 1.5204, + "step": 4544 + }, + { + "epoch": 2.7967079455426505, + "grad_norm": 6.686819553375244, + "learning_rate": 2.3944285919316525e-07, + "loss": 1.9395, + "step": 4545 + }, + { + "epoch": 2.797323282824398, + "grad_norm": 7.086431503295898, + "learning_rate": 2.3799966429357335e-07, + "loss": 1.7015, + "step": 4546 + }, + { + "epoch": 2.797938620106146, + "grad_norm": 5.408543109893799, + "learning_rate": 2.3656077945691802e-07, + "loss": 1.7434, + 
"step": 4547 + }, + { + "epoch": 2.7985539573878935, + "grad_norm": 8.077384948730469, + "learning_rate": 2.3512620531848885e-07, + "loss": 1.7241, + "step": 4548 + }, + { + "epoch": 2.7991692946696407, + "grad_norm": 6.117712497711182, + "learning_rate": 2.3369594251166804e-07, + "loss": 1.7708, + "step": 4549 + }, + { + "epoch": 2.7997846319513884, + "grad_norm": 5.892947673797607, + "learning_rate": 2.322699916679394e-07, + "loss": 1.6519, + "step": 4550 + }, + { + "epoch": 2.800399969233136, + "grad_norm": 6.854185581207275, + "learning_rate": 2.308483534168815e-07, + "loss": 1.7361, + "step": 4551 + }, + { + "epoch": 2.8010153065148833, + "grad_norm": 6.5089287757873535, + "learning_rate": 2.294310283861656e-07, + "loss": 1.7914, + "step": 4552 + }, + { + "epoch": 2.801630643796631, + "grad_norm": 6.191528797149658, + "learning_rate": 2.2801801720156224e-07, + "loss": 1.741, + "step": 4553 + }, + { + "epoch": 2.8022459810783786, + "grad_norm": 6.881898403167725, + "learning_rate": 2.2660932048693686e-07, + "loss": 1.785, + "step": 4554 + }, + { + "epoch": 2.8028613183601263, + "grad_norm": 7.1322340965271, + "learning_rate": 2.2520493886424743e-07, + "loss": 1.853, + "step": 4555 + }, + { + "epoch": 2.8034766556418735, + "grad_norm": 7.537688255310059, + "learning_rate": 2.2380487295355025e-07, + "loss": 1.7522, + "step": 4556 + }, + { + "epoch": 2.804091992923621, + "grad_norm": 8.573515892028809, + "learning_rate": 2.2240912337299304e-07, + "loss": 1.8965, + "step": 4557 + }, + { + "epoch": 2.804707330205369, + "grad_norm": 5.467584133148193, + "learning_rate": 2.210176907388195e-07, + "loss": 1.7959, + "step": 4558 + }, + { + "epoch": 2.8053226674871166, + "grad_norm": 6.766906261444092, + "learning_rate": 2.1963057566536715e-07, + "loss": 1.8563, + "step": 4559 + }, + { + "epoch": 2.805938004768864, + "grad_norm": 8.431882858276367, + "learning_rate": 2.182477787650694e-07, + "loss": 1.8788, + "step": 4560 + }, + { + "epoch": 2.8065533420506115, + "grad_norm": 5.495389938354492, + "learning_rate": 2.168693006484479e-07, + "loss": 1.782, + "step": 4561 + }, + { + "epoch": 2.807168679332359, + "grad_norm": 7.144077777862549, + "learning_rate": 2.1549514192412136e-07, + "loss": 1.611, + "step": 4562 + }, + { + "epoch": 2.8077840166141064, + "grad_norm": 6.5251145362854, + "learning_rate": 2.1412530319879888e-07, + "loss": 1.6977, + "step": 4563 + }, + { + "epoch": 2.808399353895854, + "grad_norm": 6.7782392501831055, + "learning_rate": 2.127597850772889e-07, + "loss": 1.8294, + "step": 4564 + }, + { + "epoch": 2.8090146911776017, + "grad_norm": 6.701347827911377, + "learning_rate": 2.1139858816248248e-07, + "loss": 1.8887, + "step": 4565 + }, + { + "epoch": 2.8096300284593494, + "grad_norm": 6.274772644042969, + "learning_rate": 2.100417130553678e-07, + "loss": 1.5802, + "step": 4566 + }, + { + "epoch": 2.810245365741097, + "grad_norm": 7.362088680267334, + "learning_rate": 2.0868916035502785e-07, + "loss": 1.8828, + "step": 4567 + }, + { + "epoch": 2.8108607030228443, + "grad_norm": 6.5660080909729, + "learning_rate": 2.0734093065863158e-07, + "loss": 1.9496, + "step": 4568 + }, + { + "epoch": 2.811476040304592, + "grad_norm": 5.886792182922363, + "learning_rate": 2.0599702456144178e-07, + "loss": 1.8117, + "step": 4569 + }, + { + "epoch": 2.8120913775863396, + "grad_norm": 6.306582927703857, + "learning_rate": 2.0465744265681263e-07, + "loss": 1.7766, + "step": 4570 + }, + { + "epoch": 2.812706714868087, + "grad_norm": 5.674841403961182, + "learning_rate": 2.0332218553618888e-07, + 
"loss": 1.5809, + "step": 4571 + }, + { + "epoch": 2.8133220521498346, + "grad_norm": 6.91290283203125, + "learning_rate": 2.0199125378910666e-07, + "loss": 1.7864, + "step": 4572 + }, + { + "epoch": 2.8139373894315822, + "grad_norm": 6.996735572814941, + "learning_rate": 2.0066464800318818e-07, + "loss": 1.6929, + "step": 4573 + }, + { + "epoch": 2.81455272671333, + "grad_norm": 5.938069820404053, + "learning_rate": 1.993423687641516e-07, + "loss": 1.7774, + "step": 4574 + }, + { + "epoch": 2.8151680639950776, + "grad_norm": 6.5066986083984375, + "learning_rate": 1.9802441665580208e-07, + "loss": 1.6601, + "step": 4575 + }, + { + "epoch": 2.815783401276825, + "grad_norm": 5.2851643562316895, + "learning_rate": 1.967107922600342e-07, + "loss": 1.6741, + "step": 4576 + }, + { + "epoch": 2.8163987385585725, + "grad_norm": 6.520896911621094, + "learning_rate": 1.9540149615683068e-07, + "loss": 1.7736, + "step": 4577 + }, + { + "epoch": 2.81701407584032, + "grad_norm": 6.347739219665527, + "learning_rate": 1.9409652892426578e-07, + "loss": 1.8886, + "step": 4578 + }, + { + "epoch": 2.8176294131220674, + "grad_norm": 6.059541702270508, + "learning_rate": 1.9279589113850082e-07, + "loss": 1.9653, + "step": 4579 + }, + { + "epoch": 2.818244750403815, + "grad_norm": 5.9655561447143555, + "learning_rate": 1.9149958337378538e-07, + "loss": 1.7697, + "step": 4580 + }, + { + "epoch": 2.8188600876855627, + "grad_norm": 7.32972526550293, + "learning_rate": 1.9020760620245938e-07, + "loss": 1.6272, + "step": 4581 + }, + { + "epoch": 2.81947542496731, + "grad_norm": 6.889088153839111, + "learning_rate": 1.8891996019494764e-07, + "loss": 1.7162, + "step": 4582 + }, + { + "epoch": 2.8200907622490576, + "grad_norm": 6.574466705322266, + "learning_rate": 1.8763664591976426e-07, + "loss": 1.6784, + "step": 4583 + }, + { + "epoch": 2.8207060995308053, + "grad_norm": 9.454893112182617, + "learning_rate": 1.8635766394351384e-07, + "loss": 1.7144, + "step": 4584 + }, + { + "epoch": 2.821321436812553, + "grad_norm": 6.9669880867004395, + "learning_rate": 1.8508301483088131e-07, + "loss": 1.9075, + "step": 4585 + }, + { + "epoch": 2.8219367740943007, + "grad_norm": 6.202524662017822, + "learning_rate": 1.8381269914464538e-07, + "loss": 1.6835, + "step": 4586 + }, + { + "epoch": 2.822552111376048, + "grad_norm": 5.614279747009277, + "learning_rate": 1.825467174456652e-07, + "loss": 1.8124, + "step": 4587 + }, + { + "epoch": 2.8231674486577956, + "grad_norm": 8.143087387084961, + "learning_rate": 1.8128507029289477e-07, + "loss": 1.7318, + "step": 4588 + }, + { + "epoch": 2.8237827859395432, + "grad_norm": 5.570952415466309, + "learning_rate": 1.8002775824336628e-07, + "loss": 1.8016, + "step": 4589 + }, + { + "epoch": 2.8243981232212905, + "grad_norm": 6.00308895111084, + "learning_rate": 1.787747818522001e-07, + "loss": 1.754, + "step": 4590 + }, + { + "epoch": 2.825013460503038, + "grad_norm": 8.23668384552002, + "learning_rate": 1.7752614167260484e-07, + "loss": 1.7785, + "step": 4591 + }, + { + "epoch": 2.825628797784786, + "grad_norm": 6.845446586608887, + "learning_rate": 1.7628183825587397e-07, + "loss": 1.8007, + "step": 4592 + }, + { + "epoch": 2.8262441350665335, + "grad_norm": 5.969099521636963, + "learning_rate": 1.7504187215138358e-07, + "loss": 2.0253, + "step": 4593 + }, + { + "epoch": 2.826859472348281, + "grad_norm": 7.269412994384766, + "learning_rate": 1.7380624390659572e-07, + "loss": 1.717, + "step": 4594 + }, + { + "epoch": 2.8274748096300284, + "grad_norm": 6.032900333404541, + "learning_rate": 
1.7257495406705959e-07, + "loss": 1.7937, + "step": 4595 + }, + { + "epoch": 2.828090146911776, + "grad_norm": 6.389214992523193, + "learning_rate": 1.713480031764081e-07, + "loss": 1.6897, + "step": 4596 + }, + { + "epoch": 2.8287054841935237, + "grad_norm": 6.245080947875977, + "learning_rate": 1.7012539177635457e-07, + "loss": 1.953, + "step": 4597 + }, + { + "epoch": 2.829320821475271, + "grad_norm": 7.131957054138184, + "learning_rate": 1.6890712040670277e-07, + "loss": 1.8516, + "step": 4598 + }, + { + "epoch": 2.8299361587570186, + "grad_norm": 5.742912769317627, + "learning_rate": 1.6769318960533465e-07, + "loss": 1.8221, + "step": 4599 + }, + { + "epoch": 2.8305514960387663, + "grad_norm": 6.619790554046631, + "learning_rate": 1.6648359990822038e-07, + "loss": 1.9184, + "step": 4600 + }, + { + "epoch": 2.8311668333205136, + "grad_norm": 6.996294021606445, + "learning_rate": 1.6527835184940722e-07, + "loss": 1.7906, + "step": 4601 + }, + { + "epoch": 2.8317821706022612, + "grad_norm": 6.978058338165283, + "learning_rate": 1.640774459610328e-07, + "loss": 1.8458, + "step": 4602 + }, + { + "epoch": 2.832397507884009, + "grad_norm": 6.289536952972412, + "learning_rate": 1.6288088277331303e-07, + "loss": 1.6935, + "step": 4603 + }, + { + "epoch": 2.8330128451657566, + "grad_norm": 7.372666835784912, + "learning_rate": 1.6168866281454866e-07, + "loss": 1.7289, + "step": 4604 + }, + { + "epoch": 2.8336281824475043, + "grad_norm": 6.077349662780762, + "learning_rate": 1.6050078661112078e-07, + "loss": 1.6767, + "step": 4605 + }, + { + "epoch": 2.8342435197292515, + "grad_norm": 6.097970962524414, + "learning_rate": 1.5931725468749436e-07, + "loss": 1.6421, + "step": 4606 + }, + { + "epoch": 2.834858857010999, + "grad_norm": 7.974865436553955, + "learning_rate": 1.5813806756621475e-07, + "loss": 1.8733, + "step": 4607 + }, + { + "epoch": 2.835474194292747, + "grad_norm": 6.838261127471924, + "learning_rate": 1.5696322576791101e-07, + "loss": 1.8804, + "step": 4608 + }, + { + "epoch": 2.836089531574494, + "grad_norm": 7.459291458129883, + "learning_rate": 1.5579272981129268e-07, + "loss": 1.6978, + "step": 4609 + }, + { + "epoch": 2.8367048688562417, + "grad_norm": 6.300049304962158, + "learning_rate": 1.5462658021314968e-07, + "loss": 1.9763, + "step": 4610 + }, + { + "epoch": 2.8373202061379894, + "grad_norm": 6.640649795532227, + "learning_rate": 1.5346477748835353e-07, + "loss": 1.6731, + "step": 4611 + }, + { + "epoch": 2.837935543419737, + "grad_norm": 5.052441596984863, + "learning_rate": 1.5230732214985833e-07, + "loss": 1.7933, + "step": 4612 + }, + { + "epoch": 2.8385508807014848, + "grad_norm": 6.379444122314453, + "learning_rate": 1.511542147086964e-07, + "loss": 1.7426, + "step": 4613 + }, + { + "epoch": 2.839166217983232, + "grad_norm": 6.778030872344971, + "learning_rate": 1.500054556739805e-07, + "loss": 1.6966, + "step": 4614 + }, + { + "epoch": 2.8397815552649797, + "grad_norm": 6.470156192779541, + "learning_rate": 1.488610455529038e-07, + "loss": 1.8923, + "step": 4615 + }, + { + "epoch": 2.8403968925467273, + "grad_norm": 7.201651573181152, + "learning_rate": 1.4772098485074216e-07, + "loss": 1.6963, + "step": 4616 + }, + { + "epoch": 2.8410122298284746, + "grad_norm": 6.569672584533691, + "learning_rate": 1.4658527407084732e-07, + "loss": 1.6592, + "step": 4617 + }, + { + "epoch": 2.8416275671102222, + "grad_norm": 6.492232799530029, + "learning_rate": 1.4545391371465046e-07, + "loss": 1.6678, + "step": 4618 + }, + { + "epoch": 2.84224290439197, + "grad_norm": 
6.792582035064697, + "learning_rate": 1.4432690428166528e-07, + "loss": 1.8849, + "step": 4619 + }, + { + "epoch": 2.842858241673717, + "grad_norm": 8.150391578674316, + "learning_rate": 1.432042462694827e-07, + "loss": 1.7904, + "step": 4620 + }, + { + "epoch": 2.843473578955465, + "grad_norm": 6.328104019165039, + "learning_rate": 1.4208594017377065e-07, + "loss": 1.8026, + "step": 4621 + }, + { + "epoch": 2.8440889162372125, + "grad_norm": 7.103504180908203, + "learning_rate": 1.409719864882775e-07, + "loss": 1.8978, + "step": 4622 + }, + { + "epoch": 2.84470425351896, + "grad_norm": 6.799252986907959, + "learning_rate": 1.39862385704832e-07, + "loss": 1.6304, + "step": 4623 + }, + { + "epoch": 2.845319590800708, + "grad_norm": 5.237522125244141, + "learning_rate": 1.387571383133368e-07, + "loss": 1.6949, + "step": 4624 + }, + { + "epoch": 2.845934928082455, + "grad_norm": 8.234872817993164, + "learning_rate": 1.3765624480177488e-07, + "loss": 1.7202, + "step": 4625 + }, + { + "epoch": 2.8465502653642027, + "grad_norm": 8.237601280212402, + "learning_rate": 1.3655970565620623e-07, + "loss": 1.5065, + "step": 4626 + }, + { + "epoch": 2.8471656026459504, + "grad_norm": 6.324047565460205, + "learning_rate": 1.3546752136076924e-07, + "loss": 1.7531, + "step": 4627 + }, + { + "epoch": 2.8477809399276977, + "grad_norm": 5.400428771972656, + "learning_rate": 1.3437969239767811e-07, + "loss": 1.8257, + "step": 4628 + }, + { + "epoch": 2.8483962772094453, + "grad_norm": 6.9858245849609375, + "learning_rate": 1.3329621924722536e-07, + "loss": 1.726, + "step": 4629 + }, + { + "epoch": 2.849011614491193, + "grad_norm": 6.3917741775512695, + "learning_rate": 1.3221710238778052e-07, + "loss": 1.6442, + "step": 4630 + }, + { + "epoch": 2.8496269517729407, + "grad_norm": 5.836415767669678, + "learning_rate": 1.3114234229578803e-07, + "loss": 2.0237, + "step": 4631 + }, + { + "epoch": 2.8502422890546883, + "grad_norm": 6.174195766448975, + "learning_rate": 1.3007193944577056e-07, + "loss": 1.6626, + "step": 4632 + }, + { + "epoch": 2.8508576263364356, + "grad_norm": 7.1304497718811035, + "learning_rate": 1.2900589431032563e-07, + "loss": 1.8752, + "step": 4633 + }, + { + "epoch": 2.8514729636181833, + "grad_norm": 6.558872699737549, + "learning_rate": 1.279442073601278e-07, + "loss": 1.8324, + "step": 4634 + }, + { + "epoch": 2.852088300899931, + "grad_norm": 5.6403679847717285, + "learning_rate": 1.2688687906392772e-07, + "loss": 1.865, + "step": 4635 + }, + { + "epoch": 2.852703638181678, + "grad_norm": 5.815674781799316, + "learning_rate": 1.258339098885497e-07, + "loss": 1.6815, + "step": 4636 + }, + { + "epoch": 2.853318975463426, + "grad_norm": 5.968740940093994, + "learning_rate": 1.2478530029889635e-07, + "loss": 1.7026, + "step": 4637 + }, + { + "epoch": 2.8539343127451735, + "grad_norm": 6.473319053649902, + "learning_rate": 1.2374105075794175e-07, + "loss": 1.8685, + "step": 4638 + }, + { + "epoch": 2.8545496500269207, + "grad_norm": 6.175434589385986, + "learning_rate": 1.2270116172673818e-07, + "loss": 1.7333, + "step": 4639 + }, + { + "epoch": 2.8551649873086684, + "grad_norm": 8.119619369506836, + "learning_rate": 1.2166563366441287e-07, + "loss": 1.6625, + "step": 4640 + }, + { + "epoch": 2.855780324590416, + "grad_norm": 6.538888454437256, + "learning_rate": 1.206344670281645e-07, + "loss": 1.7564, + "step": 4641 + }, + { + "epoch": 2.8563956618721638, + "grad_norm": 7.450347423553467, + "learning_rate": 1.1960766227326782e-07, + "loss": 1.6983, + "step": 4642 + }, + { + "epoch": 
2.8570109991539114, + "grad_norm": 8.233902931213379, + "learning_rate": 1.1858521985307126e-07, + "loss": 1.7327, + "step": 4643 + }, + { + "epoch": 2.8576263364356587, + "grad_norm": 6.919505596160889, + "learning_rate": 1.175671402190004e-07, + "loss": 1.9073, + "step": 4644 + }, + { + "epoch": 2.8582416737174063, + "grad_norm": 8.359834671020508, + "learning_rate": 1.1655342382055013e-07, + "loss": 1.8192, + "step": 4645 + }, + { + "epoch": 2.858857010999154, + "grad_norm": 5.367745399475098, + "learning_rate": 1.155440711052902e-07, + "loss": 1.5772, + "step": 4646 + }, + { + "epoch": 2.8594723482809012, + "grad_norm": 5.962616920471191, + "learning_rate": 1.1453908251886636e-07, + "loss": 1.7808, + "step": 4647 + }, + { + "epoch": 2.860087685562649, + "grad_norm": 5.5588579177856445, + "learning_rate": 1.1353845850499478e-07, + "loss": 1.6999, + "step": 4648 + }, + { + "epoch": 2.8607030228443966, + "grad_norm": 6.588895320892334, + "learning_rate": 1.1254219950546319e-07, + "loss": 1.6933, + "step": 4649 + }, + { + "epoch": 2.8613183601261443, + "grad_norm": 6.792422771453857, + "learning_rate": 1.1155030596013638e-07, + "loss": 1.8574, + "step": 4650 + }, + { + "epoch": 2.861933697407892, + "grad_norm": 5.52543830871582, + "learning_rate": 1.105627783069485e-07, + "loss": 1.7097, + "step": 4651 + }, + { + "epoch": 2.862549034689639, + "grad_norm": 7.913383960723877, + "learning_rate": 1.0957961698190856e-07, + "loss": 1.9484, + "step": 4652 + }, + { + "epoch": 2.863164371971387, + "grad_norm": 8.765035629272461, + "learning_rate": 1.0860082241909487e-07, + "loss": 1.7928, + "step": 4653 + }, + { + "epoch": 2.8637797092531345, + "grad_norm": 5.976325988769531, + "learning_rate": 1.0762639505065952e-07, + "loss": 1.7907, + "step": 4654 + }, + { + "epoch": 2.8643950465348818, + "grad_norm": 7.612587928771973, + "learning_rate": 1.0665633530682618e-07, + "loss": 1.7003, + "step": 4655 + }, + { + "epoch": 2.8650103838166294, + "grad_norm": 5.99962043762207, + "learning_rate": 1.056906436158911e-07, + "loss": 1.8239, + "step": 4656 + }, + { + "epoch": 2.865625721098377, + "grad_norm": 6.213390827178955, + "learning_rate": 1.047293204042199e-07, + "loss": 1.766, + "step": 4657 + }, + { + "epoch": 2.8662410583801243, + "grad_norm": 6.410811901092529, + "learning_rate": 1.0377236609625086e-07, + "loss": 1.5983, + "step": 4658 + }, + { + "epoch": 2.866856395661872, + "grad_norm": 5.803953170776367, + "learning_rate": 1.0281978111449375e-07, + "loss": 1.6772, + "step": 4659 + }, + { + "epoch": 2.8674717329436197, + "grad_norm": 6.01049280166626, + "learning_rate": 1.0187156587952662e-07, + "loss": 1.8194, + "step": 4660 + }, + { + "epoch": 2.8680870702253674, + "grad_norm": 7.170664310455322, + "learning_rate": 1.0092772081000235e-07, + "loss": 1.6581, + "step": 4661 + }, + { + "epoch": 2.868702407507115, + "grad_norm": 7.733486652374268, + "learning_rate": 9.998824632263981e-08, + "loss": 1.8237, + "step": 4662 + }, + { + "epoch": 2.8693177447888623, + "grad_norm": 6.462808132171631, + "learning_rate": 9.905314283223166e-08, + "loss": 1.7423, + "step": 4663 + }, + { + "epoch": 2.86993308207061, + "grad_norm": 5.611052989959717, + "learning_rate": 9.812241075163986e-08, + "loss": 1.703, + "step": 4664 + }, + { + "epoch": 2.8705484193523576, + "grad_norm": 5.8838396072387695, + "learning_rate": 9.71960504917957e-08, + "loss": 1.8228, + "step": 4665 + }, + { + "epoch": 2.871163756634105, + "grad_norm": 7.6939005851745605, + "learning_rate": 9.627406246169978e-08, + "loss": 1.8884, + "step": 
4666 + }, + { + "epoch": 2.8717790939158525, + "grad_norm": 5.915440082550049, + "learning_rate": 9.535644706842318e-08, + "loss": 2.0262, + "step": 4667 + }, + { + "epoch": 2.8723944311976, + "grad_norm": 5.948936462402344, + "learning_rate": 9.444320471710732e-08, + "loss": 1.8009, + "step": 4668 + }, + { + "epoch": 2.873009768479348, + "grad_norm": 6.819876670837402, + "learning_rate": 9.353433581096082e-08, + "loss": 2.0285, + "step": 4669 + }, + { + "epoch": 2.8736251057610955, + "grad_norm": 6.092942714691162, + "learning_rate": 9.26298407512627e-08, + "loss": 1.7557, + "step": 4670 + }, + { + "epoch": 2.8742404430428428, + "grad_norm": 6.507721424102783, + "learning_rate": 9.172971993735902e-08, + "loss": 2.0004, + "step": 4671 + }, + { + "epoch": 2.8748557803245904, + "grad_norm": 5.829037189483643, + "learning_rate": 9.083397376666858e-08, + "loss": 1.9923, + "step": 4672 + }, + { + "epoch": 2.875471117606338, + "grad_norm": 6.843047142028809, + "learning_rate": 8.994260263467502e-08, + "loss": 1.6689, + "step": 4673 + }, + { + "epoch": 2.8760864548880853, + "grad_norm": 6.5388054847717285, + "learning_rate": 8.90556069349302e-08, + "loss": 1.6695, + "step": 4674 + }, + { + "epoch": 2.876701792169833, + "grad_norm": 6.677099227905273, + "learning_rate": 8.817298705905642e-08, + "loss": 1.9609, + "step": 4675 + }, + { + "epoch": 2.8773171294515807, + "grad_norm": 6.466150760650635, + "learning_rate": 8.729474339674415e-08, + "loss": 1.7904, + "step": 4676 + }, + { + "epoch": 2.8779324667333284, + "grad_norm": 5.754880428314209, + "learning_rate": 8.64208763357488e-08, + "loss": 1.8415, + "step": 4677 + }, + { + "epoch": 2.878547804015076, + "grad_norm": 6.114316940307617, + "learning_rate": 8.555138626189619e-08, + "loss": 1.5341, + "step": 4678 + }, + { + "epoch": 2.8791631412968233, + "grad_norm": 6.181856155395508, + "learning_rate": 8.468627355907811e-08, + "loss": 1.8322, + "step": 4679 + }, + { + "epoch": 2.879778478578571, + "grad_norm": 8.346745491027832, + "learning_rate": 8.382553860925458e-08, + "loss": 1.4523, + "step": 4680 + }, + { + "epoch": 2.8803938158603186, + "grad_norm": 5.883575439453125, + "learning_rate": 8.296918179245161e-08, + "loss": 1.782, + "step": 4681 + }, + { + "epoch": 2.881009153142066, + "grad_norm": 5.891323566436768, + "learning_rate": 8.211720348676456e-08, + "loss": 1.8573, + "step": 4682 + }, + { + "epoch": 2.8816244904238135, + "grad_norm": 5.344628810882568, + "learning_rate": 8.12696040683525e-08, + "loss": 1.9296, + "step": 4683 + }, + { + "epoch": 2.882239827705561, + "grad_norm": 7.309035301208496, + "learning_rate": 8.042638391144276e-08, + "loss": 1.7226, + "step": 4684 + }, + { + "epoch": 2.8828551649873084, + "grad_norm": 5.874395370483398, + "learning_rate": 7.958754338833085e-08, + "loss": 1.8111, + "step": 4685 + }, + { + "epoch": 2.883470502269056, + "grad_norm": 5.914357662200928, + "learning_rate": 7.8753082869375e-08, + "loss": 1.8822, + "step": 4686 + }, + { + "epoch": 2.8840858395508038, + "grad_norm": 6.258183002471924, + "learning_rate": 7.792300272300268e-08, + "loss": 1.7503, + "step": 4687 + }, + { + "epoch": 2.8847011768325515, + "grad_norm": 7.608938694000244, + "learning_rate": 7.70973033157052e-08, + "loss": 1.6635, + "step": 4688 + }, + { + "epoch": 2.885316514114299, + "grad_norm": 5.67008113861084, + "learning_rate": 7.627598501204092e-08, + "loss": 1.751, + "step": 4689 + }, + { + "epoch": 2.8859318513960464, + "grad_norm": 6.105820655822754, + "learning_rate": 7.545904817463423e-08, + "loss": 1.7878, + "step": 
4690 + }, + { + "epoch": 2.886547188677794, + "grad_norm": 5.407336235046387, + "learning_rate": 7.464649316417438e-08, + "loss": 1.7616, + "step": 4691 + }, + { + "epoch": 2.8871625259595417, + "grad_norm": 6.2941508293151855, + "learning_rate": 7.383832033941551e-08, + "loss": 1.7594, + "step": 4692 + }, + { + "epoch": 2.887777863241289, + "grad_norm": 6.446435928344727, + "learning_rate": 7.303453005717665e-08, + "loss": 1.9074, + "step": 4693 + }, + { + "epoch": 2.8883932005230366, + "grad_norm": 6.038694858551025, + "learning_rate": 7.2235122672345e-08, + "loss": 1.7195, + "step": 4694 + }, + { + "epoch": 2.8890085378047843, + "grad_norm": 6.3426432609558105, + "learning_rate": 7.144009853786826e-08, + "loss": 2.007, + "step": 4695 + }, + { + "epoch": 2.889623875086532, + "grad_norm": 6.402960300445557, + "learning_rate": 7.064945800476342e-08, + "loss": 1.8799, + "step": 4696 + }, + { + "epoch": 2.8902392123682796, + "grad_norm": 5.806665897369385, + "learning_rate": 6.986320142210678e-08, + "loss": 1.8306, + "step": 4697 + }, + { + "epoch": 2.890854549650027, + "grad_norm": 6.0462446212768555, + "learning_rate": 6.908132913704291e-08, + "loss": 1.7906, + "step": 4698 + }, + { + "epoch": 2.8914698869317745, + "grad_norm": 7.697526454925537, + "learning_rate": 6.830384149478009e-08, + "loss": 1.7477, + "step": 4699 + }, + { + "epoch": 2.892085224213522, + "grad_norm": 6.764117240905762, + "learning_rate": 6.75307388385893e-08, + "loss": 1.798, + "step": 4700 + }, + { + "epoch": 2.8927005614952694, + "grad_norm": 7.642149448394775, + "learning_rate": 6.67620215098086e-08, + "loss": 1.8087, + "step": 4701 + }, + { + "epoch": 2.893315898777017, + "grad_norm": 7.200974941253662, + "learning_rate": 6.59976898478354e-08, + "loss": 1.825, + "step": 4702 + }, + { + "epoch": 2.893931236058765, + "grad_norm": 6.611457824707031, + "learning_rate": 6.523774419013418e-08, + "loss": 1.8463, + "step": 4703 + }, + { + "epoch": 2.894546573340512, + "grad_norm": 7.283010959625244, + "learning_rate": 6.448218487223101e-08, + "loss": 1.7675, + "step": 4704 + }, + { + "epoch": 2.8951619106222597, + "grad_norm": 6.4294562339782715, + "learning_rate": 6.373101222771793e-08, + "loss": 1.7629, + "step": 4705 + }, + { + "epoch": 2.8957772479040074, + "grad_norm": 7.433370113372803, + "learning_rate": 6.298422658824521e-08, + "loss": 1.9824, + "step": 4706 + }, + { + "epoch": 2.896392585185755, + "grad_norm": 6.155649662017822, + "learning_rate": 6.224182828353243e-08, + "loss": 1.7693, + "step": 4707 + }, + { + "epoch": 2.8970079224675027, + "grad_norm": 5.318051338195801, + "learning_rate": 6.150381764135849e-08, + "loss": 1.7067, + "step": 4708 + }, + { + "epoch": 2.89762325974925, + "grad_norm": 6.670203685760498, + "learning_rate": 6.077019498756497e-08, + "loss": 1.9001, + "step": 4709 + }, + { + "epoch": 2.8982385970309976, + "grad_norm": 7.017638683319092, + "learning_rate": 6.004096064605613e-08, + "loss": 1.8352, + "step": 4710 + }, + { + "epoch": 2.8988539343127453, + "grad_norm": 5.4878339767456055, + "learning_rate": 5.9316114938801076e-08, + "loss": 1.854, + "step": 4711 + }, + { + "epoch": 2.8994692715944925, + "grad_norm": 5.566630840301514, + "learning_rate": 5.859565818582824e-08, + "loss": 1.7623, + "step": 4712 + }, + { + "epoch": 2.90008460887624, + "grad_norm": 6.198762893676758, + "learning_rate": 5.787959070523097e-08, + "loss": 1.8321, + "step": 4713 + }, + { + "epoch": 2.900699946157988, + "grad_norm": 6.54020357131958, + "learning_rate": 5.7167912813160806e-08, + "loss": 1.8486, + 
"step": 4714 + }, + { + "epoch": 2.9013152834397355, + "grad_norm": 6.652507781982422, + "learning_rate": 5.6460624823836405e-08, + "loss": 1.7109, + "step": 4715 + }, + { + "epoch": 2.9019306207214832, + "grad_norm": 6.001161575317383, + "learning_rate": 5.575772704953464e-08, + "loss": 1.9584, + "step": 4716 + }, + { + "epoch": 2.9025459580032305, + "grad_norm": 8.329659461975098, + "learning_rate": 5.505921980059503e-08, + "loss": 1.8378, + "step": 4717 + }, + { + "epoch": 2.903161295284978, + "grad_norm": 7.361462593078613, + "learning_rate": 5.436510338541756e-08, + "loss": 1.8247, + "step": 4718 + }, + { + "epoch": 2.903776632566726, + "grad_norm": 8.065169334411621, + "learning_rate": 5.367537811046486e-08, + "loss": 1.6583, + "step": 4719 + }, + { + "epoch": 2.904391969848473, + "grad_norm": 6.355956077575684, + "learning_rate": 5.299004428026222e-08, + "loss": 1.9988, + "step": 4720 + }, + { + "epoch": 2.9050073071302207, + "grad_norm": 5.842173099517822, + "learning_rate": 5.230910219739205e-08, + "loss": 1.8336, + "step": 4721 + }, + { + "epoch": 2.9056226444119684, + "grad_norm": 6.374865531921387, + "learning_rate": 5.163255216250162e-08, + "loss": 1.8638, + "step": 4722 + }, + { + "epoch": 2.9062379816937156, + "grad_norm": 6.382627964019775, + "learning_rate": 5.096039447429535e-08, + "loss": 1.9137, + "step": 4723 + }, + { + "epoch": 2.9068533189754633, + "grad_norm": 6.090041160583496, + "learning_rate": 5.0292629429543606e-08, + "loss": 1.6781, + "step": 4724 + }, + { + "epoch": 2.907468656257211, + "grad_norm": 5.742069721221924, + "learning_rate": 4.962925732307278e-08, + "loss": 1.9159, + "step": 4725 + }, + { + "epoch": 2.9080839935389586, + "grad_norm": 6.187016487121582, + "learning_rate": 4.89702784477708e-08, + "loss": 1.9223, + "step": 4726 + }, + { + "epoch": 2.9086993308207063, + "grad_norm": 6.140006065368652, + "learning_rate": 4.8315693094584945e-08, + "loss": 1.7154, + "step": 4727 + }, + { + "epoch": 2.9093146681024535, + "grad_norm": 6.116796970367432, + "learning_rate": 4.766550155252736e-08, + "loss": 1.7206, + "step": 4728 + }, + { + "epoch": 2.909930005384201, + "grad_norm": 5.863661766052246, + "learning_rate": 4.7019704108665074e-08, + "loss": 1.6355, + "step": 4729 + }, + { + "epoch": 2.910545342665949, + "grad_norm": 5.41718053817749, + "learning_rate": 4.637830104812557e-08, + "loss": 1.6979, + "step": 4730 + }, + { + "epoch": 2.911160679947696, + "grad_norm": 6.469456195831299, + "learning_rate": 4.57412926541001e-08, + "loss": 1.5212, + "step": 4731 + }, + { + "epoch": 2.911776017229444, + "grad_norm": 6.687331199645996, + "learning_rate": 4.510867920783479e-08, + "loss": 1.7951, + "step": 4732 + }, + { + "epoch": 2.9123913545111915, + "grad_norm": 6.6437764167785645, + "learning_rate": 4.4480460988639564e-08, + "loss": 1.753, + "step": 4733 + }, + { + "epoch": 2.913006691792939, + "grad_norm": 6.952179908752441, + "learning_rate": 4.385663827388031e-08, + "loss": 1.7184, + "step": 4734 + }, + { + "epoch": 2.913622029074687, + "grad_norm": 6.906987190246582, + "learning_rate": 4.3237211338983396e-08, + "loss": 2.0056, + "step": 4735 + }, + { + "epoch": 2.914237366356434, + "grad_norm": 6.290803909301758, + "learning_rate": 4.2622180457435604e-08, + "loss": 1.8854, + "step": 4736 + }, + { + "epoch": 2.9148527036381817, + "grad_norm": 6.085556507110596, + "learning_rate": 4.2011545900781936e-08, + "loss": 1.81, + "step": 4737 + }, + { + "epoch": 2.9154680409199294, + "grad_norm": 5.2416672706604, + "learning_rate": 4.140530793862674e-08, + 
"loss": 1.856, + "step": 4738 + }, + { + "epoch": 2.9160833782016766, + "grad_norm": 7.670105457305908, + "learning_rate": 4.0803466838631454e-08, + "loss": 1.7263, + "step": 4739 + }, + { + "epoch": 2.9166987154834243, + "grad_norm": 5.99717378616333, + "learning_rate": 4.020602286651687e-08, + "loss": 1.6748, + "step": 4740 + }, + { + "epoch": 2.917314052765172, + "grad_norm": 7.788820743560791, + "learning_rate": 3.9612976286065305e-08, + "loss": 1.7728, + "step": 4741 + }, + { + "epoch": 2.917929390046919, + "grad_norm": 8.03035831451416, + "learning_rate": 3.90243273591151e-08, + "loss": 1.8042, + "step": 4742 + }, + { + "epoch": 2.918544727328667, + "grad_norm": 5.620676040649414, + "learning_rate": 3.8440076345561685e-08, + "loss": 1.6163, + "step": 4743 + }, + { + "epoch": 2.9191600646104146, + "grad_norm": 6.025263786315918, + "learning_rate": 3.786022350335983e-08, + "loss": 1.5333, + "step": 4744 + }, + { + "epoch": 2.9197754018921622, + "grad_norm": 6.990189552307129, + "learning_rate": 3.728476908852585e-08, + "loss": 1.7277, + "step": 4745 + }, + { + "epoch": 2.92039073917391, + "grad_norm": 6.533090114593506, + "learning_rate": 3.6713713355127634e-08, + "loss": 1.8994, + "step": 4746 + }, + { + "epoch": 2.921006076455657, + "grad_norm": 6.58897590637207, + "learning_rate": 3.6147056555296825e-08, + "loss": 1.6926, + "step": 4747 + }, + { + "epoch": 2.921621413737405, + "grad_norm": 6.162493705749512, + "learning_rate": 3.5584798939219956e-08, + "loss": 1.6991, + "step": 4748 + }, + { + "epoch": 2.9222367510191525, + "grad_norm": 6.5991339683532715, + "learning_rate": 3.502694075514179e-08, + "loss": 1.7681, + "step": 4749 + }, + { + "epoch": 2.9228520883008997, + "grad_norm": 6.868391513824463, + "learning_rate": 3.44734822493642e-08, + "loss": 1.7469, + "step": 4750 + }, + { + "epoch": 2.9234674255826474, + "grad_norm": 7.078403949737549, + "learning_rate": 3.392442366624615e-08, + "loss": 1.8358, + "step": 4751 + }, + { + "epoch": 2.924082762864395, + "grad_norm": 6.654616832733154, + "learning_rate": 3.3379765248207077e-08, + "loss": 1.7656, + "step": 4752 + }, + { + "epoch": 2.9246981001461427, + "grad_norm": 6.871440887451172, + "learning_rate": 3.283950723572127e-08, + "loss": 1.7115, + "step": 4753 + }, + { + "epoch": 2.9253134374278904, + "grad_norm": 6.1727070808410645, + "learning_rate": 3.230364986731904e-08, + "loss": 1.8197, + "step": 4754 + }, + { + "epoch": 2.9259287747096376, + "grad_norm": 6.324635982513428, + "learning_rate": 3.177219337958892e-08, + "loss": 1.885, + "step": 4755 + }, + { + "epoch": 2.9265441119913853, + "grad_norm": 5.717742919921875, + "learning_rate": 3.124513800717877e-08, + "loss": 2.0796, + "step": 4756 + }, + { + "epoch": 2.927159449273133, + "grad_norm": 7.041825771331787, + "learning_rate": 3.0722483982789095e-08, + "loss": 1.8404, + "step": 4757 + }, + { + "epoch": 2.92777478655488, + "grad_norm": 7.37984561920166, + "learning_rate": 3.020423153717866e-08, + "loss": 1.8523, + "step": 4758 + }, + { + "epoch": 2.928390123836628, + "grad_norm": 7.117954254150391, + "learning_rate": 2.969038089916443e-08, + "loss": 1.7804, + "step": 4759 + }, + { + "epoch": 2.9290054611183756, + "grad_norm": 5.568545341491699, + "learning_rate": 2.918093229561936e-08, + "loss": 1.7573, + "step": 4760 + }, + { + "epoch": 2.929620798400123, + "grad_norm": 6.205462455749512, + "learning_rate": 2.8675885951472415e-08, + "loss": 1.7011, + "step": 4761 + }, + { + "epoch": 2.9302361356818705, + "grad_norm": 6.238028526306152, + "learning_rate": 
2.8175242089706345e-08, + "loss": 1.7611, + "step": 4762 + }, + { + "epoch": 2.930851472963618, + "grad_norm": 5.578752040863037, + "learning_rate": 2.767900093136544e-08, + "loss": 1.7092, + "step": 4763 + }, + { + "epoch": 2.931466810245366, + "grad_norm": 6.310956001281738, + "learning_rate": 2.7187162695545554e-08, + "loss": 1.7684, + "step": 4764 + }, + { + "epoch": 2.9320821475271135, + "grad_norm": 6.186322212219238, + "learning_rate": 2.669972759940076e-08, + "loss": 1.8357, + "step": 4765 + }, + { + "epoch": 2.9326974848088607, + "grad_norm": 5.652623653411865, + "learning_rate": 2.6216695858141127e-08, + "loss": 2.0919, + "step": 4766 + }, + { + "epoch": 2.9333128220906084, + "grad_norm": 6.108942985534668, + "learning_rate": 2.573806768503162e-08, + "loss": 1.7347, + "step": 4767 + }, + { + "epoch": 2.933928159372356, + "grad_norm": 7.061485767364502, + "learning_rate": 2.5263843291393197e-08, + "loss": 1.6903, + "step": 4768 + }, + { + "epoch": 2.9345434966541033, + "grad_norm": 6.6972808837890625, + "learning_rate": 2.479402288660282e-08, + "loss": 1.8058, + "step": 4769 + }, + { + "epoch": 2.935158833935851, + "grad_norm": 6.156447887420654, + "learning_rate": 2.4328606678093446e-08, + "loss": 1.9641, + "step": 4770 + }, + { + "epoch": 2.9357741712175986, + "grad_norm": 7.566534519195557, + "learning_rate": 2.386759487135293e-08, + "loss": 1.7937, + "step": 4771 + }, + { + "epoch": 2.9363895084993463, + "grad_norm": 6.246122360229492, + "learning_rate": 2.3410987669924002e-08, + "loss": 1.7245, + "step": 4772 + }, + { + "epoch": 2.937004845781094, + "grad_norm": 6.9308600425720215, + "learning_rate": 2.2958785275406514e-08, + "loss": 1.8719, + "step": 4773 + }, + { + "epoch": 2.9376201830628412, + "grad_norm": 5.577329635620117, + "learning_rate": 2.2510987887451875e-08, + "loss": 1.8205, + "step": 4774 + }, + { + "epoch": 2.938235520344589, + "grad_norm": 5.451444625854492, + "learning_rate": 2.206759570377193e-08, + "loss": 1.685, + "step": 4775 + }, + { + "epoch": 2.9388508576263366, + "grad_norm": 6.542118549346924, + "learning_rate": 2.162860892012786e-08, + "loss": 1.7362, + "step": 4776 + }, + { + "epoch": 2.939466194908084, + "grad_norm": 6.521056652069092, + "learning_rate": 2.1194027730341294e-08, + "loss": 1.77, + "step": 4777 + }, + { + "epoch": 2.9400815321898315, + "grad_norm": 7.773115634918213, + "learning_rate": 2.0763852326284307e-08, + "loss": 1.7195, + "step": 4778 + }, + { + "epoch": 2.940696869471579, + "grad_norm": 6.942408084869385, + "learning_rate": 2.033808289788608e-08, + "loss": 1.7415, + "step": 4779 + }, + { + "epoch": 2.941312206753327, + "grad_norm": 7.673993110656738, + "learning_rate": 1.991671963312958e-08, + "loss": 1.7085, + "step": 4780 + }, + { + "epoch": 2.941927544035074, + "grad_norm": 7.018185138702393, + "learning_rate": 1.949976271805376e-08, + "loss": 1.7304, + "step": 4781 + }, + { + "epoch": 2.9425428813168217, + "grad_norm": 6.579115867614746, + "learning_rate": 1.908721233675026e-08, + "loss": 1.8028, + "step": 4782 + }, + { + "epoch": 2.9431582185985694, + "grad_norm": 5.373909950256348, + "learning_rate": 1.8679068671364488e-08, + "loss": 1.8903, + "step": 4783 + }, + { + "epoch": 2.943773555880317, + "grad_norm": 7.259073257446289, + "learning_rate": 1.827533190210118e-08, + "loss": 1.9046, + "step": 4784 + }, + { + "epoch": 2.9443888931620643, + "grad_norm": 5.955874443054199, + "learning_rate": 1.7876002207213304e-08, + "loss": 1.9122, + "step": 4785 + }, + { + "epoch": 2.945004230443812, + "grad_norm": 
6.8434038162231445, + "learning_rate": 1.7481079763010943e-08, + "loss": 1.7075, + "step": 4786 + }, + { + "epoch": 2.9456195677255597, + "grad_norm": 6.680148124694824, + "learning_rate": 1.7090564743857952e-08, + "loss": 1.9294, + "step": 4787 + }, + { + "epoch": 2.946234905007307, + "grad_norm": 7.008884906768799, + "learning_rate": 1.6704457322173073e-08, + "loss": 1.9953, + "step": 4788 + }, + { + "epoch": 2.9468502422890546, + "grad_norm": 5.325714111328125, + "learning_rate": 1.6322757668428836e-08, + "loss": 1.9385, + "step": 4789 + }, + { + "epoch": 2.9474655795708022, + "grad_norm": 6.571719646453857, + "learning_rate": 1.5945465951149318e-08, + "loss": 1.793, + "step": 4790 + }, + { + "epoch": 2.94808091685255, + "grad_norm": 8.125704765319824, + "learning_rate": 1.557258233691572e-08, + "loss": 1.7762, + "step": 4791 + }, + { + "epoch": 2.9486962541342976, + "grad_norm": 6.58264684677124, + "learning_rate": 1.520410699035968e-08, + "loss": 1.8589, + "step": 4792 + }, + { + "epoch": 2.949311591416045, + "grad_norm": 7.191124439239502, + "learning_rate": 1.4840040074169947e-08, + "loss": 1.8229, + "step": 4793 + }, + { + "epoch": 2.9499269286977925, + "grad_norm": 6.6752543449401855, + "learning_rate": 1.4480381749087945e-08, + "loss": 1.7803, + "step": 4794 + }, + { + "epoch": 2.95054226597954, + "grad_norm": 7.949001789093018, + "learning_rate": 1.4125132173905542e-08, + "loss": 1.7252, + "step": 4795 + }, + { + "epoch": 2.9511576032612874, + "grad_norm": 5.7251973152160645, + "learning_rate": 1.3774291505472826e-08, + "loss": 1.7112, + "step": 4796 + }, + { + "epoch": 2.951772940543035, + "grad_norm": 6.10806941986084, + "learning_rate": 1.3427859898689221e-08, + "loss": 1.799, + "step": 4797 + }, + { + "epoch": 2.9523882778247827, + "grad_norm": 6.8487019538879395, + "learning_rate": 1.3085837506510158e-08, + "loss": 1.6611, + "step": 4798 + }, + { + "epoch": 2.9530036151065304, + "grad_norm": 5.787548542022705, + "learning_rate": 1.2748224479943727e-08, + "loss": 1.796, + "step": 4799 + }, + { + "epoch": 2.953618952388278, + "grad_norm": 6.801617622375488, + "learning_rate": 1.2415020968049585e-08, + "loss": 1.9468, + "step": 4800 + }, + { + "epoch": 2.9542342896700253, + "grad_norm": 7.4270339012146, + "learning_rate": 1.2086227117943383e-08, + "loss": 1.6678, + "step": 4801 + }, + { + "epoch": 2.954849626951773, + "grad_norm": 6.158993244171143, + "learning_rate": 1.1761843074792334e-08, + "loss": 1.762, + "step": 4802 + }, + { + "epoch": 2.9554649642335207, + "grad_norm": 6.011277675628662, + "learning_rate": 1.1441868981815207e-08, + "loss": 1.6897, + "step": 4803 + }, + { + "epoch": 2.956080301515268, + "grad_norm": 6.670826435089111, + "learning_rate": 1.1126304980285663e-08, + "loss": 1.7253, + "step": 4804 + }, + { + "epoch": 2.9566956387970156, + "grad_norm": 6.547310829162598, + "learning_rate": 1.0815151209530029e-08, + "loss": 1.8198, + "step": 4805 + }, + { + "epoch": 2.9573109760787633, + "grad_norm": 6.760555744171143, + "learning_rate": 1.0508407806927302e-08, + "loss": 1.8684, + "step": 4806 + }, + { + "epoch": 2.9579263133605105, + "grad_norm": 6.000650882720947, + "learning_rate": 1.0206074907909147e-08, + "loss": 1.6025, + "step": 4807 + }, + { + "epoch": 2.958541650642258, + "grad_norm": 7.440823078155518, + "learning_rate": 9.908152645958791e-09, + "loss": 1.7512, + "step": 4808 + }, + { + "epoch": 2.959156987924006, + "grad_norm": 7.18161153793335, + "learning_rate": 9.614641152615457e-09, + "loss": 1.9538, + "step": 4809 + }, + { + "epoch": 
2.9597723252057535, + "grad_norm": 6.2112274169921875, + "learning_rate": 9.325540557465484e-09, + "loss": 1.7888, + "step": 4810 + }, + { + "epoch": 2.960387662487501, + "grad_norm": 6.196125507354736, + "learning_rate": 9.040850988153437e-09, + "loss": 1.9642, + "step": 4811 + }, + { + "epoch": 2.9610029997692484, + "grad_norm": 7.560054302215576, + "learning_rate": 8.760572570373215e-09, + "loss": 1.7395, + "step": 4812 + }, + { + "epoch": 2.961618337050996, + "grad_norm": 5.874863147735596, + "learning_rate": 8.484705427871387e-09, + "loss": 1.9771, + "step": 4813 + }, + { + "epoch": 2.9622336743327438, + "grad_norm": 5.455219268798828, + "learning_rate": 8.213249682447188e-09, + "loss": 1.8073, + "step": 4814 + }, + { + "epoch": 2.962849011614491, + "grad_norm": 11.868804931640625, + "learning_rate": 7.946205453953638e-09, + "loss": 1.838, + "step": 4815 + }, + { + "epoch": 2.9634643488962387, + "grad_norm": 6.275689125061035, + "learning_rate": 7.683572860291976e-09, + "loss": 1.9007, + "step": 4816 + }, + { + "epoch": 2.9640796861779863, + "grad_norm": 5.571803092956543, + "learning_rate": 7.425352017420562e-09, + "loss": 1.6353, + "step": 4817 + }, + { + "epoch": 2.964695023459734, + "grad_norm": 6.8441338539123535, + "learning_rate": 7.171543039345974e-09, + "loss": 1.9148, + "step": 4818 + }, + { + "epoch": 2.9653103607414817, + "grad_norm": 6.293154239654541, + "learning_rate": 6.9221460381296845e-09, + "loss": 1.6389, + "step": 4819 + }, + { + "epoch": 2.965925698023229, + "grad_norm": 5.692101955413818, + "learning_rate": 6.6771611238836135e-09, + "loss": 1.7479, + "step": 4820 + }, + { + "epoch": 2.9665410353049766, + "grad_norm": 5.81475830078125, + "learning_rate": 6.4365884047723486e-09, + "loss": 1.8069, + "step": 4821 + }, + { + "epoch": 2.9671563725867243, + "grad_norm": 6.8166022300720215, + "learning_rate": 6.200427987012037e-09, + "loss": 1.6898, + "step": 4822 + }, + { + "epoch": 2.9677717098684715, + "grad_norm": 5.079616546630859, + "learning_rate": 5.968679974870384e-09, + "loss": 1.8747, + "step": 4823 + }, + { + "epoch": 2.968387047150219, + "grad_norm": 5.643310546875, + "learning_rate": 5.741344470667765e-09, + "loss": 1.9004, + "step": 4824 + }, + { + "epoch": 2.969002384431967, + "grad_norm": 6.658138751983643, + "learning_rate": 5.518421574777222e-09, + "loss": 1.8982, + "step": 4825 + }, + { + "epoch": 2.969617721713714, + "grad_norm": 5.594161033630371, + "learning_rate": 5.299911385620027e-09, + "loss": 1.9462, + "step": 4826 + }, + { + "epoch": 2.9702330589954618, + "grad_norm": 5.426443099975586, + "learning_rate": 5.08581399967345e-09, + "loss": 1.8403, + "step": 4827 + }, + { + "epoch": 2.9708483962772094, + "grad_norm": 5.353538990020752, + "learning_rate": 4.8761295114641005e-09, + "loss": 1.922, + "step": 4828 + }, + { + "epoch": 2.971463733558957, + "grad_norm": 5.639281749725342, + "learning_rate": 4.670858013570146e-09, + "loss": 1.9055, + "step": 4829 + }, + { + "epoch": 2.9720790708407048, + "grad_norm": 6.038074016571045, + "learning_rate": 4.469999596622421e-09, + "loss": 1.7943, + "step": 4830 + }, + { + "epoch": 2.972694408122452, + "grad_norm": 6.001288414001465, + "learning_rate": 4.273554349303321e-09, + "loss": 1.8599, + "step": 4831 + }, + { + "epoch": 2.9733097454041997, + "grad_norm": 8.103233337402344, + "learning_rate": 4.081522358345691e-09, + "loss": 1.5693, + "step": 4832 + }, + { + "epoch": 2.9739250826859474, + "grad_norm": 6.323145389556885, + "learning_rate": 3.8939037085339305e-09, + "loss": 1.8301, + "step": 4833 + }, + 
{ + "epoch": 2.9745404199676946, + "grad_norm": 5.852814197540283, + "learning_rate": 3.7106984827040003e-09, + "loss": 1.7905, + "step": 4834 + }, + { + "epoch": 2.9751557572494423, + "grad_norm": 7.373106002807617, + "learning_rate": 3.5319067617445302e-09, + "loss": 1.6711, + "step": 4835 + }, + { + "epoch": 2.97577109453119, + "grad_norm": 6.393796920776367, + "learning_rate": 3.3575286245957073e-09, + "loss": 1.7463, + "step": 4836 + }, + { + "epoch": 2.9763864318129376, + "grad_norm": 7.714530944824219, + "learning_rate": 3.1875641482459476e-09, + "loss": 1.9278, + "step": 4837 + }, + { + "epoch": 2.9770017690946853, + "grad_norm": 6.198000431060791, + "learning_rate": 3.0220134077374452e-09, + "loss": 1.6237, + "step": 4838 + }, + { + "epoch": 2.9776171063764325, + "grad_norm": 7.44834041595459, + "learning_rate": 2.8608764761639542e-09, + "loss": 1.7951, + "step": 4839 + }, + { + "epoch": 2.97823244365818, + "grad_norm": 5.94035530090332, + "learning_rate": 2.7041534246696756e-09, + "loss": 1.7928, + "step": 4840 + }, + { + "epoch": 2.978847780939928, + "grad_norm": 6.232882499694824, + "learning_rate": 2.5518443224492594e-09, + "loss": 1.7961, + "step": 4841 + }, + { + "epoch": 2.979463118221675, + "grad_norm": 6.750180244445801, + "learning_rate": 2.403949236750025e-09, + "loss": 1.8225, + "step": 4842 + }, + { + "epoch": 2.9800784555034228, + "grad_norm": 8.20643424987793, + "learning_rate": 2.2604682328697393e-09, + "loss": 1.789, + "step": 4843 + }, + { + "epoch": 2.9806937927851704, + "grad_norm": 5.934808731079102, + "learning_rate": 2.1214013741577276e-09, + "loss": 1.9381, + "step": 4844 + }, + { + "epoch": 2.9813091300669177, + "grad_norm": 6.512531757354736, + "learning_rate": 1.9867487220126546e-09, + "loss": 1.6942, + "step": 4845 + }, + { + "epoch": 2.9819244673486653, + "grad_norm": 6.248026371002197, + "learning_rate": 1.856510335886963e-09, + "loss": 1.894, + "step": 4846 + }, + { + "epoch": 2.982539804630413, + "grad_norm": 7.200949668884277, + "learning_rate": 1.7306862732813234e-09, + "loss": 1.9813, + "step": 4847 + }, + { + "epoch": 2.9831551419121607, + "grad_norm": 8.916942596435547, + "learning_rate": 1.609276589750186e-09, + "loss": 1.7688, + "step": 4848 + }, + { + "epoch": 2.9837704791939084, + "grad_norm": 6.254885196685791, + "learning_rate": 1.492281338897339e-09, + "loss": 1.8795, + "step": 4849 + }, + { + "epoch": 2.9843858164756556, + "grad_norm": 6.191501140594482, + "learning_rate": 1.3797005723781287e-09, + "loss": 1.9303, + "step": 4850 + }, + { + "epoch": 2.9850011537574033, + "grad_norm": 5.418047904968262, + "learning_rate": 1.2715343398972401e-09, + "loss": 1.8383, + "step": 4851 + }, + { + "epoch": 2.985616491039151, + "grad_norm": 6.097516059875488, + "learning_rate": 1.1677826892131373e-09, + "loss": 1.8364, + "step": 4852 + }, + { + "epoch": 2.986231828320898, + "grad_norm": 5.921658039093018, + "learning_rate": 1.0684456661325116e-09, + "loss": 1.7522, + "step": 4853 + }, + { + "epoch": 2.986847165602646, + "grad_norm": 7.876391410827637, + "learning_rate": 9.73523314515834e-10, + "loss": 1.6451, + "step": 4854 + }, + { + "epoch": 2.9874625028843935, + "grad_norm": 6.342991828918457, + "learning_rate": 8.830156762706932e-10, + "loss": 1.5592, + "step": 4855 + }, + { + "epoch": 2.988077840166141, + "grad_norm": 7.4683613777160645, + "learning_rate": 7.969227913584565e-10, + "loss": 1.9716, + "step": 4856 + }, + { + "epoch": 2.988693177447889, + "grad_norm": 7.092097282409668, + "learning_rate": 7.152446977898297e-10, + "loss": 1.7646, + 
"step": 4857 + }, + { + "epoch": 2.989308514729636, + "grad_norm": 8.851576805114746, + "learning_rate": 6.379814316281873e-10, + "loss": 1.8368, + "step": 4858 + }, + { + "epoch": 2.9899238520113838, + "grad_norm": 6.467036247253418, + "learning_rate": 5.651330269840216e-10, + "loss": 1.9352, + "step": 4859 + }, + { + "epoch": 2.9905391892931315, + "grad_norm": 6.295518398284912, + "learning_rate": 4.966995160238242e-10, + "loss": 1.7697, + "step": 4860 + }, + { + "epoch": 2.9911545265748787, + "grad_norm": 6.022071838378906, + "learning_rate": 4.326809289600942e-10, + "loss": 1.8366, + "step": 4861 + }, + { + "epoch": 2.9917698638566264, + "grad_norm": 6.535977363586426, + "learning_rate": 3.7307729405799965e-10, + "loss": 1.8637, + "step": 4862 + }, + { + "epoch": 2.992385201138374, + "grad_norm": 6.713740348815918, + "learning_rate": 3.178886376331569e-10, + "loss": 1.7658, + "step": 4863 + }, + { + "epoch": 2.9930005384201213, + "grad_norm": 6.423643589019775, + "learning_rate": 2.6711498405385117e-10, + "loss": 1.7295, + "step": 4864 + }, + { + "epoch": 2.993615875701869, + "grad_norm": 6.397696018218994, + "learning_rate": 2.2075635573659549e-10, + "loss": 1.7332, + "step": 4865 + }, + { + "epoch": 2.9942312129836166, + "grad_norm": 5.858211517333984, + "learning_rate": 1.7881277314835132e-10, + "loss": 1.9443, + "step": 4866 + }, + { + "epoch": 2.9948465502653643, + "grad_norm": 6.681111812591553, + "learning_rate": 1.4128425480763874e-10, + "loss": 1.9665, + "step": 4867 + }, + { + "epoch": 2.995461887547112, + "grad_norm": 5.779622554779053, + "learning_rate": 1.081708172856466e-10, + "loss": 1.7812, + "step": 4868 + }, + { + "epoch": 2.996077224828859, + "grad_norm": 7.922970771789551, + "learning_rate": 7.947247520179169e-11, + "loss": 1.6798, + "step": 4869 + }, + { + "epoch": 2.996692562110607, + "grad_norm": 7.02757453918457, + "learning_rate": 5.518924122704938e-11, + "loss": 1.9097, + "step": 4870 + }, + { + "epoch": 2.9973078993923545, + "grad_norm": 7.591292858123779, + "learning_rate": 3.532112608062299e-11, + "loss": 1.8008, + "step": 4871 + }, + { + "epoch": 2.9979232366741018, + "grad_norm": 4.890567779541016, + "learning_rate": 1.9868138537715297e-11, + "loss": 1.9263, + "step": 4872 + }, + { + "epoch": 2.9985385739558494, + "grad_norm": 7.152225971221924, + "learning_rate": 8.830285418426343e-12, + "loss": 1.6145, + "step": 4873 + }, + { + "epoch": 2.999153911237597, + "grad_norm": 6.39163064956665, + "learning_rate": 2.207571598855651e-12, + "loss": 1.9042, + "step": 4874 + }, + { + "epoch": 2.999769248519345, + "grad_norm": 6.897037982940674, + "learning_rate": 0.0, + "loss": 1.9786, + "step": 4875 + }, + { + "epoch": 2.999769248519345, + "step": 4875, + "total_flos": 1.3767181169197056e+16, + "train_loss": 1.9322890693957988, + "train_runtime": 2394.6676, + "train_samples_per_second": 65.147, + "train_steps_per_second": 2.036 + } + ], + "logging_steps": 1.0, + "max_steps": 4875, + "num_input_tokens_seen": 0, + "num_train_epochs": 3, + "save_steps": 2000, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 1.3767181169197056e+16, + "train_batch_size": 4, + "trial_name": null, + "trial_params": null +}