diff --git "a/trainer_state.json" "b/trainer_state.json" deleted file mode 100644--- "a/trainer_state.json" +++ /dev/null @@ -1,78033 +0,0 @@ -{ - "best_metric": null, - "best_model_checkpoint": null, - "epoch": 0.9999230828397816, - "eval_steps": 500, - "global_step": 13000, - "is_hyper_param_search": false, - "is_local_process_zero": true, - "is_world_process_zero": true, - "log_history": [ - { - "epoch": 7.691716021844474e-05, - "learning_rate": 0.0029999999562067186, - "loss": 1.9092, - "step": 1 - }, - { - "epoch": 0.00015383432043688947, - "learning_rate": 0.0029999998248268762, - "loss": 1.9814, - "step": 2 - }, - { - "epoch": 0.0002307514806553342, - "learning_rate": 0.002999999605860481, - "loss": 1.7023, - "step": 3 - }, - { - "epoch": 0.00030766864087377894, - "learning_rate": 0.0029999992993075455, - "loss": 1.9262, - "step": 4 - }, - { - "epoch": 0.00038458580109222365, - "learning_rate": 0.002999998905168087, - "loss": 2.1237, - "step": 5 - }, - { - "epoch": 0.0004615029613106684, - "learning_rate": 0.0029999984234421297, - "loss": 1.6514, - "step": 6 - }, - { - "epoch": 0.0005384201215291131, - "learning_rate": 0.0029999978541297013, - "loss": 2.2443, - "step": 7 - }, - { - "epoch": 0.0006153372817475579, - "learning_rate": 0.0029999971972308344, - "loss": 1.872, - "step": 8 - }, - { - "epoch": 0.0006922544419660026, - "learning_rate": 0.0029999964527455685, - "loss": 1.8875, - "step": 9 - }, - { - "epoch": 0.0007691716021844473, - "learning_rate": 0.002999995620673947, - "loss": 2.0612, - "step": 10 - }, - { - "epoch": 0.0008460887624028921, - "learning_rate": 0.0029999947010160167, - "loss": 1.9884, - "step": 11 - }, - { - "epoch": 0.0009230059226213368, - "learning_rate": 0.0029999936937718332, - "loss": 2.2815, - "step": 12 - }, - { - "epoch": 0.0009999230828397815, - "learning_rate": 0.0029999925989414545, - "loss": 1.6098, - "step": 13 - }, - { - "epoch": 0.0010768402430582262, - "learning_rate": 0.0029999914165249448, - "loss": 1.8513, - "step": 14 - }, - { - "epoch": 0.001153757403276671, - "learning_rate": 0.002999990146522373, - "loss": 1.9658, - "step": 15 - }, - { - "epoch": 0.0012306745634951158, - "learning_rate": 0.002999988788933813, - "loss": 1.6379, - "step": 16 - }, - { - "epoch": 0.0013075917237135605, - "learning_rate": 0.0029999873437593447, - "loss": 1.7758, - "step": 17 - }, - { - "epoch": 0.0013845088839320053, - "learning_rate": 0.0029999858109990524, - "loss": 1.7943, - "step": 18 - }, - { - "epoch": 0.00146142604415045, - "learning_rate": 0.002999984190653025, - "loss": 1.3816, - "step": 19 - }, - { - "epoch": 0.0015383432043688946, - "learning_rate": 0.0029999824827213572, - "loss": 1.7113, - "step": 20 - }, - { - "epoch": 0.0016152603645873394, - "learning_rate": 0.0029999806872041497, - "loss": 1.6993, - "step": 21 - }, - { - "epoch": 0.0016921775248057841, - "learning_rate": 0.0029999788041015062, - "loss": 1.3472, - "step": 22 - }, - { - "epoch": 0.0017690946850242289, - "learning_rate": 0.002999976833413537, - "loss": 1.7283, - "step": 23 - }, - { - "epoch": 0.0018460118452426736, - "learning_rate": 0.002999974775140357, - "loss": 1.0342, - "step": 24 - }, - { - "epoch": 0.0019229290054611184, - "learning_rate": 0.0029999726292820874, - "loss": 1.3413, - "step": 25 - }, - { - "epoch": 0.001999846165679563, - "learning_rate": 0.0029999703958388524, - "loss": 1.3192, - "step": 26 - }, - { - "epoch": 0.002076763325898008, - "learning_rate": 0.0029999680748107826, - "loss": 1.4627, - "step": 27 - }, - { - "epoch": 0.0021536804861164525, - 
"learning_rate": 0.002999965666198014, - "loss": 1.2075, - "step": 28 - }, - { - "epoch": 0.0022305976463348975, - "learning_rate": 0.0029999631700006863, - "loss": 1.3928, - "step": 29 - }, - { - "epoch": 0.002307514806553342, - "learning_rate": 0.0029999605862189465, - "loss": 1.3366, - "step": 30 - }, - { - "epoch": 0.002384431966771787, - "learning_rate": 0.002999957914852945, - "loss": 1.2492, - "step": 31 - }, - { - "epoch": 0.0024613491269902315, - "learning_rate": 0.002999955155902837, - "loss": 1.5065, - "step": 32 - }, - { - "epoch": 0.002538266287208676, - "learning_rate": 0.0029999523093687847, - "loss": 1.3048, - "step": 33 - }, - { - "epoch": 0.002615183447427121, - "learning_rate": 0.002999949375250953, - "loss": 1.209, - "step": 34 - }, - { - "epoch": 0.0026921006076455656, - "learning_rate": 0.002999946353549515, - "loss": 1.457, - "step": 35 - }, - { - "epoch": 0.0027690177678640106, - "learning_rate": 0.002999943244264646, - "loss": 1.0777, - "step": 36 - }, - { - "epoch": 0.002845934928082455, - "learning_rate": 0.0029999400473965277, - "loss": 1.1821, - "step": 37 - }, - { - "epoch": 0.0029228520883009, - "learning_rate": 0.0029999367629453466, - "loss": 0.8748, - "step": 38 - }, - { - "epoch": 0.0029997692485193446, - "learning_rate": 0.0029999333909112953, - "loss": 1.3999, - "step": 39 - }, - { - "epoch": 0.003076686408737789, - "learning_rate": 0.0029999299312945694, - "loss": 1.1431, - "step": 40 - }, - { - "epoch": 0.003153603568956234, - "learning_rate": 0.0029999263840953722, - "loss": 1.2118, - "step": 41 - }, - { - "epoch": 0.0032305207291746787, - "learning_rate": 0.00299992274931391, - "loss": 1.1694, - "step": 42 - }, - { - "epoch": 0.0033074378893931237, - "learning_rate": 0.0029999190269503952, - "loss": 1.5292, - "step": 43 - }, - { - "epoch": 0.0033843550496115682, - "learning_rate": 0.0029999152170050456, - "loss": 0.9456, - "step": 44 - }, - { - "epoch": 0.0034612722098300132, - "learning_rate": 0.002999911319478083, - "loss": 1.3324, - "step": 45 - }, - { - "epoch": 0.0035381893700484578, - "learning_rate": 0.0029999073343697355, - "loss": 1.2328, - "step": 46 - }, - { - "epoch": 0.0036151065302669027, - "learning_rate": 0.002999903261680235, - "loss": 0.9419, - "step": 47 - }, - { - "epoch": 0.0036920236904853473, - "learning_rate": 0.0029998991014098205, - "loss": 0.8992, - "step": 48 - }, - { - "epoch": 0.003768940850703792, - "learning_rate": 0.0029998948535587345, - "loss": 1.1284, - "step": 49 - }, - { - "epoch": 0.003845858010922237, - "learning_rate": 0.0029998905181272237, - "loss": 1.3362, - "step": 50 - }, - { - "epoch": 0.003922775171140681, - "learning_rate": 0.0029998860951155437, - "loss": 1.3353, - "step": 51 - }, - { - "epoch": 0.003999692331359126, - "learning_rate": 0.0029998815845239508, - "loss": 1.1782, - "step": 52 - }, - { - "epoch": 0.004076609491577571, - "learning_rate": 0.002999876986352709, - "loss": 1.4899, - "step": 53 - }, - { - "epoch": 0.004153526651796016, - "learning_rate": 0.002999872300602087, - "loss": 1.2823, - "step": 54 - }, - { - "epoch": 0.00423044381201446, - "learning_rate": 0.0029998675272723584, - "loss": 1.1416, - "step": 55 - }, - { - "epoch": 0.004307360972232905, - "learning_rate": 0.0029998626663638017, - "loss": 1.4207, - "step": 56 - }, - { - "epoch": 0.0043842781324513495, - "learning_rate": 0.0029998577178767007, - "loss": 1.2175, - "step": 57 - }, - { - "epoch": 0.004461195292669795, - "learning_rate": 0.0029998526818113443, - "loss": 1.1006, - "step": 58 - }, - { - "epoch": 
0.0045381124528882395, - "learning_rate": 0.002999847558168027, - "loss": 1.2269, - "step": 59 - }, - { - "epoch": 0.004615029613106684, - "learning_rate": 0.002999842346947047, - "loss": 1.5156, - "step": 60 - }, - { - "epoch": 0.0046919467733251286, - "learning_rate": 0.00299983704814871, - "loss": 1.0262, - "step": 61 - }, - { - "epoch": 0.004768863933543574, - "learning_rate": 0.0029998316617733246, - "loss": 1.5848, - "step": 62 - }, - { - "epoch": 0.0048457810937620185, - "learning_rate": 0.002999826187821205, - "loss": 1.5708, - "step": 63 - }, - { - "epoch": 0.004922698253980463, - "learning_rate": 0.002999820626292671, - "loss": 1.1905, - "step": 64 - }, - { - "epoch": 0.004999615414198908, - "learning_rate": 0.0029998149771880483, - "loss": 1.4874, - "step": 65 - }, - { - "epoch": 0.005076532574417352, - "learning_rate": 0.002999809240507666, - "loss": 1.3132, - "step": 66 - }, - { - "epoch": 0.0051534497346357976, - "learning_rate": 0.0029998034162518586, - "loss": 1.2571, - "step": 67 - }, - { - "epoch": 0.005230366894854242, - "learning_rate": 0.0029997975044209666, - "loss": 1.3234, - "step": 68 - }, - { - "epoch": 0.005307284055072687, - "learning_rate": 0.002999791505015336, - "loss": 1.2275, - "step": 69 - }, - { - "epoch": 0.005384201215291131, - "learning_rate": 0.0029997854180353156, - "loss": 1.2061, - "step": 70 - }, - { - "epoch": 0.005461118375509577, - "learning_rate": 0.002999779243481262, - "loss": 1.1499, - "step": 71 - }, - { - "epoch": 0.005538035535728021, - "learning_rate": 0.0029997729813535353, - "loss": 1.4395, - "step": 72 - }, - { - "epoch": 0.005614952695946466, - "learning_rate": 0.0029997666316525008, - "loss": 1.416, - "step": 73 - }, - { - "epoch": 0.00569186985616491, - "learning_rate": 0.00299976019437853, - "loss": 1.0341, - "step": 74 - }, - { - "epoch": 0.005768787016383355, - "learning_rate": 0.002999753669531999, - "loss": 1.5975, - "step": 75 - }, - { - "epoch": 0.0058457041766018, - "learning_rate": 0.0029997470571132873, - "loss": 0.9277, - "step": 76 - }, - { - "epoch": 0.005922621336820245, - "learning_rate": 0.002999740357122782, - "loss": 0.894, - "step": 77 - }, - { - "epoch": 0.005999538497038689, - "learning_rate": 0.0029997335695608747, - "loss": 1.4544, - "step": 78 - }, - { - "epoch": 0.006076455657257134, - "learning_rate": 0.002999726694427961, - "loss": 0.9724, - "step": 79 - }, - { - "epoch": 0.006153372817475578, - "learning_rate": 0.0029997197317244433, - "loss": 1.4934, - "step": 80 - }, - { - "epoch": 0.006230289977694024, - "learning_rate": 0.0029997126814507264, - "loss": 1.193, - "step": 81 - }, - { - "epoch": 0.006307207137912468, - "learning_rate": 0.0029997055436072236, - "loss": 0.8429, - "step": 82 - }, - { - "epoch": 0.006384124298130913, - "learning_rate": 0.0029996983181943514, - "loss": 1.2726, - "step": 83 - }, - { - "epoch": 0.006461041458349357, - "learning_rate": 0.0029996910052125314, - "loss": 1.218, - "step": 84 - }, - { - "epoch": 0.006537958618567803, - "learning_rate": 0.00299968360466219, - "loss": 0.9128, - "step": 85 - }, - { - "epoch": 0.006614875778786247, - "learning_rate": 0.002999676116543761, - "loss": 1.2736, - "step": 86 - }, - { - "epoch": 0.006691792939004692, - "learning_rate": 0.00299966854085768, - "loss": 1.0608, - "step": 87 - }, - { - "epoch": 0.0067687100992231365, - "learning_rate": 0.00299966087760439, - "loss": 1.0455, - "step": 88 - }, - { - "epoch": 0.006845627259441581, - "learning_rate": 0.0029996531267843384, - "loss": 1.3783, - "step": 89 - }, - { - "epoch": 
0.0069225444196600264, - "learning_rate": 0.002999645288397978, - "loss": 1.0357, - "step": 90 - }, - { - "epoch": 0.006999461579878471, - "learning_rate": 0.0029996373624457664, - "loss": 1.2053, - "step": 91 - }, - { - "epoch": 0.0070763787400969155, - "learning_rate": 0.002999629348928166, - "loss": 1.098, - "step": 92 - }, - { - "epoch": 0.00715329590031536, - "learning_rate": 0.0029996212478456455, - "loss": 1.2888, - "step": 93 - }, - { - "epoch": 0.0072302130605338055, - "learning_rate": 0.002999613059198677, - "loss": 1.1579, - "step": 94 - }, - { - "epoch": 0.00730713022075225, - "learning_rate": 0.002999604782987739, - "loss": 1.2183, - "step": 95 - }, - { - "epoch": 0.007384047380970695, - "learning_rate": 0.0029995964192133157, - "loss": 0.9659, - "step": 96 - }, - { - "epoch": 0.007460964541189139, - "learning_rate": 0.002999587967875894, - "loss": 0.9437, - "step": 97 - }, - { - "epoch": 0.007537881701407584, - "learning_rate": 0.002999579428975968, - "loss": 1.0836, - "step": 98 - }, - { - "epoch": 0.007614798861626029, - "learning_rate": 0.002999570802514037, - "loss": 1.0699, - "step": 99 - }, - { - "epoch": 0.007691716021844474, - "learning_rate": 0.002999562088490603, - "loss": 0.7358, - "step": 100 - }, - { - "epoch": 0.007768633182062918, - "learning_rate": 0.0029995532869061766, - "loss": 1.0536, - "step": 101 - }, - { - "epoch": 0.007845550342281363, - "learning_rate": 0.002999544397761271, - "loss": 0.7006, - "step": 102 - }, - { - "epoch": 0.007922467502499808, - "learning_rate": 0.002999535421056405, - "loss": 0.8425, - "step": 103 - }, - { - "epoch": 0.007999384662718252, - "learning_rate": 0.0029995263567921026, - "loss": 1.2017, - "step": 104 - }, - { - "epoch": 0.008076301822936697, - "learning_rate": 0.0029995172049688936, - "loss": 0.9685, - "step": 105 - }, - { - "epoch": 0.008153218983155143, - "learning_rate": 0.0029995079655873126, - "loss": 1.0908, - "step": 106 - }, - { - "epoch": 0.008230136143373586, - "learning_rate": 0.0029994986386478986, - "loss": 0.7359, - "step": 107 - }, - { - "epoch": 0.008307053303592032, - "learning_rate": 0.0029994892241511965, - "loss": 1.1188, - "step": 108 - }, - { - "epoch": 0.008383970463810475, - "learning_rate": 0.0029994797220977555, - "loss": 1.5312, - "step": 109 - }, - { - "epoch": 0.00846088762402892, - "learning_rate": 0.002999470132488131, - "loss": 0.888, - "step": 110 - }, - { - "epoch": 0.008537804784247366, - "learning_rate": 0.002999460455322883, - "loss": 0.8843, - "step": 111 - }, - { - "epoch": 0.00861472194446581, - "learning_rate": 0.0029994506906025763, - "loss": 1.1051, - "step": 112 - }, - { - "epoch": 0.008691639104684255, - "learning_rate": 0.0029994408383277804, - "loss": 1.2141, - "step": 113 - }, - { - "epoch": 0.008768556264902699, - "learning_rate": 0.002999430898499072, - "loss": 1.2885, - "step": 114 - }, - { - "epoch": 0.008845473425121144, - "learning_rate": 0.0029994208711170305, - "loss": 1.3617, - "step": 115 - }, - { - "epoch": 0.00892239058533959, - "learning_rate": 0.0029994107561822415, - "loss": 1.0363, - "step": 116 - }, - { - "epoch": 0.008999307745558034, - "learning_rate": 0.002999400553695296, - "loss": 1.1184, - "step": 117 - }, - { - "epoch": 0.009076224905776479, - "learning_rate": 0.0029993902636567898, - "loss": 0.8976, - "step": 118 - }, - { - "epoch": 0.009153142065994924, - "learning_rate": 0.002999379886067323, - "loss": 1.283, - "step": 119 - }, - { - "epoch": 0.009230059226213368, - "learning_rate": 0.0029993694209275025, - "loss": 1.3905, - "step": 120 - }, - { 
- "epoch": 0.009306976386431813, - "learning_rate": 0.0029993588682379383, - "loss": 1.2637, - "step": 121 - }, - { - "epoch": 0.009383893546650257, - "learning_rate": 0.0029993482279992477, - "loss": 1.0531, - "step": 122 - }, - { - "epoch": 0.009460810706868703, - "learning_rate": 0.002999337500212051, - "loss": 1.2387, - "step": 123 - }, - { - "epoch": 0.009537727867087148, - "learning_rate": 0.0029993266848769755, - "loss": 1.2568, - "step": 124 - }, - { - "epoch": 0.009614645027305592, - "learning_rate": 0.0029993157819946524, - "loss": 1.0881, - "step": 125 - }, - { - "epoch": 0.009691562187524037, - "learning_rate": 0.002999304791565718, - "loss": 1.0521, - "step": 126 - }, - { - "epoch": 0.00976847934774248, - "learning_rate": 0.002999293713590814, - "loss": 1.1498, - "step": 127 - }, - { - "epoch": 0.009845396507960926, - "learning_rate": 0.0029992825480705883, - "loss": 0.8994, - "step": 128 - }, - { - "epoch": 0.009922313668179372, - "learning_rate": 0.0029992712950056917, - "loss": 1.3625, - "step": 129 - }, - { - "epoch": 0.009999230828397815, - "learning_rate": 0.0029992599543967814, - "loss": 1.3445, - "step": 130 - }, - { - "epoch": 0.01007614798861626, - "learning_rate": 0.00299924852624452, - "loss": 1.3456, - "step": 131 - }, - { - "epoch": 0.010153065148834704, - "learning_rate": 0.002999237010549575, - "loss": 1.2097, - "step": 132 - }, - { - "epoch": 0.01022998230905315, - "learning_rate": 0.0029992254073126184, - "loss": 1.2986, - "step": 133 - }, - { - "epoch": 0.010306899469271595, - "learning_rate": 0.0029992137165343277, - "loss": 1.3062, - "step": 134 - }, - { - "epoch": 0.010383816629490039, - "learning_rate": 0.0029992019382153855, - "loss": 1.0305, - "step": 135 - }, - { - "epoch": 0.010460733789708484, - "learning_rate": 0.00299919007235648, - "loss": 1.1904, - "step": 136 - }, - { - "epoch": 0.010537650949926928, - "learning_rate": 0.0029991781189583036, - "loss": 1.1767, - "step": 137 - }, - { - "epoch": 0.010614568110145373, - "learning_rate": 0.0029991660780215542, - "loss": 1.3267, - "step": 138 - }, - { - "epoch": 0.010691485270363819, - "learning_rate": 0.0029991539495469355, - "loss": 1.2611, - "step": 139 - }, - { - "epoch": 0.010768402430582262, - "learning_rate": 0.0029991417335351552, - "loss": 1.0256, - "step": 140 - }, - { - "epoch": 0.010845319590800708, - "learning_rate": 0.002999129429986927, - "loss": 1.1199, - "step": 141 - }, - { - "epoch": 0.010922236751019153, - "learning_rate": 0.002999117038902968, - "loss": 1.138, - "step": 142 - }, - { - "epoch": 0.010999153911237597, - "learning_rate": 0.002999104560284004, - "loss": 1.232, - "step": 143 - }, - { - "epoch": 0.011076071071456042, - "learning_rate": 0.0029990919941307614, - "loss": 1.1461, - "step": 144 - }, - { - "epoch": 0.011152988231674486, - "learning_rate": 0.0029990793404439755, - "loss": 1.1026, - "step": 145 - }, - { - "epoch": 0.011229905391892931, - "learning_rate": 0.0029990665992243844, - "loss": 0.9884, - "step": 146 - }, - { - "epoch": 0.011306822552111377, - "learning_rate": 0.0029990537704727327, - "loss": 1.3524, - "step": 147 - }, - { - "epoch": 0.01138373971232982, - "learning_rate": 0.0029990408541897685, - "loss": 1.2749, - "step": 148 - }, - { - "epoch": 0.011460656872548266, - "learning_rate": 0.002999027850376247, - "loss": 1.2295, - "step": 149 - }, - { - "epoch": 0.01153757403276671, - "learning_rate": 0.0029990147590329267, - "loss": 1.1709, - "step": 150 - }, - { - "epoch": 0.011614491192985155, - "learning_rate": 0.0029990015801605726, - "loss": 0.7045, 
- "step": 151 - }, - { - "epoch": 0.0116914083532036, - "learning_rate": 0.002998988313759954, - "loss": 1.0454, - "step": 152 - }, - { - "epoch": 0.011768325513422044, - "learning_rate": 0.0029989749598318453, - "loss": 1.2119, - "step": 153 - }, - { - "epoch": 0.01184524267364049, - "learning_rate": 0.0029989615183770267, - "loss": 1.4649, - "step": 154 - }, - { - "epoch": 0.011922159833858933, - "learning_rate": 0.002998947989396283, - "loss": 1.183, - "step": 155 - }, - { - "epoch": 0.011999076994077379, - "learning_rate": 0.002998934372890404, - "loss": 1.307, - "step": 156 - }, - { - "epoch": 0.012075994154295824, - "learning_rate": 0.0029989206688601847, - "loss": 0.9439, - "step": 157 - }, - { - "epoch": 0.012152911314514268, - "learning_rate": 0.0029989068773064254, - "loss": 1.4585, - "step": 158 - }, - { - "epoch": 0.012229828474732713, - "learning_rate": 0.0029988929982299314, - "loss": 1.0362, - "step": 159 - }, - { - "epoch": 0.012306745634951157, - "learning_rate": 0.0029988790316315132, - "loss": 0.9236, - "step": 160 - }, - { - "epoch": 0.012383662795169602, - "learning_rate": 0.0029988649775119862, - "loss": 1.1296, - "step": 161 - }, - { - "epoch": 0.012460579955388048, - "learning_rate": 0.002998850835872171, - "loss": 0.7674, - "step": 162 - }, - { - "epoch": 0.012537497115606491, - "learning_rate": 0.0029988366067128934, - "loss": 1.3065, - "step": 163 - }, - { - "epoch": 0.012614414275824937, - "learning_rate": 0.0029988222900349847, - "loss": 0.9165, - "step": 164 - }, - { - "epoch": 0.012691331436043382, - "learning_rate": 0.00299880788583928, - "loss": 1.0541, - "step": 165 - }, - { - "epoch": 0.012768248596261826, - "learning_rate": 0.002998793394126621, - "loss": 1.2432, - "step": 166 - }, - { - "epoch": 0.012845165756480271, - "learning_rate": 0.0029987788148978537, - "loss": 1.2695, - "step": 167 - }, - { - "epoch": 0.012922082916698715, - "learning_rate": 0.002998764148153829, - "loss": 1.6526, - "step": 168 - }, - { - "epoch": 0.01299900007691716, - "learning_rate": 0.0029987493938954048, - "loss": 1.4513, - "step": 169 - }, - { - "epoch": 0.013075917237135606, - "learning_rate": 0.0029987345521234406, - "loss": 1.1943, - "step": 170 - }, - { - "epoch": 0.01315283439735405, - "learning_rate": 0.002998719622838804, - "loss": 1.2793, - "step": 171 - }, - { - "epoch": 0.013229751557572495, - "learning_rate": 0.002998704606042367, - "loss": 1.2412, - "step": 172 - }, - { - "epoch": 0.013306668717790938, - "learning_rate": 0.002998689501735006, - "loss": 1.5079, - "step": 173 - }, - { - "epoch": 0.013383585878009384, - "learning_rate": 0.0029986743099176035, - "loss": 1.124, - "step": 174 - }, - { - "epoch": 0.01346050303822783, - "learning_rate": 0.0029986590305910456, - "loss": 1.4621, - "step": 175 - }, - { - "epoch": 0.013537420198446273, - "learning_rate": 0.0029986436637562254, - "loss": 0.8467, - "step": 176 - }, - { - "epoch": 0.013614337358664718, - "learning_rate": 0.0029986282094140396, - "loss": 1.0726, - "step": 177 - }, - { - "epoch": 0.013691254518883162, - "learning_rate": 0.002998612667565391, - "loss": 0.8901, - "step": 178 - }, - { - "epoch": 0.013768171679101607, - "learning_rate": 0.002998597038211187, - "loss": 1.2866, - "step": 179 - }, - { - "epoch": 0.013845088839320053, - "learning_rate": 0.0029985813213523394, - "loss": 1.2543, - "step": 180 - }, - { - "epoch": 0.013922005999538497, - "learning_rate": 0.0029985655169897673, - "loss": 0.9437, - "step": 181 - }, - { - "epoch": 0.013998923159756942, - "learning_rate": 0.002998549625124393, 
- "loss": 1.0591, - "step": 182 - }, - { - "epoch": 0.014075840319975386, - "learning_rate": 0.0029985336457571438, - "loss": 1.4138, - "step": 183 - }, - { - "epoch": 0.014152757480193831, - "learning_rate": 0.002998517578888954, - "loss": 1.6227, - "step": 184 - }, - { - "epoch": 0.014229674640412276, - "learning_rate": 0.0029985014245207608, - "loss": 1.4723, - "step": 185 - }, - { - "epoch": 0.01430659180063072, - "learning_rate": 0.0029984851826535073, - "loss": 1.3542, - "step": 186 - }, - { - "epoch": 0.014383508960849166, - "learning_rate": 0.0029984688532881426, - "loss": 1.76, - "step": 187 - }, - { - "epoch": 0.014460426121067611, - "learning_rate": 0.00299845243642562, - "loss": 0.858, - "step": 188 - }, - { - "epoch": 0.014537343281286055, - "learning_rate": 0.002998435932066898, - "loss": 1.4137, - "step": 189 - }, - { - "epoch": 0.0146142604415045, - "learning_rate": 0.00299841934021294, - "loss": 1.4808, - "step": 190 - }, - { - "epoch": 0.014691177601722944, - "learning_rate": 0.0029984026608647157, - "loss": 1.1505, - "step": 191 - }, - { - "epoch": 0.01476809476194139, - "learning_rate": 0.0029983858940231976, - "loss": 1.2201, - "step": 192 - }, - { - "epoch": 0.014845011922159835, - "learning_rate": 0.0029983690396893662, - "loss": 1.1977, - "step": 193 - }, - { - "epoch": 0.014921929082378278, - "learning_rate": 0.0029983520978642047, - "loss": 1.2986, - "step": 194 - }, - { - "epoch": 0.014998846242596724, - "learning_rate": 0.0029983350685487027, - "loss": 0.8975, - "step": 195 - }, - { - "epoch": 0.015075763402815167, - "learning_rate": 0.002998317951743854, - "loss": 1.3165, - "step": 196 - }, - { - "epoch": 0.015152680563033613, - "learning_rate": 0.0029983007474506597, - "loss": 1.2725, - "step": 197 - }, - { - "epoch": 0.015229597723252058, - "learning_rate": 0.0029982834556701228, - "loss": 1.3455, - "step": 198 - }, - { - "epoch": 0.015306514883470502, - "learning_rate": 0.0029982660764032534, - "loss": 1.0963, - "step": 199 - }, - { - "epoch": 0.015383432043688947, - "learning_rate": 0.0029982486096510664, - "loss": 1.0346, - "step": 200 - }, - { - "epoch": 0.015460349203907391, - "learning_rate": 0.0029982310554145814, - "loss": 1.281, - "step": 201 - }, - { - "epoch": 0.015537266364125836, - "learning_rate": 0.0029982134136948236, - "loss": 1.6145, - "step": 202 - }, - { - "epoch": 0.015614183524344282, - "learning_rate": 0.0029981956844928235, - "loss": 0.9711, - "step": 203 - }, - { - "epoch": 0.015691100684562725, - "learning_rate": 0.0029981778678096157, - "loss": 0.9053, - "step": 204 - }, - { - "epoch": 0.01576801784478117, - "learning_rate": 0.002998159963646241, - "loss": 1.4662, - "step": 205 - }, - { - "epoch": 0.015844935004999616, - "learning_rate": 0.002998141972003745, - "loss": 1.0082, - "step": 206 - }, - { - "epoch": 0.01592185216521806, - "learning_rate": 0.0029981238928831777, - "loss": 1.4221, - "step": 207 - }, - { - "epoch": 0.015998769325436504, - "learning_rate": 0.002998105726285595, - "loss": 0.7058, - "step": 208 - }, - { - "epoch": 0.01607568648565495, - "learning_rate": 0.0029980874722120576, - "loss": 1.0257, - "step": 209 - }, - { - "epoch": 0.016152603645873394, - "learning_rate": 0.0029980691306636313, - "loss": 1.1321, - "step": 210 - }, - { - "epoch": 0.01622952080609184, - "learning_rate": 0.0029980507016413877, - "loss": 1.0979, - "step": 211 - }, - { - "epoch": 0.016306437966310285, - "learning_rate": 0.0029980321851464015, - "loss": 1.3522, - "step": 212 - }, - { - "epoch": 0.016383355126528727, - "learning_rate": 
0.0029980135811797557, - "loss": 0.8686, - "step": 213 - }, - { - "epoch": 0.016460272286747173, - "learning_rate": 0.002997994889742535, - "loss": 1.5654, - "step": 214 - }, - { - "epoch": 0.016537189446965618, - "learning_rate": 0.002997976110835832, - "loss": 0.8115, - "step": 215 - }, - { - "epoch": 0.016614106607184063, - "learning_rate": 0.0029979572444607428, - "loss": 1.0477, - "step": 216 - }, - { - "epoch": 0.01669102376740251, - "learning_rate": 0.0029979382906183687, - "loss": 0.9833, - "step": 217 - }, - { - "epoch": 0.01676794092762095, - "learning_rate": 0.002997919249309816, - "loss": 1.4087, - "step": 218 - }, - { - "epoch": 0.016844858087839396, - "learning_rate": 0.0029979001205361986, - "loss": 0.9413, - "step": 219 - }, - { - "epoch": 0.01692177524805784, - "learning_rate": 0.0029978809042986313, - "loss": 1.3196, - "step": 220 - }, - { - "epoch": 0.016998692408276287, - "learning_rate": 0.0029978616005982375, - "loss": 1.3242, - "step": 221 - }, - { - "epoch": 0.017075609568494732, - "learning_rate": 0.0029978422094361435, - "loss": 1.1892, - "step": 222 - }, - { - "epoch": 0.017152526728713174, - "learning_rate": 0.0029978227308134816, - "loss": 1.1711, - "step": 223 - }, - { - "epoch": 0.01722944388893162, - "learning_rate": 0.0029978031647313894, - "loss": 1.1237, - "step": 224 - }, - { - "epoch": 0.017306361049150065, - "learning_rate": 0.0029977835111910096, - "loss": 1.2171, - "step": 225 - }, - { - "epoch": 0.01738327820936851, - "learning_rate": 0.0029977637701934902, - "loss": 1.4833, - "step": 226 - }, - { - "epoch": 0.017460195369586956, - "learning_rate": 0.002997743941739982, - "loss": 1.383, - "step": 227 - }, - { - "epoch": 0.017537112529805398, - "learning_rate": 0.0029977240258316456, - "loss": 1.2232, - "step": 228 - }, - { - "epoch": 0.017614029690023843, - "learning_rate": 0.0029977040224696415, - "loss": 1.2125, - "step": 229 - }, - { - "epoch": 0.01769094685024229, - "learning_rate": 0.002997683931655139, - "loss": 1.0057, - "step": 230 - }, - { - "epoch": 0.017767864010460734, - "learning_rate": 0.0029976637533893112, - "loss": 1.2638, - "step": 231 - }, - { - "epoch": 0.01784478117067918, - "learning_rate": 0.0029976434876733352, - "loss": 1.3071, - "step": 232 - }, - { - "epoch": 0.01792169833089762, - "learning_rate": 0.0029976231345083958, - "loss": 1.1912, - "step": 233 - }, - { - "epoch": 0.017998615491116067, - "learning_rate": 0.0029976026938956808, - "loss": 1.2945, - "step": 234 - }, - { - "epoch": 0.018075532651334512, - "learning_rate": 0.0029975821658363836, - "loss": 1.1906, - "step": 235 - }, - { - "epoch": 0.018152449811552958, - "learning_rate": 0.0029975615503317027, - "loss": 1.329, - "step": 236 - }, - { - "epoch": 0.018229366971771403, - "learning_rate": 0.0029975408473828426, - "loss": 1.1752, - "step": 237 - }, - { - "epoch": 0.01830628413198985, - "learning_rate": 0.0029975200569910113, - "loss": 1.157, - "step": 238 - }, - { - "epoch": 0.01838320129220829, - "learning_rate": 0.0029974991791574235, - "loss": 1.0557, - "step": 239 - }, - { - "epoch": 0.018460118452426736, - "learning_rate": 0.002997478213883298, - "loss": 0.9176, - "step": 240 - }, - { - "epoch": 0.01853703561264518, - "learning_rate": 0.0029974571611698585, - "loss": 1.3192, - "step": 241 - }, - { - "epoch": 0.018613952772863627, - "learning_rate": 0.002997436021018335, - "loss": 1.2865, - "step": 242 - }, - { - "epoch": 0.018690869933082072, - "learning_rate": 0.0029974147934299623, - "loss": 1.2836, - "step": 243 - }, - { - "epoch": 
0.018767787093300514, - "learning_rate": 0.002997393478405978, - "loss": 1.161, - "step": 244 - }, - { - "epoch": 0.01884470425351896, - "learning_rate": 0.002997372075947629, - "loss": 0.9845, - "step": 245 - }, - { - "epoch": 0.018921621413737405, - "learning_rate": 0.0029973505860561634, - "loss": 1.3582, - "step": 246 - }, - { - "epoch": 0.01899853857395585, - "learning_rate": 0.002997329008732837, - "loss": 1.3781, - "step": 247 - }, - { - "epoch": 0.019075455734174296, - "learning_rate": 0.0029973073439789086, - "loss": 1.0758, - "step": 248 - }, - { - "epoch": 0.019152372894392738, - "learning_rate": 0.0029972855917956447, - "loss": 1.1509, - "step": 249 - }, - { - "epoch": 0.019229290054611183, - "learning_rate": 0.0029972637521843144, - "loss": 1.2729, - "step": 250 - }, - { - "epoch": 0.01930620721482963, - "learning_rate": 0.002997241825146193, - "loss": 1.3322, - "step": 251 - }, - { - "epoch": 0.019383124375048074, - "learning_rate": 0.0029972198106825616, - "loss": 1.2791, - "step": 252 - }, - { - "epoch": 0.01946004153526652, - "learning_rate": 0.0029971977087947044, - "loss": 1.4703, - "step": 253 - }, - { - "epoch": 0.01953695869548496, - "learning_rate": 0.002997175519483913, - "loss": 1.2515, - "step": 254 - }, - { - "epoch": 0.019613875855703407, - "learning_rate": 0.0029971532427514828, - "loss": 1.1393, - "step": 255 - }, - { - "epoch": 0.019690793015921852, - "learning_rate": 0.002997130878598714, - "loss": 1.3116, - "step": 256 - }, - { - "epoch": 0.019767710176140298, - "learning_rate": 0.0029971084270269135, - "loss": 1.0154, - "step": 257 - }, - { - "epoch": 0.019844627336358743, - "learning_rate": 0.002997085888037391, - "loss": 1.0423, - "step": 258 - }, - { - "epoch": 0.019921544496577185, - "learning_rate": 0.002997063261631464, - "loss": 1.0003, - "step": 259 - }, - { - "epoch": 0.01999846165679563, - "learning_rate": 0.0029970405478104522, - "loss": 1.2568, - "step": 260 - }, - { - "epoch": 0.020075378817014076, - "learning_rate": 0.0029970177465756835, - "loss": 1.1609, - "step": 261 - }, - { - "epoch": 0.02015229597723252, - "learning_rate": 0.002996994857928488, - "loss": 1.55, - "step": 262 - }, - { - "epoch": 0.020229213137450967, - "learning_rate": 0.0029969718818702025, - "loss": 1.1516, - "step": 263 - }, - { - "epoch": 0.02030613029766941, - "learning_rate": 0.0029969488184021687, - "loss": 1.3809, - "step": 264 - }, - { - "epoch": 0.020383047457887854, - "learning_rate": 0.002996925667525733, - "loss": 1.4538, - "step": 265 - }, - { - "epoch": 0.0204599646181063, - "learning_rate": 0.0029969024292422486, - "loss": 0.8137, - "step": 266 - }, - { - "epoch": 0.020536881778324745, - "learning_rate": 0.0029968791035530702, - "loss": 1.0519, - "step": 267 - }, - { - "epoch": 0.02061379893854319, - "learning_rate": 0.0029968556904595616, - "loss": 1.4587, - "step": 268 - }, - { - "epoch": 0.020690716098761632, - "learning_rate": 0.0029968321899630887, - "loss": 1.2783, - "step": 269 - }, - { - "epoch": 0.020767633258980078, - "learning_rate": 0.0029968086020650247, - "loss": 1.0779, - "step": 270 - }, - { - "epoch": 0.020844550419198523, - "learning_rate": 0.0029967849267667464, - "loss": 1.0817, - "step": 271 - }, - { - "epoch": 0.02092146757941697, - "learning_rate": 0.0029967611640696365, - "loss": 1.3202, - "step": 272 - }, - { - "epoch": 0.020998384739635414, - "learning_rate": 0.0029967373139750822, - "loss": 1.3649, - "step": 273 - }, - { - "epoch": 0.021075301899853856, - "learning_rate": 0.002996713376484476, - "loss": 1.101, - "step": 274 - }, 
- { - "epoch": 0.0211522190600723, - "learning_rate": 0.002996689351599216, - "loss": 1.0024, - "step": 275 - }, - { - "epoch": 0.021229136220290747, - "learning_rate": 0.0029966652393207055, - "loss": 1.2042, - "step": 276 - }, - { - "epoch": 0.021306053380509192, - "learning_rate": 0.002996641039650351, - "loss": 0.9547, - "step": 277 - }, - { - "epoch": 0.021382970540727637, - "learning_rate": 0.002996616752589567, - "loss": 1.2709, - "step": 278 - }, - { - "epoch": 0.02145988770094608, - "learning_rate": 0.0029965923781397714, - "loss": 1.166, - "step": 279 - }, - { - "epoch": 0.021536804861164525, - "learning_rate": 0.0029965679163023863, - "loss": 1.1005, - "step": 280 - }, - { - "epoch": 0.02161372202138297, - "learning_rate": 0.0029965433670788417, - "loss": 1.1991, - "step": 281 - }, - { - "epoch": 0.021690639181601416, - "learning_rate": 0.0029965187304705695, - "loss": 1.2865, - "step": 282 - }, - { - "epoch": 0.02176755634181986, - "learning_rate": 0.0029964940064790098, - "loss": 1.2444, - "step": 283 - }, - { - "epoch": 0.021844473502038306, - "learning_rate": 0.0029964691951056043, - "loss": 1.1044, - "step": 284 - }, - { - "epoch": 0.02192139066225675, - "learning_rate": 0.0029964442963518042, - "loss": 1.3341, - "step": 285 - }, - { - "epoch": 0.021998307822475194, - "learning_rate": 0.002996419310219061, - "loss": 0.9102, - "step": 286 - }, - { - "epoch": 0.02207522498269364, - "learning_rate": 0.0029963942367088356, - "loss": 1.6223, - "step": 287 - }, - { - "epoch": 0.022152142142912085, - "learning_rate": 0.002996369075822591, - "loss": 1.2945, - "step": 288 - }, - { - "epoch": 0.02222905930313053, - "learning_rate": 0.0029963438275617963, - "loss": 1.1997, - "step": 289 - }, - { - "epoch": 0.022305976463348972, - "learning_rate": 0.002996318491927926, - "loss": 1.1263, - "step": 290 - }, - { - "epoch": 0.022382893623567417, - "learning_rate": 0.00299629306892246, - "loss": 1.1537, - "step": 291 - }, - { - "epoch": 0.022459810783785863, - "learning_rate": 0.002996267558546882, - "loss": 1.0203, - "step": 292 - }, - { - "epoch": 0.022536727944004308, - "learning_rate": 0.0029962419608026815, - "loss": 1.1749, - "step": 293 - }, - { - "epoch": 0.022613645104222754, - "learning_rate": 0.0029962162756913538, - "loss": 0.9919, - "step": 294 - }, - { - "epoch": 0.022690562264441196, - "learning_rate": 0.002996190503214399, - "loss": 1.3889, - "step": 295 - }, - { - "epoch": 0.02276747942465964, - "learning_rate": 0.0029961646433733207, - "loss": 1.2516, - "step": 296 - }, - { - "epoch": 0.022844396584878086, - "learning_rate": 0.00299613869616963, - "loss": 1.1091, - "step": 297 - }, - { - "epoch": 0.022921313745096532, - "learning_rate": 0.0029961126616048417, - "loss": 0.9692, - "step": 298 - }, - { - "epoch": 0.022998230905314977, - "learning_rate": 0.0029960865396804755, - "loss": 1.6793, - "step": 299 - }, - { - "epoch": 0.02307514806553342, - "learning_rate": 0.002996060330398057, - "loss": 1.5065, - "step": 300 - }, - { - "epoch": 0.023152065225751865, - "learning_rate": 0.0029960340337591173, - "loss": 1.0508, - "step": 301 - }, - { - "epoch": 0.02322898238597031, - "learning_rate": 0.0029960076497651906, - "loss": 1.2244, - "step": 302 - }, - { - "epoch": 0.023305899546188755, - "learning_rate": 0.0029959811784178187, - "loss": 1.2929, - "step": 303 - }, - { - "epoch": 0.0233828167064072, - "learning_rate": 0.0029959546197185463, - "loss": 1.0615, - "step": 304 - }, - { - "epoch": 0.023459733866625643, - "learning_rate": 0.002995927973668925, - "loss": 1.2689, - 
"step": 305 - }, - { - "epoch": 0.023536651026844088, - "learning_rate": 0.0029959012402705097, - "loss": 1.1925, - "step": 306 - }, - { - "epoch": 0.023613568187062534, - "learning_rate": 0.0029958744195248623, - "loss": 1.0969, - "step": 307 - }, - { - "epoch": 0.02369048534728098, - "learning_rate": 0.0029958475114335486, - "loss": 1.2156, - "step": 308 - }, - { - "epoch": 0.023767402507499424, - "learning_rate": 0.0029958205159981398, - "loss": 1.3017, - "step": 309 - }, - { - "epoch": 0.023844319667717866, - "learning_rate": 0.002995793433220212, - "loss": 1.0581, - "step": 310 - }, - { - "epoch": 0.023921236827936312, - "learning_rate": 0.002995766263101347, - "loss": 1.2094, - "step": 311 - }, - { - "epoch": 0.023998153988154757, - "learning_rate": 0.002995739005643131, - "loss": 0.9861, - "step": 312 - }, - { - "epoch": 0.024075071148373203, - "learning_rate": 0.0029957116608471555, - "loss": 0.9938, - "step": 313 - }, - { - "epoch": 0.024151988308591648, - "learning_rate": 0.002995684228715017, - "loss": 1.0069, - "step": 314 - }, - { - "epoch": 0.02422890546881009, - "learning_rate": 0.0029956567092483182, - "loss": 1.0306, - "step": 315 - }, - { - "epoch": 0.024305822629028535, - "learning_rate": 0.0029956291024486655, - "loss": 1.2565, - "step": 316 - }, - { - "epoch": 0.02438273978924698, - "learning_rate": 0.0029956014083176706, - "loss": 1.054, - "step": 317 - }, - { - "epoch": 0.024459656949465426, - "learning_rate": 0.0029955736268569505, - "loss": 0.9882, - "step": 318 - }, - { - "epoch": 0.02453657410968387, - "learning_rate": 0.002995545758068128, - "loss": 1.4163, - "step": 319 - }, - { - "epoch": 0.024613491269902314, - "learning_rate": 0.0029955178019528297, - "loss": 0.9077, - "step": 320 - }, - { - "epoch": 0.02469040843012076, - "learning_rate": 0.002995489758512689, - "loss": 1.0501, - "step": 321 - }, - { - "epoch": 0.024767325590339204, - "learning_rate": 0.002995461627749342, - "loss": 1.2069, - "step": 322 - }, - { - "epoch": 0.02484424275055765, - "learning_rate": 0.0029954334096644326, - "loss": 1.3046, - "step": 323 - }, - { - "epoch": 0.024921159910776095, - "learning_rate": 0.0029954051042596075, - "loss": 1.3116, - "step": 324 - }, - { - "epoch": 0.02499807707099454, - "learning_rate": 0.0029953767115365204, - "loss": 1.2408, - "step": 325 - }, - { - "epoch": 0.025074994231212983, - "learning_rate": 0.002995348231496828, - "loss": 0.8897, - "step": 326 - }, - { - "epoch": 0.025151911391431428, - "learning_rate": 0.0029953196641421946, - "loss": 1.9307, - "step": 327 - }, - { - "epoch": 0.025228828551649873, - "learning_rate": 0.002995291009474287, - "loss": 1.0036, - "step": 328 - }, - { - "epoch": 0.02530574571186832, - "learning_rate": 0.0029952622674947796, - "loss": 1.1598, - "step": 329 - }, - { - "epoch": 0.025382662872086764, - "learning_rate": 0.0029952334382053495, - "loss": 0.8737, - "step": 330 - }, - { - "epoch": 0.025459580032305206, - "learning_rate": 0.0029952045216076807, - "loss": 1.431, - "step": 331 - }, - { - "epoch": 0.02553649719252365, - "learning_rate": 0.0029951755177034622, - "loss": 1.4716, - "step": 332 - }, - { - "epoch": 0.025613414352742097, - "learning_rate": 0.0029951464264943864, - "loss": 0.9831, - "step": 333 - }, - { - "epoch": 0.025690331512960542, - "learning_rate": 0.0029951172479821533, - "loss": 1.1124, - "step": 334 - }, - { - "epoch": 0.025767248673178988, - "learning_rate": 0.002995087982168465, - "loss": 1.0785, - "step": 335 - }, - { - "epoch": 0.02584416583339743, - "learning_rate": 0.0029950586290550317, 
- "loss": 1.1973, - "step": 336 - }, - { - "epoch": 0.025921082993615875, - "learning_rate": 0.0029950291886435667, - "loss": 1.3794, - "step": 337 - }, - { - "epoch": 0.02599800015383432, - "learning_rate": 0.00299499966093579, - "loss": 1.0184, - "step": 338 - }, - { - "epoch": 0.026074917314052766, - "learning_rate": 0.002994970045933424, - "loss": 1.3075, - "step": 339 - }, - { - "epoch": 0.02615183447427121, - "learning_rate": 0.0029949403436382, - "loss": 1.416, - "step": 340 - }, - { - "epoch": 0.026228751634489653, - "learning_rate": 0.0029949105540518508, - "loss": 1.4782, - "step": 341 - }, - { - "epoch": 0.0263056687947081, - "learning_rate": 0.0029948806771761165, - "loss": 1.1191, - "step": 342 - }, - { - "epoch": 0.026382585954926544, - "learning_rate": 0.0029948507130127416, - "loss": 0.8146, - "step": 343 - }, - { - "epoch": 0.02645950311514499, - "learning_rate": 0.0029948206615634755, - "loss": 2.0228, - "step": 344 - }, - { - "epoch": 0.026536420275363435, - "learning_rate": 0.0029947905228300733, - "loss": 1.329, - "step": 345 - }, - { - "epoch": 0.026613337435581877, - "learning_rate": 0.0029947602968142945, - "loss": 1.0044, - "step": 346 - }, - { - "epoch": 0.026690254595800322, - "learning_rate": 0.002994729983517904, - "loss": 1.1451, - "step": 347 - }, - { - "epoch": 0.026767171756018768, - "learning_rate": 0.002994699582942673, - "loss": 1.4733, - "step": 348 - }, - { - "epoch": 0.026844088916237213, - "learning_rate": 0.002994669095090374, - "loss": 0.8037, - "step": 349 - }, - { - "epoch": 0.02692100607645566, - "learning_rate": 0.00299463851996279, - "loss": 1.0593, - "step": 350 - }, - { - "epoch": 0.0269979232366741, - "learning_rate": 0.0029946078575617046, - "loss": 1.276, - "step": 351 - }, - { - "epoch": 0.027074840396892546, - "learning_rate": 0.002994577107888909, - "loss": 1.3483, - "step": 352 - }, - { - "epoch": 0.02715175755711099, - "learning_rate": 0.0029945462709461985, - "loss": 0.9461, - "step": 353 - }, - { - "epoch": 0.027228674717329437, - "learning_rate": 0.0029945153467353736, - "loss": 1.2791, - "step": 354 - }, - { - "epoch": 0.027305591877547882, - "learning_rate": 0.00299448433525824, - "loss": 0.7401, - "step": 355 - }, - { - "epoch": 0.027382509037766324, - "learning_rate": 0.0029944532365166086, - "loss": 1.539, - "step": 356 - }, - { - "epoch": 0.02745942619798477, - "learning_rate": 0.002994422050512295, - "loss": 1.0192, - "step": 357 - }, - { - "epoch": 0.027536343358203215, - "learning_rate": 0.002994390777247121, - "loss": 1.0759, - "step": 358 - }, - { - "epoch": 0.02761326051842166, - "learning_rate": 0.0029943594167229116, - "loss": 0.9437, - "step": 359 - }, - { - "epoch": 0.027690177678640106, - "learning_rate": 0.0029943279689414984, - "loss": 1.3718, - "step": 360 - }, - { - "epoch": 0.027767094838858548, - "learning_rate": 0.002994296433904718, - "loss": 1.1736, - "step": 361 - }, - { - "epoch": 0.027844011999076993, - "learning_rate": 0.0029942648116144115, - "loss": 1.116, - "step": 362 - }, - { - "epoch": 0.02792092915929544, - "learning_rate": 0.002994233102072425, - "loss": 1.3106, - "step": 363 - }, - { - "epoch": 0.027997846319513884, - "learning_rate": 0.002994201305280611, - "loss": 1.15, - "step": 364 - }, - { - "epoch": 0.02807476347973233, - "learning_rate": 0.0029941694212408253, - "loss": 1.177, - "step": 365 - }, - { - "epoch": 0.02815168063995077, - "learning_rate": 0.0029941374499549297, - "loss": 1.2402, - "step": 366 - }, - { - "epoch": 0.028228597800169217, - "learning_rate": 0.002994105391424792, 
- "loss": 1.4125, - "step": 367 - }, - { - "epoch": 0.028305514960387662, - "learning_rate": 0.0029940732456522822, - "loss": 1.2672, - "step": 368 - }, - { - "epoch": 0.028382432120606108, - "learning_rate": 0.0029940410126392796, - "loss": 1.5322, - "step": 369 - }, - { - "epoch": 0.028459349280824553, - "learning_rate": 0.0029940086923876644, - "loss": 1.2714, - "step": 370 - }, - { - "epoch": 0.028536266441043, - "learning_rate": 0.0029939762848993252, - "loss": 1.2146, - "step": 371 - }, - { - "epoch": 0.02861318360126144, - "learning_rate": 0.0029939437901761535, - "loss": 1.4489, - "step": 372 - }, - { - "epoch": 0.028690100761479886, - "learning_rate": 0.002993911208220047, - "loss": 0.878, - "step": 373 - }, - { - "epoch": 0.02876701792169833, - "learning_rate": 0.002993878539032908, - "loss": 0.9767, - "step": 374 - }, - { - "epoch": 0.028843935081916777, - "learning_rate": 0.0029938457826166444, - "loss": 1.1914, - "step": 375 - }, - { - "epoch": 0.028920852242135222, - "learning_rate": 0.0029938129389731685, - "loss": 1.1226, - "step": 376 - }, - { - "epoch": 0.028997769402353664, - "learning_rate": 0.0029937800081043984, - "loss": 1.3933, - "step": 377 - }, - { - "epoch": 0.02907468656257211, - "learning_rate": 0.002993746990012257, - "loss": 1.2843, - "step": 378 - }, - { - "epoch": 0.029151603722790555, - "learning_rate": 0.002993713884698672, - "loss": 1.5428, - "step": 379 - }, - { - "epoch": 0.029228520883009, - "learning_rate": 0.0029936806921655765, - "loss": 1.1047, - "step": 380 - }, - { - "epoch": 0.029305438043227446, - "learning_rate": 0.002993647412414909, - "loss": 1.5107, - "step": 381 - }, - { - "epoch": 0.029382355203445887, - "learning_rate": 0.002993614045448612, - "loss": 1.3802, - "step": 382 - }, - { - "epoch": 0.029459272363664333, - "learning_rate": 0.0029935805912686343, - "loss": 1.1889, - "step": 383 - }, - { - "epoch": 0.02953618952388278, - "learning_rate": 0.0029935470498769295, - "loss": 1.0533, - "step": 384 - }, - { - "epoch": 0.029613106684101224, - "learning_rate": 0.0029935134212754557, - "loss": 1.1599, - "step": 385 - }, - { - "epoch": 0.02969002384431967, - "learning_rate": 0.0029934797054661775, - "loss": 0.9938, - "step": 386 - }, - { - "epoch": 0.02976694100453811, - "learning_rate": 0.0029934459024510618, - "loss": 1.2057, - "step": 387 - }, - { - "epoch": 0.029843858164756557, - "learning_rate": 0.002993412012232084, - "loss": 0.9993, - "step": 388 - }, - { - "epoch": 0.029920775324975002, - "learning_rate": 0.002993378034811222, - "loss": 1.4387, - "step": 389 - }, - { - "epoch": 0.029997692485193447, - "learning_rate": 0.002993343970190461, - "loss": 1.2548, - "step": 390 - }, - { - "epoch": 0.030074609645411893, - "learning_rate": 0.0029933098183717883, - "loss": 1.747, - "step": 391 - }, - { - "epoch": 0.030151526805630335, - "learning_rate": 0.0029932755793571995, - "loss": 1.5008, - "step": 392 - }, - { - "epoch": 0.03022844396584878, - "learning_rate": 0.002993241253148693, - "loss": 1.0202, - "step": 393 - }, - { - "epoch": 0.030305361126067226, - "learning_rate": 0.002993206839748274, - "loss": 1.0718, - "step": 394 - }, - { - "epoch": 0.03038227828628567, - "learning_rate": 0.0029931723391579514, - "loss": 1.2228, - "step": 395 - }, - { - "epoch": 0.030459195446504116, - "learning_rate": 0.0029931377513797396, - "loss": 1.4326, - "step": 396 - }, - { - "epoch": 0.03053611260672256, - "learning_rate": 0.0029931030764156585, - "loss": 1.293, - "step": 397 - }, - { - "epoch": 0.030613029766941004, - "learning_rate": 
0.0029930683142677323, - "loss": 1.514, - "step": 398 - }, - { - "epoch": 0.03068994692715945, - "learning_rate": 0.0029930334649379917, - "loss": 1.1175, - "step": 399 - }, - { - "epoch": 0.030766864087377895, - "learning_rate": 0.002992998528428471, - "loss": 1.0261, - "step": 400 - }, - { - "epoch": 0.03084378124759634, - "learning_rate": 0.0029929635047412097, - "loss": 1.2312, - "step": 401 - }, - { - "epoch": 0.030920698407814782, - "learning_rate": 0.002992928393878254, - "loss": 1.1255, - "step": 402 - }, - { - "epoch": 0.030997615568033227, - "learning_rate": 0.0029928931958416536, - "loss": 1.2353, - "step": 403 - }, - { - "epoch": 0.031074532728251673, - "learning_rate": 0.002992857910633463, - "loss": 1.3461, - "step": 404 - }, - { - "epoch": 0.031151449888470118, - "learning_rate": 0.002992822538255744, - "loss": 1.3139, - "step": 405 - }, - { - "epoch": 0.031228367048688564, - "learning_rate": 0.0029927870787105604, - "loss": 1.2285, - "step": 406 - }, - { - "epoch": 0.031305284208907005, - "learning_rate": 0.002992751531999984, - "loss": 1.4545, - "step": 407 - }, - { - "epoch": 0.03138220136912545, - "learning_rate": 0.00299271589812609, - "loss": 1.2949, - "step": 408 - }, - { - "epoch": 0.031459118529343896, - "learning_rate": 0.002992680177090959, - "loss": 1.0288, - "step": 409 - }, - { - "epoch": 0.03153603568956234, - "learning_rate": 0.0029926443688966763, - "loss": 0.8704, - "step": 410 - }, - { - "epoch": 0.03161295284978079, - "learning_rate": 0.0029926084735453335, - "loss": 1.3113, - "step": 411 - }, - { - "epoch": 0.03168987000999923, - "learning_rate": 0.002992572491039027, - "loss": 0.9521, - "step": 412 - }, - { - "epoch": 0.03176678717021768, - "learning_rate": 0.002992536421379857, - "loss": 1.1472, - "step": 413 - }, - { - "epoch": 0.03184370433043612, - "learning_rate": 0.0029925002645699297, - "loss": 1.3, - "step": 414 - }, - { - "epoch": 0.03192062149065456, - "learning_rate": 0.002992464020611356, - "loss": 1.5984, - "step": 415 - }, - { - "epoch": 0.03199753865087301, - "learning_rate": 0.0029924276895062535, - "loss": 0.7235, - "step": 416 - }, - { - "epoch": 0.03207445581109145, - "learning_rate": 0.0029923912712567424, - "loss": 1.4046, - "step": 417 - }, - { - "epoch": 0.0321513729713099, - "learning_rate": 0.00299235476586495, - "loss": 1.1582, - "step": 418 - }, - { - "epoch": 0.032228290131528343, - "learning_rate": 0.002992318173333007, - "loss": 1.2819, - "step": 419 - }, - { - "epoch": 0.03230520729174679, - "learning_rate": 0.002992281493663051, - "loss": 1.0412, - "step": 420 - }, - { - "epoch": 0.032382124451965234, - "learning_rate": 0.0029922447268572235, - "loss": 1.2739, - "step": 421 - }, - { - "epoch": 0.03245904161218368, - "learning_rate": 0.0029922078729176706, - "loss": 1.2815, - "step": 422 - }, - { - "epoch": 0.032535958772402125, - "learning_rate": 0.0029921709318465454, - "loss": 1.4791, - "step": 423 - }, - { - "epoch": 0.03261287593262057, - "learning_rate": 0.002992133903646004, - "loss": 1.1519, - "step": 424 - }, - { - "epoch": 0.03268979309283901, - "learning_rate": 0.0029920967883182094, - "loss": 1.0397, - "step": 425 - }, - { - "epoch": 0.032766710253057454, - "learning_rate": 0.002992059585865328, - "loss": 0.7153, - "step": 426 - }, - { - "epoch": 0.0328436274132759, - "learning_rate": 0.0029920222962895324, - "loss": 1.1698, - "step": 427 - }, - { - "epoch": 0.032920544573494345, - "learning_rate": 0.0029919849195930003, - "loss": 1.0705, - "step": 428 - }, - { - "epoch": 0.03299746173371279, - "learning_rate": 
0.0029919474557779137, - "loss": 1.2043, - "step": 429 - }, - { - "epoch": 0.033074378893931236, - "learning_rate": 0.00299190990484646, - "loss": 1.1393, - "step": 430 - }, - { - "epoch": 0.03315129605414968, - "learning_rate": 0.0029918722668008323, - "loss": 1.285, - "step": 431 - }, - { - "epoch": 0.03322821321436813, - "learning_rate": 0.002991834541643228, - "loss": 1.0071, - "step": 432 - }, - { - "epoch": 0.03330513037458657, - "learning_rate": 0.00299179672937585, - "loss": 1.197, - "step": 433 - }, - { - "epoch": 0.03338204753480502, - "learning_rate": 0.0029917588300009063, - "loss": 1.0339, - "step": 434 - }, - { - "epoch": 0.03345896469502346, - "learning_rate": 0.00299172084352061, - "loss": 1.3188, - "step": 435 - }, - { - "epoch": 0.0335358818552419, - "learning_rate": 0.002991682769937179, - "loss": 1.0145, - "step": 436 - }, - { - "epoch": 0.03361279901546035, - "learning_rate": 0.0029916446092528364, - "loss": 0.8821, - "step": 437 - }, - { - "epoch": 0.03368971617567879, - "learning_rate": 0.0029916063614698106, - "loss": 0.9893, - "step": 438 - }, - { - "epoch": 0.03376663333589724, - "learning_rate": 0.002991568026590335, - "loss": 1.1368, - "step": 439 - }, - { - "epoch": 0.03384355049611568, - "learning_rate": 0.0029915296046166477, - "loss": 0.9755, - "step": 440 - }, - { - "epoch": 0.03392046765633413, - "learning_rate": 0.0029914910955509922, - "loss": 1.1123, - "step": 441 - }, - { - "epoch": 0.033997384816552574, - "learning_rate": 0.0029914524993956177, - "loss": 1.1482, - "step": 442 - }, - { - "epoch": 0.03407430197677102, - "learning_rate": 0.002991413816152777, - "loss": 0.9763, - "step": 443 - }, - { - "epoch": 0.034151219136989465, - "learning_rate": 0.0029913750458247287, - "loss": 0.8, - "step": 444 - }, - { - "epoch": 0.03422813629720791, - "learning_rate": 0.002991336188413738, - "loss": 1.1179, - "step": 445 - }, - { - "epoch": 0.03430505345742635, - "learning_rate": 0.002991297243922073, - "loss": 1.185, - "step": 446 - }, - { - "epoch": 0.034381970617644794, - "learning_rate": 0.0029912582123520076, - "loss": 1.2302, - "step": 447 - }, - { - "epoch": 0.03445888777786324, - "learning_rate": 0.0029912190937058207, - "loss": 1.2877, - "step": 448 - }, - { - "epoch": 0.034535804938081685, - "learning_rate": 0.0029911798879857974, - "loss": 0.9684, - "step": 449 - }, - { - "epoch": 0.03461272209830013, - "learning_rate": 0.0029911405951942257, - "loss": 1.5201, - "step": 450 - }, - { - "epoch": 0.034689639258518576, - "learning_rate": 0.002991101215333401, - "loss": 1.19, - "step": 451 - }, - { - "epoch": 0.03476655641873702, - "learning_rate": 0.002991061748405623, - "loss": 1.2372, - "step": 452 - }, - { - "epoch": 0.03484347357895547, - "learning_rate": 0.0029910221944131946, - "loss": 1.1227, - "step": 453 - }, - { - "epoch": 0.03492039073917391, - "learning_rate": 0.0029909825533584265, - "loss": 0.8888, - "step": 454 - }, - { - "epoch": 0.03499730789939236, - "learning_rate": 0.0029909428252436338, - "loss": 1.14, - "step": 455 - }, - { - "epoch": 0.035074225059610796, - "learning_rate": 0.002990903010071135, - "loss": 1.1346, - "step": 456 - }, - { - "epoch": 0.03515114221982924, - "learning_rate": 0.0029908631078432563, - "loss": 1.4103, - "step": 457 - }, - { - "epoch": 0.03522805938004769, - "learning_rate": 0.0029908231185623263, - "loss": 1.0122, - "step": 458 - }, - { - "epoch": 0.03530497654026613, - "learning_rate": 0.0029907830422306815, - "loss": 1.1954, - "step": 459 - }, - { - "epoch": 0.03538189370048458, - "learning_rate": 
0.0029907428788506604, - "loss": 1.2236, - "step": 460 - }, - { - "epoch": 0.03545881086070302, - "learning_rate": 0.0029907026284246097, - "loss": 0.8695, - "step": 461 - }, - { - "epoch": 0.03553572802092147, - "learning_rate": 0.0029906622909548785, - "loss": 0.7571, - "step": 462 - }, - { - "epoch": 0.035612645181139914, - "learning_rate": 0.0029906218664438227, - "loss": 1.1938, - "step": 463 - }, - { - "epoch": 0.03568956234135836, - "learning_rate": 0.0029905813548938027, - "loss": 0.971, - "step": 464 - }, - { - "epoch": 0.035766479501576805, - "learning_rate": 0.002990540756307184, - "loss": 1.3198, - "step": 465 - }, - { - "epoch": 0.03584339666179524, - "learning_rate": 0.002990500070686337, - "loss": 0.9848, - "step": 466 - }, - { - "epoch": 0.03592031382201369, - "learning_rate": 0.0029904592980336373, - "loss": 1.342, - "step": 467 - }, - { - "epoch": 0.035997230982232134, - "learning_rate": 0.002990418438351466, - "loss": 1.4538, - "step": 468 - }, - { - "epoch": 0.03607414814245058, - "learning_rate": 0.0029903774916422094, - "loss": 1.2134, - "step": 469 - }, - { - "epoch": 0.036151065302669025, - "learning_rate": 0.002990336457908257, - "loss": 1.1878, - "step": 470 - }, - { - "epoch": 0.03622798246288747, - "learning_rate": 0.002990295337152006, - "loss": 1.277, - "step": 471 - }, - { - "epoch": 0.036304899623105916, - "learning_rate": 0.0029902541293758576, - "loss": 1.2576, - "step": 472 - }, - { - "epoch": 0.03638181678332436, - "learning_rate": 0.0029902128345822166, - "loss": 0.758, - "step": 473 - }, - { - "epoch": 0.036458733943542806, - "learning_rate": 0.0029901714527734956, - "loss": 1.2159, - "step": 474 - }, - { - "epoch": 0.03653565110376125, - "learning_rate": 0.00299012998395211, - "loss": 1.1898, - "step": 475 - }, - { - "epoch": 0.0366125682639797, - "learning_rate": 0.0029900884281204823, - "loss": 1.3354, - "step": 476 - }, - { - "epoch": 0.036689485424198136, - "learning_rate": 0.0029900467852810382, - "loss": 1.1051, - "step": 477 - }, - { - "epoch": 0.03676640258441658, - "learning_rate": 0.002990005055436209, - "loss": 1.3771, - "step": 478 - }, - { - "epoch": 0.03684331974463503, - "learning_rate": 0.0029899632385884324, - "loss": 1.0696, - "step": 479 - }, - { - "epoch": 0.03692023690485347, - "learning_rate": 0.002989921334740149, - "loss": 1.15, - "step": 480 - }, - { - "epoch": 0.03699715406507192, - "learning_rate": 0.002989879343893806, - "loss": 1.3266, - "step": 481 - }, - { - "epoch": 0.03707407122529036, - "learning_rate": 0.002989837266051856, - "loss": 1.2287, - "step": 482 - }, - { - "epoch": 0.03715098838550881, - "learning_rate": 0.002989795101216755, - "loss": 1.2021, - "step": 483 - }, - { - "epoch": 0.037227905545727254, - "learning_rate": 0.0029897528493909656, - "loss": 1.4613, - "step": 484 - }, - { - "epoch": 0.0373048227059457, - "learning_rate": 0.0029897105105769544, - "loss": 0.9157, - "step": 485 - }, - { - "epoch": 0.037381739866164145, - "learning_rate": 0.002989668084777194, - "loss": 1.4113, - "step": 486 - }, - { - "epoch": 0.03745865702638258, - "learning_rate": 0.0029896255719941618, - "loss": 1.1766, - "step": 487 - }, - { - "epoch": 0.03753557418660103, - "learning_rate": 0.00298958297223034, - "loss": 1.5105, - "step": 488 - }, - { - "epoch": 0.037612491346819474, - "learning_rate": 0.0029895402854882157, - "loss": 1.1155, - "step": 489 - }, - { - "epoch": 0.03768940850703792, - "learning_rate": 0.002989497511770282, - "loss": 1.1206, - "step": 490 - }, - { - "epoch": 0.037766325667256365, - "learning_rate": 
0.002989454651079037, - "loss": 0.8207, - "step": 491 - }, - { - "epoch": 0.03784324282747481, - "learning_rate": 0.0029894117034169813, - "loss": 1.1771, - "step": 492 - }, - { - "epoch": 0.037920159987693255, - "learning_rate": 0.0029893686687866247, - "loss": 1.0857, - "step": 493 - }, - { - "epoch": 0.0379970771479117, - "learning_rate": 0.0029893255471904797, - "loss": 1.0518, - "step": 494 - }, - { - "epoch": 0.038073994308130146, - "learning_rate": 0.002989282338631063, - "loss": 1.1607, - "step": 495 - }, - { - "epoch": 0.03815091146834859, - "learning_rate": 0.0029892390431108997, - "loss": 0.9763, - "step": 496 - }, - { - "epoch": 0.03822782862856703, - "learning_rate": 0.0029891956606325158, - "loss": 1.2348, - "step": 497 - }, - { - "epoch": 0.038304745788785476, - "learning_rate": 0.002989152191198445, - "loss": 1.3312, - "step": 498 - }, - { - "epoch": 0.03838166294900392, - "learning_rate": 0.0029891086348112265, - "loss": 1.1876, - "step": 499 - }, - { - "epoch": 0.038458580109222366, - "learning_rate": 0.002989064991473402, - "loss": 1.3925, - "step": 500 - }, - { - "epoch": 0.03853549726944081, - "learning_rate": 0.002989021261187522, - "loss": 1.2061, - "step": 501 - }, - { - "epoch": 0.03861241442965926, - "learning_rate": 0.0029889774439561378, - "loss": 1.2061, - "step": 502 - }, - { - "epoch": 0.0386893315898777, - "learning_rate": 0.002988933539781809, - "loss": 1.178, - "step": 503 - }, - { - "epoch": 0.03876624875009615, - "learning_rate": 0.0029888895486670994, - "loss": 1.0782, - "step": 504 - }, - { - "epoch": 0.038843165910314593, - "learning_rate": 0.002988845470614577, - "loss": 1.1933, - "step": 505 - }, - { - "epoch": 0.03892008307053304, - "learning_rate": 0.002988801305626816, - "loss": 1.0163, - "step": 506 - }, - { - "epoch": 0.03899700023075148, - "learning_rate": 0.0029887570537063955, - "loss": 1.1219, - "step": 507 - }, - { - "epoch": 0.03907391739096992, - "learning_rate": 0.002988712714855899, - "loss": 1.0401, - "step": 508 - }, - { - "epoch": 0.03915083455118837, - "learning_rate": 0.0029886682890779156, - "loss": 1.1159, - "step": 509 - }, - { - "epoch": 0.039227751711406814, - "learning_rate": 0.002988623776375039, - "loss": 0.8955, - "step": 510 - }, - { - "epoch": 0.03930466887162526, - "learning_rate": 0.002988579176749869, - "loss": 1.1181, - "step": 511 - }, - { - "epoch": 0.039381586031843704, - "learning_rate": 0.0029885344902050096, - "loss": 0.998, - "step": 512 - }, - { - "epoch": 0.03945850319206215, - "learning_rate": 0.00298848971674307, - "loss": 0.9474, - "step": 513 - }, - { - "epoch": 0.039535420352280595, - "learning_rate": 0.0029884448563666645, - "loss": 1.3869, - "step": 514 - }, - { - "epoch": 0.03961233751249904, - "learning_rate": 0.0029883999090784125, - "loss": 1.0953, - "step": 515 - }, - { - "epoch": 0.039689254672717486, - "learning_rate": 0.0029883548748809385, - "loss": 1.061, - "step": 516 - }, - { - "epoch": 0.039766171832935925, - "learning_rate": 0.0029883097537768723, - "loss": 1.1206, - "step": 517 - }, - { - "epoch": 0.03984308899315437, - "learning_rate": 0.0029882645457688485, - "loss": 1.0127, - "step": 518 - }, - { - "epoch": 0.039920006153372815, - "learning_rate": 0.002988219250859507, - "loss": 1.5546, - "step": 519 - }, - { - "epoch": 0.03999692331359126, - "learning_rate": 0.0029881738690514924, - "loss": 1.1249, - "step": 520 - }, - { - "epoch": 0.040073840473809706, - "learning_rate": 0.0029881284003474545, - "loss": 1.5855, - "step": 521 - }, - { - "epoch": 0.04015075763402815, - 
"learning_rate": 0.002988082844750049, - "loss": 1.1175, - "step": 522 - }, - { - "epoch": 0.0402276747942466, - "learning_rate": 0.0029880372022619344, - "loss": 1.0745, - "step": 523 - }, - { - "epoch": 0.04030459195446504, - "learning_rate": 0.002987991472885777, - "loss": 1.1992, - "step": 524 - }, - { - "epoch": 0.04038150911468349, - "learning_rate": 0.0029879456566242473, - "loss": 1.2495, - "step": 525 - }, - { - "epoch": 0.04045842627490193, - "learning_rate": 0.0029878997534800197, - "loss": 1.0105, - "step": 526 - }, - { - "epoch": 0.04053534343512038, - "learning_rate": 0.0029878537634557747, - "loss": 1.2723, - "step": 527 - }, - { - "epoch": 0.04061226059533882, - "learning_rate": 0.0029878076865541984, - "loss": 1.4241, - "step": 528 - }, - { - "epoch": 0.04068917775555726, - "learning_rate": 0.00298776152277798, - "loss": 0.9104, - "step": 529 - }, - { - "epoch": 0.04076609491577571, - "learning_rate": 0.0029877152721298162, - "loss": 1.346, - "step": 530 - }, - { - "epoch": 0.04084301207599415, - "learning_rate": 0.002987668934612407, - "loss": 1.1694, - "step": 531 - }, - { - "epoch": 0.0409199292362126, - "learning_rate": 0.0029876225102284585, - "loss": 0.8494, - "step": 532 - }, - { - "epoch": 0.040996846396431044, - "learning_rate": 0.0029875759989806813, - "loss": 0.9455, - "step": 533 - }, - { - "epoch": 0.04107376355664949, - "learning_rate": 0.002987529400871791, - "loss": 1.3369, - "step": 534 - }, - { - "epoch": 0.041150680716867935, - "learning_rate": 0.002987482715904509, - "loss": 1.1992, - "step": 535 - }, - { - "epoch": 0.04122759787708638, - "learning_rate": 0.0029874359440815604, - "loss": 1.2606, - "step": 536 - }, - { - "epoch": 0.041304515037304826, - "learning_rate": 0.0029873890854056778, - "loss": 1.5976, - "step": 537 - }, - { - "epoch": 0.041381432197523264, - "learning_rate": 0.002987342139879596, - "loss": 1.2291, - "step": 538 - }, - { - "epoch": 0.04145834935774171, - "learning_rate": 0.002987295107506056, - "loss": 0.8531, - "step": 539 - }, - { - "epoch": 0.041535266517960155, - "learning_rate": 0.002987247988287805, - "loss": 1.3273, - "step": 540 - }, - { - "epoch": 0.0416121836781786, - "learning_rate": 0.002987200782227594, - "loss": 1.0323, - "step": 541 - }, - { - "epoch": 0.041689100838397046, - "learning_rate": 0.0029871534893281797, - "loss": 0.9876, - "step": 542 - }, - { - "epoch": 0.04176601799861549, - "learning_rate": 0.002987106109592323, - "loss": 1.7456, - "step": 543 - }, - { - "epoch": 0.04184293515883394, - "learning_rate": 0.0029870586430227907, - "loss": 0.9066, - "step": 544 - }, - { - "epoch": 0.04191985231905238, - "learning_rate": 0.0029870110896223546, - "loss": 1.4167, - "step": 545 - }, - { - "epoch": 0.04199676947927083, - "learning_rate": 0.0029869634493937913, - "loss": 1.4998, - "step": 546 - }, - { - "epoch": 0.04207368663948927, - "learning_rate": 0.002986915722339882, - "loss": 1.2597, - "step": 547 - }, - { - "epoch": 0.04215060379970771, - "learning_rate": 0.0029868679084634146, - "loss": 1.2115, - "step": 548 - }, - { - "epoch": 0.04222752095992616, - "learning_rate": 0.0029868200077671804, - "loss": 1.027, - "step": 549 - }, - { - "epoch": 0.0423044381201446, - "learning_rate": 0.0029867720202539764, - "loss": 1.1086, - "step": 550 - }, - { - "epoch": 0.04238135528036305, - "learning_rate": 0.0029867239459266046, - "loss": 1.2673, - "step": 551 - }, - { - "epoch": 0.04245827244058149, - "learning_rate": 0.002986675784787872, - "loss": 1.4465, - "step": 552 - }, - { - "epoch": 0.04253518960079994, - 
"learning_rate": 0.0029866275368405915, - "loss": 1.1503, - "step": 553 - }, - { - "epoch": 0.042612106761018384, - "learning_rate": 0.0029865792020875794, - "loss": 1.2056, - "step": 554 - }, - { - "epoch": 0.04268902392123683, - "learning_rate": 0.0029865307805316582, - "loss": 0.8459, - "step": 555 - }, - { - "epoch": 0.042765941081455275, - "learning_rate": 0.0029864822721756557, - "loss": 0.896, - "step": 556 - }, - { - "epoch": 0.04284285824167372, - "learning_rate": 0.0029864336770224044, - "loss": 1.189, - "step": 557 - }, - { - "epoch": 0.04291977540189216, - "learning_rate": 0.0029863849950747413, - "loss": 1.0664, - "step": 558 - }, - { - "epoch": 0.042996692562110604, - "learning_rate": 0.002986336226335509, - "loss": 1.0806, - "step": 559 - }, - { - "epoch": 0.04307360972232905, - "learning_rate": 0.002986287370807556, - "loss": 0.7105, - "step": 560 - }, - { - "epoch": 0.043150526882547495, - "learning_rate": 0.002986238428493734, - "loss": 1.3178, - "step": 561 - }, - { - "epoch": 0.04322744404276594, - "learning_rate": 0.0029861893993969016, - "loss": 1.0987, - "step": 562 - }, - { - "epoch": 0.043304361202984386, - "learning_rate": 0.002986140283519921, - "loss": 1.0808, - "step": 563 - }, - { - "epoch": 0.04338127836320283, - "learning_rate": 0.0029860910808656607, - "loss": 1.2433, - "step": 564 - }, - { - "epoch": 0.04345819552342128, - "learning_rate": 0.0029860417914369932, - "loss": 1.2721, - "step": 565 - }, - { - "epoch": 0.04353511268363972, - "learning_rate": 0.0029859924152367966, - "loss": 1.0626, - "step": 566 - }, - { - "epoch": 0.04361202984385817, - "learning_rate": 0.0029859429522679547, - "loss": 1.2338, - "step": 567 - }, - { - "epoch": 0.04368894700407661, - "learning_rate": 0.002985893402533355, - "loss": 1.0487, - "step": 568 - }, - { - "epoch": 0.04376586416429505, - "learning_rate": 0.002985843766035891, - "loss": 1.0549, - "step": 569 - }, - { - "epoch": 0.0438427813245135, - "learning_rate": 0.002985794042778461, - "loss": 0.9051, - "step": 570 - }, - { - "epoch": 0.04391969848473194, - "learning_rate": 0.002985744232763968, - "loss": 0.9448, - "step": 571 - }, - { - "epoch": 0.04399661564495039, - "learning_rate": 0.002985694335995321, - "loss": 1.314, - "step": 572 - }, - { - "epoch": 0.04407353280516883, - "learning_rate": 0.002985644352475434, - "loss": 1.0038, - "step": 573 - }, - { - "epoch": 0.04415044996538728, - "learning_rate": 0.0029855942822072244, - "loss": 1.2588, - "step": 574 - }, - { - "epoch": 0.044227367125605724, - "learning_rate": 0.0029855441251936165, - "loss": 1.1826, - "step": 575 - }, - { - "epoch": 0.04430428428582417, - "learning_rate": 0.0029854938814375385, - "loss": 1.1935, - "step": 576 - }, - { - "epoch": 0.044381201446042615, - "learning_rate": 0.0029854435509419253, - "loss": 1.3885, - "step": 577 - }, - { - "epoch": 0.04445811860626106, - "learning_rate": 0.0029853931337097146, - "loss": 1.5294, - "step": 578 - }, - { - "epoch": 0.0445350357664795, - "learning_rate": 0.0029853426297438515, - "loss": 1.0981, - "step": 579 - }, - { - "epoch": 0.044611952926697944, - "learning_rate": 0.0029852920390472833, - "loss": 1.5897, - "step": 580 - }, - { - "epoch": 0.04468887008691639, - "learning_rate": 0.0029852413616229655, - "loss": 1.2229, - "step": 581 - }, - { - "epoch": 0.044765787247134835, - "learning_rate": 0.002985190597473857, - "loss": 1.0862, - "step": 582 - }, - { - "epoch": 0.04484270440735328, - "learning_rate": 0.002985139746602921, - "loss": 1.0314, - "step": 583 - }, - { - "epoch": 
0.044919621567571726, - "learning_rate": 0.0029850888090131276, - "loss": 1.0007, - "step": 584 - }, - { - "epoch": 0.04499653872779017, - "learning_rate": 0.002985037784707451, - "loss": 1.0844, - "step": 585 - }, - { - "epoch": 0.045073455888008616, - "learning_rate": 0.002984986673688871, - "loss": 0.95, - "step": 586 - }, - { - "epoch": 0.04515037304822706, - "learning_rate": 0.002984935475960371, - "loss": 0.7464, - "step": 587 - }, - { - "epoch": 0.04522729020844551, - "learning_rate": 0.002984884191524941, - "loss": 1.3654, - "step": 588 - }, - { - "epoch": 0.045304207368663946, - "learning_rate": 0.0029848328203855756, - "loss": 1.3091, - "step": 589 - }, - { - "epoch": 0.04538112452888239, - "learning_rate": 0.0029847813625452743, - "loss": 1.201, - "step": 590 - }, - { - "epoch": 0.04545804168910084, - "learning_rate": 0.002984729818007042, - "loss": 0.9744, - "step": 591 - }, - { - "epoch": 0.04553495884931928, - "learning_rate": 0.002984678186773888, - "loss": 1.5352, - "step": 592 - }, - { - "epoch": 0.04561187600953773, - "learning_rate": 0.002984626468848828, - "loss": 1.1746, - "step": 593 - }, - { - "epoch": 0.04568879316975617, - "learning_rate": 0.0029845746642348805, - "loss": 1.044, - "step": 594 - }, - { - "epoch": 0.04576571032997462, - "learning_rate": 0.0029845227729350717, - "loss": 0.9655, - "step": 595 - }, - { - "epoch": 0.045842627490193064, - "learning_rate": 0.0029844707949524306, - "loss": 1.1739, - "step": 596 - }, - { - "epoch": 0.04591954465041151, - "learning_rate": 0.0029844187302899934, - "loss": 1.2033, - "step": 597 - }, - { - "epoch": 0.045996461810629954, - "learning_rate": 0.002984366578950799, - "loss": 1.0512, - "step": 598 - }, - { - "epoch": 0.04607337897084839, - "learning_rate": 0.002984314340937893, - "loss": 1.1152, - "step": 599 - }, - { - "epoch": 0.04615029613106684, - "learning_rate": 0.002984262016254326, - "loss": 1.351, - "step": 600 - }, - { - "epoch": 0.046227213291285284, - "learning_rate": 0.0029842096049031526, - "loss": 1.1306, - "step": 601 - }, - { - "epoch": 0.04630413045150373, - "learning_rate": 0.002984157106887434, - "loss": 1.548, - "step": 602 - }, - { - "epoch": 0.046381047611722175, - "learning_rate": 0.002984104522210235, - "loss": 1.4748, - "step": 603 - }, - { - "epoch": 0.04645796477194062, - "learning_rate": 0.0029840518508746266, - "loss": 1.2217, - "step": 604 - }, - { - "epoch": 0.046534881932159065, - "learning_rate": 0.0029839990928836836, - "loss": 1.1253, - "step": 605 - }, - { - "epoch": 0.04661179909237751, - "learning_rate": 0.002983946248240487, - "loss": 0.9298, - "step": 606 - }, - { - "epoch": 0.046688716252595956, - "learning_rate": 0.0029838933169481227, - "loss": 0.8226, - "step": 607 - }, - { - "epoch": 0.0467656334128144, - "learning_rate": 0.0029838402990096805, - "loss": 0.8816, - "step": 608 - }, - { - "epoch": 0.04684255057303285, - "learning_rate": 0.0029837871944282577, - "loss": 1.0678, - "step": 609 - }, - { - "epoch": 0.046919467733251286, - "learning_rate": 0.0029837340032069534, - "loss": 0.8852, - "step": 610 - }, - { - "epoch": 0.04699638489346973, - "learning_rate": 0.0029836807253488746, - "loss": 1.4652, - "step": 611 - }, - { - "epoch": 0.047073302053688176, - "learning_rate": 0.0029836273608571324, - "loss": 1.445, - "step": 612 - }, - { - "epoch": 0.04715021921390662, - "learning_rate": 0.0029835739097348424, - "loss": 1.1648, - "step": 613 - }, - { - "epoch": 0.04722713637412507, - "learning_rate": 0.002983520371985125, - "loss": 1.0133, - "step": 614 - }, - { - "epoch": 
0.04730405353434351, - "learning_rate": 0.0029834667476111073, - "loss": 1.1974, - "step": 615 - }, - { - "epoch": 0.04738097069456196, - "learning_rate": 0.0029834130366159202, - "loss": 1.2651, - "step": 616 - }, - { - "epoch": 0.0474578878547804, - "learning_rate": 0.0029833592390027, - "loss": 0.8177, - "step": 617 - }, - { - "epoch": 0.04753480501499885, - "learning_rate": 0.0029833053547745885, - "loss": 1.5305, - "step": 618 - }, - { - "epoch": 0.047611722175217294, - "learning_rate": 0.0029832513839347305, - "loss": 1.1941, - "step": 619 - }, - { - "epoch": 0.04768863933543573, - "learning_rate": 0.002983197326486279, - "loss": 1.0049, - "step": 620 - }, - { - "epoch": 0.04776555649565418, - "learning_rate": 0.0029831431824323892, - "loss": 1.4915, - "step": 621 - }, - { - "epoch": 0.047842473655872624, - "learning_rate": 0.002983088951776224, - "loss": 0.9474, - "step": 622 - }, - { - "epoch": 0.04791939081609107, - "learning_rate": 0.0029830346345209487, - "loss": 1.2237, - "step": 623 - }, - { - "epoch": 0.047996307976309514, - "learning_rate": 0.0029829802306697356, - "loss": 1.4277, - "step": 624 - }, - { - "epoch": 0.04807322513652796, - "learning_rate": 0.0029829257402257613, - "loss": 1.0073, - "step": 625 - }, - { - "epoch": 0.048150142296746405, - "learning_rate": 0.0029828711631922082, - "loss": 0.9298, - "step": 626 - }, - { - "epoch": 0.04822705945696485, - "learning_rate": 0.0029828164995722617, - "loss": 1.5419, - "step": 627 - }, - { - "epoch": 0.048303976617183296, - "learning_rate": 0.002982761749369115, - "loss": 1.1724, - "step": 628 - }, - { - "epoch": 0.04838089377740174, - "learning_rate": 0.002982706912585964, - "loss": 1.0517, - "step": 629 - }, - { - "epoch": 0.04845781093762018, - "learning_rate": 0.0029826519892260117, - "loss": 1.3097, - "step": 630 - }, - { - "epoch": 0.048534728097838625, - "learning_rate": 0.002982596979292464, - "loss": 1.1801, - "step": 631 - }, - { - "epoch": 0.04861164525805707, - "learning_rate": 0.0029825418827885337, - "loss": 1.2174, - "step": 632 - }, - { - "epoch": 0.048688562418275516, - "learning_rate": 0.002982486699717438, - "loss": 0.9513, - "step": 633 - }, - { - "epoch": 0.04876547957849396, - "learning_rate": 0.0029824314300823987, - "loss": 1.3247, - "step": 634 - }, - { - "epoch": 0.04884239673871241, - "learning_rate": 0.002982376073886643, - "loss": 1.0623, - "step": 635 - }, - { - "epoch": 0.04891931389893085, - "learning_rate": 0.002982320631133404, - "loss": 1.3911, - "step": 636 - }, - { - "epoch": 0.0489962310591493, - "learning_rate": 0.0029822651018259184, - "loss": 1.2937, - "step": 637 - }, - { - "epoch": 0.04907314821936774, - "learning_rate": 0.002982209485967428, - "loss": 1.1114, - "step": 638 - }, - { - "epoch": 0.04915006537958619, - "learning_rate": 0.002982153783561182, - "loss": 1.2588, - "step": 639 - }, - { - "epoch": 0.04922698253980463, - "learning_rate": 0.0029820979946104313, - "loss": 1.2962, - "step": 640 - }, - { - "epoch": 0.04930389970002307, - "learning_rate": 0.0029820421191184346, - "loss": 1.272, - "step": 641 - }, - { - "epoch": 0.04938081686024152, - "learning_rate": 0.002981986157088453, - "loss": 1.178, - "step": 642 - }, - { - "epoch": 0.04945773402045996, - "learning_rate": 0.002981930108523756, - "loss": 1.3278, - "step": 643 - }, - { - "epoch": 0.04953465118067841, - "learning_rate": 0.0029818739734276153, - "loss": 0.9838, - "step": 644 - }, - { - "epoch": 0.049611568340896854, - "learning_rate": 0.0029818177518033096, - "loss": 1.325, - "step": 645 - }, - { - "epoch": 
0.0496884855011153, - "learning_rate": 0.0029817614436541203, - "loss": 1.1888, - "step": 646 - }, - { - "epoch": 0.049765402661333745, - "learning_rate": 0.0029817050489833363, - "loss": 1.5704, - "step": 647 - }, - { - "epoch": 0.04984231982155219, - "learning_rate": 0.00298164856779425, - "loss": 0.7675, - "step": 648 - }, - { - "epoch": 0.049919236981770636, - "learning_rate": 0.00298159200009016, - "loss": 1.1223, - "step": 649 - }, - { - "epoch": 0.04999615414198908, - "learning_rate": 0.0029815353458743686, - "loss": 1.1525, - "step": 650 - }, - { - "epoch": 0.05007307130220752, - "learning_rate": 0.002981478605150185, - "loss": 1.3927, - "step": 651 - }, - { - "epoch": 0.050149988462425965, - "learning_rate": 0.0029814217779209214, - "loss": 1.1732, - "step": 652 - }, - { - "epoch": 0.05022690562264441, - "learning_rate": 0.0029813648641898965, - "loss": 1.3517, - "step": 653 - }, - { - "epoch": 0.050303822782862856, - "learning_rate": 0.002981307863960433, - "loss": 0.8532, - "step": 654 - }, - { - "epoch": 0.0503807399430813, - "learning_rate": 0.0029812507772358594, - "loss": 1.2538, - "step": 655 - }, - { - "epoch": 0.05045765710329975, - "learning_rate": 0.0029811936040195094, - "loss": 1.4504, - "step": 656 - }, - { - "epoch": 0.05053457426351819, - "learning_rate": 0.0029811363443147214, - "loss": 1.0522, - "step": 657 - }, - { - "epoch": 0.05061149142373664, - "learning_rate": 0.0029810789981248387, - "loss": 1.2034, - "step": 658 - }, - { - "epoch": 0.05068840858395508, - "learning_rate": 0.0029810215654532094, - "loss": 1.0663, - "step": 659 - }, - { - "epoch": 0.05076532574417353, - "learning_rate": 0.002980964046303188, - "loss": 1.4473, - "step": 660 - }, - { - "epoch": 0.05084224290439197, - "learning_rate": 0.0029809064406781318, - "loss": 1.0033, - "step": 661 - }, - { - "epoch": 0.05091916006461041, - "learning_rate": 0.002980848748581405, - "loss": 1.2814, - "step": 662 - }, - { - "epoch": 0.05099607722482886, - "learning_rate": 0.002980790970016377, - "loss": 1.2509, - "step": 663 - }, - { - "epoch": 0.0510729943850473, - "learning_rate": 0.002980733104986421, - "loss": 1.04, - "step": 664 - }, - { - "epoch": 0.05114991154526575, - "learning_rate": 0.002980675153494916, - "loss": 0.9383, - "step": 665 - }, - { - "epoch": 0.051226828705484194, - "learning_rate": 0.0029806171155452453, - "loss": 1.2609, - "step": 666 - }, - { - "epoch": 0.05130374586570264, - "learning_rate": 0.0029805589911407982, - "loss": 1.2434, - "step": 667 - }, - { - "epoch": 0.051380663025921085, - "learning_rate": 0.0029805007802849687, - "loss": 1.0247, - "step": 668 - }, - { - "epoch": 0.05145758018613953, - "learning_rate": 0.0029804424829811558, - "loss": 1.3384, - "step": 669 - }, - { - "epoch": 0.051534497346357976, - "learning_rate": 0.0029803840992327635, - "loss": 0.8823, - "step": 670 - }, - { - "epoch": 0.051611414506576414, - "learning_rate": 0.0029803256290432, - "loss": 0.763, - "step": 671 - }, - { - "epoch": 0.05168833166679486, - "learning_rate": 0.0029802670724158814, - "loss": 1.2358, - "step": 672 - }, - { - "epoch": 0.051765248827013305, - "learning_rate": 0.0029802084293542247, - "loss": 1.425, - "step": 673 - }, - { - "epoch": 0.05184216598723175, - "learning_rate": 0.002980149699861656, - "loss": 1.3391, - "step": 674 - }, - { - "epoch": 0.051919083147450196, - "learning_rate": 0.0029800908839416034, - "loss": 1.4196, - "step": 675 - }, - { - "epoch": 0.05199600030766864, - "learning_rate": 0.002980031981597502, - "loss": 1.3749, - "step": 676 - }, - { - "epoch": 
0.052072917467887087, - "learning_rate": 0.0029799729928327904, - "loss": 1.0857, - "step": 677 - }, - { - "epoch": 0.05214983462810553, - "learning_rate": 0.002979913917650913, - "loss": 0.8182, - "step": 678 - }, - { - "epoch": 0.05222675178832398, - "learning_rate": 0.00297985475605532, - "loss": 1.1418, - "step": 679 - }, - { - "epoch": 0.05230366894854242, - "learning_rate": 0.0029797955080494653, - "loss": 1.7806, - "step": 680 - }, - { - "epoch": 0.05238058610876086, - "learning_rate": 0.002979736173636809, - "loss": 0.9753, - "step": 681 - }, - { - "epoch": 0.05245750326897931, - "learning_rate": 0.0029796767528208153, - "loss": 1.2917, - "step": 682 - }, - { - "epoch": 0.05253442042919775, - "learning_rate": 0.002979617245604954, - "loss": 0.8755, - "step": 683 - }, - { - "epoch": 0.0526113375894162, - "learning_rate": 0.0029795576519926993, - "loss": 1.1086, - "step": 684 - }, - { - "epoch": 0.05268825474963464, - "learning_rate": 0.0029794979719875315, - "loss": 0.9721, - "step": 685 - }, - { - "epoch": 0.05276517190985309, - "learning_rate": 0.0029794382055929355, - "loss": 0.8475, - "step": 686 - }, - { - "epoch": 0.052842089070071534, - "learning_rate": 0.0029793783528124004, - "loss": 1.1931, - "step": 687 - }, - { - "epoch": 0.05291900623028998, - "learning_rate": 0.0029793184136494223, - "loss": 1.8459, - "step": 688 - }, - { - "epoch": 0.052995923390508425, - "learning_rate": 0.0029792583881074995, - "loss": 1.4024, - "step": 689 - }, - { - "epoch": 0.05307284055072687, - "learning_rate": 0.002979198276190138, - "loss": 1.2084, - "step": 690 - }, - { - "epoch": 0.05314975771094531, - "learning_rate": 0.0029791380779008478, - "loss": 1.2549, - "step": 691 - }, - { - "epoch": 0.053226674871163754, - "learning_rate": 0.0029790777932431436, - "loss": 1.1673, - "step": 692 - }, - { - "epoch": 0.0533035920313822, - "learning_rate": 0.0029790174222205455, - "loss": 0.9272, - "step": 693 - }, - { - "epoch": 0.053380509191600645, - "learning_rate": 0.0029789569648365787, - "loss": 0.9013, - "step": 694 - }, - { - "epoch": 0.05345742635181909, - "learning_rate": 0.0029788964210947736, - "loss": 0.8581, - "step": 695 - }, - { - "epoch": 0.053534343512037535, - "learning_rate": 0.002978835790998665, - "loss": 0.8097, - "step": 696 - }, - { - "epoch": 0.05361126067225598, - "learning_rate": 0.0029787750745517935, - "loss": 1.4727, - "step": 697 - }, - { - "epoch": 0.053688177832474426, - "learning_rate": 0.002978714271757704, - "loss": 1.0355, - "step": 698 - }, - { - "epoch": 0.05376509499269287, - "learning_rate": 0.0029786533826199474, - "loss": 0.8615, - "step": 699 - }, - { - "epoch": 0.05384201215291132, - "learning_rate": 0.0029785924071420787, - "loss": 1.2511, - "step": 700 - }, - { - "epoch": 0.05391892931312976, - "learning_rate": 0.002978531345327658, - "loss": 1.4, - "step": 701 - }, - { - "epoch": 0.0539958464733482, - "learning_rate": 0.0029784701971802517, - "loss": 0.9655, - "step": 702 - }, - { - "epoch": 0.054072763633566646, - "learning_rate": 0.0029784089627034297, - "loss": 1.2346, - "step": 703 - }, - { - "epoch": 0.05414968079378509, - "learning_rate": 0.002978347641900767, - "loss": 1.3287, - "step": 704 - }, - { - "epoch": 0.05422659795400354, - "learning_rate": 0.002978286234775845, - "loss": 1.1252, - "step": 705 - }, - { - "epoch": 0.05430351511422198, - "learning_rate": 0.00297822474133225, - "loss": 0.9429, - "step": 706 - }, - { - "epoch": 0.05438043227444043, - "learning_rate": 0.002978163161573571, - "loss": 1.5489, - "step": 707 - }, - { - "epoch": 
0.054457349434658874, - "learning_rate": 0.0029781014955034047, - "loss": 1.1839, - "step": 708 - }, - { - "epoch": 0.05453426659487732, - "learning_rate": 0.0029780397431253517, - "loss": 1.1866, - "step": 709 - }, - { - "epoch": 0.054611183755095764, - "learning_rate": 0.0029779779044430176, - "loss": 1.4301, - "step": 710 - }, - { - "epoch": 0.05468810091531421, - "learning_rate": 0.002977915979460013, - "loss": 1.018, - "step": 711 - }, - { - "epoch": 0.05476501807553265, - "learning_rate": 0.002977853968179955, - "loss": 1.1193, - "step": 712 - }, - { - "epoch": 0.054841935235751094, - "learning_rate": 0.002977791870606463, - "loss": 1.4326, - "step": 713 - }, - { - "epoch": 0.05491885239596954, - "learning_rate": 0.0029777296867431638, - "loss": 1.0494, - "step": 714 - }, - { - "epoch": 0.054995769556187984, - "learning_rate": 0.0029776674165936873, - "loss": 0.8348, - "step": 715 - }, - { - "epoch": 0.05507268671640643, - "learning_rate": 0.0029776050601616716, - "loss": 1.4864, - "step": 716 - }, - { - "epoch": 0.055149603876624875, - "learning_rate": 0.0029775426174507556, - "loss": 1.1649, - "step": 717 - }, - { - "epoch": 0.05522652103684332, - "learning_rate": 0.0029774800884645866, - "loss": 1.0731, - "step": 718 - }, - { - "epoch": 0.055303438197061766, - "learning_rate": 0.0029774174732068157, - "loss": 1.1337, - "step": 719 - }, - { - "epoch": 0.05538035535728021, - "learning_rate": 0.0029773547716810986, - "loss": 1.435, - "step": 720 - }, - { - "epoch": 0.05545727251749866, - "learning_rate": 0.0029772919838910966, - "loss": 1.1594, - "step": 721 - }, - { - "epoch": 0.055534189677717095, - "learning_rate": 0.002977229109840476, - "loss": 0.7081, - "step": 722 - }, - { - "epoch": 0.05561110683793554, - "learning_rate": 0.0029771661495329085, - "loss": 0.9894, - "step": 723 - }, - { - "epoch": 0.055688023998153986, - "learning_rate": 0.0029771031029720697, - "loss": 1.0422, - "step": 724 - }, - { - "epoch": 0.05576494115837243, - "learning_rate": 0.002977039970161642, - "loss": 1.4302, - "step": 725 - }, - { - "epoch": 0.05584185831859088, - "learning_rate": 0.0029769767511053104, - "loss": 1.08, - "step": 726 - }, - { - "epoch": 0.05591877547880932, - "learning_rate": 0.0029769134458067672, - "loss": 1.3204, - "step": 727 - }, - { - "epoch": 0.05599569263902777, - "learning_rate": 0.002976850054269709, - "loss": 1.1759, - "step": 728 - }, - { - "epoch": 0.05607260979924621, - "learning_rate": 0.002976786576497837, - "loss": 1.091, - "step": 729 - }, - { - "epoch": 0.05614952695946466, - "learning_rate": 0.002976723012494857, - "loss": 1.3387, - "step": 730 - }, - { - "epoch": 0.056226444119683104, - "learning_rate": 0.002976659362264482, - "loss": 1.2249, - "step": 731 - }, - { - "epoch": 0.05630336127990154, - "learning_rate": 0.0029765956258104276, - "loss": 1.3977, - "step": 732 - }, - { - "epoch": 0.05638027844011999, - "learning_rate": 0.0029765318031364156, - "loss": 1.3686, - "step": 733 - }, - { - "epoch": 0.05645719560033843, - "learning_rate": 0.002976467894246173, - "loss": 1.0985, - "step": 734 - }, - { - "epoch": 0.05653411276055688, - "learning_rate": 0.0029764038991434314, - "loss": 0.868, - "step": 735 - }, - { - "epoch": 0.056611029920775324, - "learning_rate": 0.0029763398178319278, - "loss": 1.3035, - "step": 736 - }, - { - "epoch": 0.05668794708099377, - "learning_rate": 0.002976275650315403, - "loss": 1.1757, - "step": 737 - }, - { - "epoch": 0.056764864241212215, - "learning_rate": 0.0029762113965976045, - "loss": 0.9565, - "step": 738 - }, - { - 
"epoch": 0.05684178140143066, - "learning_rate": 0.0029761470566822845, - "loss": 1.1045, - "step": 739 - }, - { - "epoch": 0.056918698561649106, - "learning_rate": 0.0029760826305731992, - "loss": 1.0485, - "step": 740 - }, - { - "epoch": 0.05699561572186755, - "learning_rate": 0.0029760181182741105, - "loss": 0.9465, - "step": 741 - }, - { - "epoch": 0.057072532882086, - "learning_rate": 0.002975953519788786, - "loss": 1.6061, - "step": 742 - }, - { - "epoch": 0.057149450042304435, - "learning_rate": 0.0029758888351209974, - "loss": 1.2262, - "step": 743 - }, - { - "epoch": 0.05722636720252288, - "learning_rate": 0.002975824064274521, - "loss": 1.2773, - "step": 744 - }, - { - "epoch": 0.057303284362741326, - "learning_rate": 0.00297575920725314, - "loss": 1.3701, - "step": 745 - }, - { - "epoch": 0.05738020152295977, - "learning_rate": 0.0029756942640606406, - "loss": 1.3194, - "step": 746 - }, - { - "epoch": 0.05745711868317822, - "learning_rate": 0.0029756292347008152, - "loss": 1.332, - "step": 747 - }, - { - "epoch": 0.05753403584339666, - "learning_rate": 0.002975564119177461, - "loss": 1.3512, - "step": 748 - }, - { - "epoch": 0.05761095300361511, - "learning_rate": 0.00297549891749438, - "loss": 1.2467, - "step": 749 - }, - { - "epoch": 0.05768787016383355, - "learning_rate": 0.0029754336296553795, - "loss": 1.335, - "step": 750 - }, - { - "epoch": 0.057764787324052, - "learning_rate": 0.002975368255664272, - "loss": 1.0811, - "step": 751 - }, - { - "epoch": 0.057841704484270444, - "learning_rate": 0.0029753027955248743, - "loss": 1.0731, - "step": 752 - }, - { - "epoch": 0.05791862164448888, - "learning_rate": 0.0029752372492410084, - "loss": 0.8023, - "step": 753 - }, - { - "epoch": 0.05799553880470733, - "learning_rate": 0.002975171616816503, - "loss": 1.5936, - "step": 754 - }, - { - "epoch": 0.05807245596492577, - "learning_rate": 0.0029751058982551886, - "loss": 1.0659, - "step": 755 - }, - { - "epoch": 0.05814937312514422, - "learning_rate": 0.0029750400935609034, - "loss": 1.4928, - "step": 756 - }, - { - "epoch": 0.058226290285362664, - "learning_rate": 0.0029749742027374904, - "loss": 1.2801, - "step": 757 - }, - { - "epoch": 0.05830320744558111, - "learning_rate": 0.002974908225788796, - "loss": 1.7553, - "step": 758 - }, - { - "epoch": 0.058380124605799555, - "learning_rate": 0.0029748421627186737, - "loss": 1.2631, - "step": 759 - }, - { - "epoch": 0.058457041766018, - "learning_rate": 0.0029747760135309803, - "loss": 1.1292, - "step": 760 - }, - { - "epoch": 0.058533958926236446, - "learning_rate": 0.0029747097782295787, - "loss": 1.4035, - "step": 761 - }, - { - "epoch": 0.05861087608645489, - "learning_rate": 0.002974643456818336, - "loss": 0.9446, - "step": 762 - }, - { - "epoch": 0.05868779324667333, - "learning_rate": 0.0029745770493011256, - "loss": 0.7152, - "step": 763 - }, - { - "epoch": 0.058764710406891775, - "learning_rate": 0.002974510555681824, - "loss": 1.262, - "step": 764 - }, - { - "epoch": 0.05884162756711022, - "learning_rate": 0.0029744439759643144, - "loss": 1.2302, - "step": 765 - }, - { - "epoch": 0.058918544727328666, - "learning_rate": 0.002974377310152484, - "loss": 1.1512, - "step": 766 - }, - { - "epoch": 0.05899546188754711, - "learning_rate": 0.0029743105582502264, - "loss": 1.1798, - "step": 767 - }, - { - "epoch": 0.05907237904776556, - "learning_rate": 0.0029742437202614387, - "loss": 1.1354, - "step": 768 - }, - { - "epoch": 0.059149296207984, - "learning_rate": 0.0029741767961900243, - "loss": 1.3438, - "step": 769 - }, - { - 
"epoch": 0.05922621336820245, - "learning_rate": 0.0029741097860398894, - "loss": 1.4128, - "step": 770 - }, - { - "epoch": 0.05930313052842089, - "learning_rate": 0.0029740426898149487, - "loss": 1.2916, - "step": 771 - }, - { - "epoch": 0.05938004768863934, - "learning_rate": 0.0029739755075191185, - "loss": 0.9568, - "step": 772 - }, - { - "epoch": 0.05945696484885778, - "learning_rate": 0.0029739082391563223, - "loss": 1.5856, - "step": 773 - }, - { - "epoch": 0.05953388200907622, - "learning_rate": 0.0029738408847304883, - "loss": 1.0162, - "step": 774 - }, - { - "epoch": 0.05961079916929467, - "learning_rate": 0.002973773444245549, - "loss": 1.1085, - "step": 775 - }, - { - "epoch": 0.05968771632951311, - "learning_rate": 0.002973705917705442, - "loss": 1.1943, - "step": 776 - }, - { - "epoch": 0.05976463348973156, - "learning_rate": 0.002973638305114111, - "loss": 1.1486, - "step": 777 - }, - { - "epoch": 0.059841550649950004, - "learning_rate": 0.0029735706064755035, - "loss": 1.1634, - "step": 778 - }, - { - "epoch": 0.05991846781016845, - "learning_rate": 0.0029735028217935723, - "loss": 1.5149, - "step": 779 - }, - { - "epoch": 0.059995384970386895, - "learning_rate": 0.002973434951072276, - "loss": 0.8127, - "step": 780 - }, - { - "epoch": 0.06007230213060534, - "learning_rate": 0.0029733669943155776, - "loss": 0.9215, - "step": 781 - }, - { - "epoch": 0.060149219290823785, - "learning_rate": 0.002973298951527444, - "loss": 1.2661, - "step": 782 - }, - { - "epoch": 0.06022613645104223, - "learning_rate": 0.00297323082271185, - "loss": 1.2779, - "step": 783 - }, - { - "epoch": 0.06030305361126067, - "learning_rate": 0.0029731626078727726, - "loss": 1.2186, - "step": 784 - }, - { - "epoch": 0.060379970771479115, - "learning_rate": 0.0029730943070141952, - "loss": 1.2627, - "step": 785 - }, - { - "epoch": 0.06045688793169756, - "learning_rate": 0.002973025920140106, - "loss": 1.4769, - "step": 786 - }, - { - "epoch": 0.060533805091916006, - "learning_rate": 0.0029729574472544987, - "loss": 1.2963, - "step": 787 - }, - { - "epoch": 0.06061072225213445, - "learning_rate": 0.00297288888836137, - "loss": 1.6462, - "step": 788 - }, - { - "epoch": 0.060687639412352896, - "learning_rate": 0.002972820243464725, - "loss": 0.9503, - "step": 789 - }, - { - "epoch": 0.06076455657257134, - "learning_rate": 0.0029727515125685705, - "loss": 1.1223, - "step": 790 - }, - { - "epoch": 0.06084147373278979, - "learning_rate": 0.00297268269567692, - "loss": 0.9932, - "step": 791 - }, - { - "epoch": 0.06091839089300823, - "learning_rate": 0.0029726137927937927, - "loss": 1.1108, - "step": 792 - }, - { - "epoch": 0.06099530805322668, - "learning_rate": 0.002972544803923211, - "loss": 1.1645, - "step": 793 - }, - { - "epoch": 0.06107222521344512, - "learning_rate": 0.002972475729069204, - "loss": 1.0648, - "step": 794 - }, - { - "epoch": 0.06114914237366356, - "learning_rate": 0.0029724065682358043, - "loss": 1.1004, - "step": 795 - }, - { - "epoch": 0.06122605953388201, - "learning_rate": 0.0029723373214270506, - "loss": 1.2627, - "step": 796 - }, - { - "epoch": 0.06130297669410045, - "learning_rate": 0.002972267988646986, - "loss": 1.2579, - "step": 797 - }, - { - "epoch": 0.0613798938543189, - "learning_rate": 0.0029721985698996597, - "loss": 1.2242, - "step": 798 - }, - { - "epoch": 0.061456811014537344, - "learning_rate": 0.0029721290651891245, - "loss": 1.0346, - "step": 799 - }, - { - "epoch": 0.06153372817475579, - "learning_rate": 0.002972059474519439, - "loss": 1.258, - "step": 800 - }, - { - 
"epoch": 0.061610645334974234, - "learning_rate": 0.002971989797894666, - "loss": 1.3715, - "step": 801 - }, - { - "epoch": 0.06168756249519268, - "learning_rate": 0.0029719200353188753, - "loss": 1.5083, - "step": 802 - }, - { - "epoch": 0.061764479655411125, - "learning_rate": 0.0029718501867961396, - "loss": 0.7944, - "step": 803 - }, - { - "epoch": 0.061841396815629564, - "learning_rate": 0.0029717802523305374, - "loss": 1.3351, - "step": 804 - }, - { - "epoch": 0.06191831397584801, - "learning_rate": 0.0029717102319261526, - "loss": 1.1212, - "step": 805 - }, - { - "epoch": 0.061995231136066455, - "learning_rate": 0.0029716401255870735, - "loss": 1.1144, - "step": 806 - }, - { - "epoch": 0.0620721482962849, - "learning_rate": 0.0029715699333173937, - "loss": 1.0605, - "step": 807 - }, - { - "epoch": 0.062149065456503345, - "learning_rate": 0.002971499655121212, - "loss": 1.3306, - "step": 808 - }, - { - "epoch": 0.06222598261672179, - "learning_rate": 0.002971429291002632, - "loss": 0.7732, - "step": 809 - }, - { - "epoch": 0.062302899776940236, - "learning_rate": 0.0029713588409657618, - "loss": 1.21, - "step": 810 - }, - { - "epoch": 0.06237981693715868, - "learning_rate": 0.0029712883050147156, - "loss": 1.0372, - "step": 811 - }, - { - "epoch": 0.06245673409737713, - "learning_rate": 0.0029712176831536118, - "loss": 1.3111, - "step": 812 - }, - { - "epoch": 0.06253365125759557, - "learning_rate": 0.0029711469753865745, - "loss": 0.9122, - "step": 813 - }, - { - "epoch": 0.06261056841781401, - "learning_rate": 0.002971076181717732, - "loss": 1.1221, - "step": 814 - }, - { - "epoch": 0.06268748557803246, - "learning_rate": 0.002971005302151218, - "loss": 1.3591, - "step": 815 - }, - { - "epoch": 0.0627644027382509, - "learning_rate": 0.0029709343366911716, - "loss": 1.5147, - "step": 816 - }, - { - "epoch": 0.06284131989846935, - "learning_rate": 0.002970863285341736, - "loss": 1.1363, - "step": 817 - }, - { - "epoch": 0.06291823705868779, - "learning_rate": 0.0029707921481070608, - "loss": 0.8343, - "step": 818 - }, - { - "epoch": 0.06299515421890624, - "learning_rate": 0.002970720924991299, - "loss": 1.0197, - "step": 819 - }, - { - "epoch": 0.06307207137912468, - "learning_rate": 0.0029706496159986094, - "loss": 1.3745, - "step": 820 - }, - { - "epoch": 0.06314898853934313, - "learning_rate": 0.0029705782211331567, - "loss": 1.0971, - "step": 821 - }, - { - "epoch": 0.06322590569956157, - "learning_rate": 0.0029705067403991087, - "loss": 1.1213, - "step": 822 - }, - { - "epoch": 0.06330282285978002, - "learning_rate": 0.0029704351738006392, - "loss": 1.0577, - "step": 823 - }, - { - "epoch": 0.06337974001999847, - "learning_rate": 0.0029703635213419277, - "loss": 0.8942, - "step": 824 - }, - { - "epoch": 0.06345665718021691, - "learning_rate": 0.0029702917830271578, - "loss": 1.5873, - "step": 825 - }, - { - "epoch": 0.06353357434043536, - "learning_rate": 0.002970219958860519, - "loss": 1.5163, - "step": 826 - }, - { - "epoch": 0.0636104915006538, - "learning_rate": 0.0029701480488462037, - "loss": 0.8876, - "step": 827 - }, - { - "epoch": 0.06368740866087225, - "learning_rate": 0.002970076052988412, - "loss": 1.1676, - "step": 828 - }, - { - "epoch": 0.06376432582109069, - "learning_rate": 0.002970003971291348, - "loss": 1.2498, - "step": 829 - }, - { - "epoch": 0.06384124298130912, - "learning_rate": 0.002969931803759219, - "loss": 0.9933, - "step": 830 - }, - { - "epoch": 0.06391816014152757, - "learning_rate": 0.0029698595503962408, - "loss": 1.4362, - "step": 831 - }, - { - 
"epoch": 0.06399507730174601, - "learning_rate": 0.002969787211206632, - "loss": 0.9662, - "step": 832 - }, - { - "epoch": 0.06407199446196446, - "learning_rate": 0.0029697147861946155, - "loss": 0.9095, - "step": 833 - }, - { - "epoch": 0.0641489116221829, - "learning_rate": 0.0029696422753644207, - "loss": 1.1928, - "step": 834 - }, - { - "epoch": 0.06422582878240135, - "learning_rate": 0.0029695696787202823, - "loss": 1.0297, - "step": 835 - }, - { - "epoch": 0.0643027459426198, - "learning_rate": 0.002969496996266439, - "loss": 0.7538, - "step": 836 - }, - { - "epoch": 0.06437966310283824, - "learning_rate": 0.002969424228007134, - "loss": 1.1861, - "step": 837 - }, - { - "epoch": 0.06445658026305669, - "learning_rate": 0.0029693513739466174, - "loss": 1.2194, - "step": 838 - }, - { - "epoch": 0.06453349742327513, - "learning_rate": 0.0029692784340891423, - "loss": 1.1569, - "step": 839 - }, - { - "epoch": 0.06461041458349358, - "learning_rate": 0.0029692054084389688, - "loss": 1.3693, - "step": 840 - }, - { - "epoch": 0.06468733174371202, - "learning_rate": 0.0029691322970003594, - "loss": 1.275, - "step": 841 - }, - { - "epoch": 0.06476424890393047, - "learning_rate": 0.002969059099777585, - "loss": 0.8621, - "step": 842 - }, - { - "epoch": 0.06484116606414891, - "learning_rate": 0.0029689858167749185, - "loss": 1.258, - "step": 843 - }, - { - "epoch": 0.06491808322436736, - "learning_rate": 0.002968912447996639, - "loss": 1.0037, - "step": 844 - }, - { - "epoch": 0.0649950003845858, - "learning_rate": 0.0029688389934470314, - "loss": 1.148, - "step": 845 - }, - { - "epoch": 0.06507191754480425, - "learning_rate": 0.0029687654531303834, - "loss": 1.1333, - "step": 846 - }, - { - "epoch": 0.0651488347050227, - "learning_rate": 0.0029686918270509905, - "loss": 1.1125, - "step": 847 - }, - { - "epoch": 0.06522575186524114, - "learning_rate": 0.0029686181152131508, - "loss": 1.1169, - "step": 848 - }, - { - "epoch": 0.06530266902545959, - "learning_rate": 0.002968544317621169, - "loss": 1.3645, - "step": 849 - }, - { - "epoch": 0.06537958618567802, - "learning_rate": 0.0029684704342793538, - "loss": 1.1042, - "step": 850 - }, - { - "epoch": 0.06545650334589646, - "learning_rate": 0.0029683964651920198, - "loss": 1.1865, - "step": 851 - }, - { - "epoch": 0.06553342050611491, - "learning_rate": 0.0029683224103634856, - "loss": 1.224, - "step": 852 - }, - { - "epoch": 0.06561033766633335, - "learning_rate": 0.002968248269798076, - "loss": 1.6174, - "step": 853 - }, - { - "epoch": 0.0656872548265518, - "learning_rate": 0.0029681740435001194, - "loss": 1.5593, - "step": 854 - }, - { - "epoch": 0.06576417198677025, - "learning_rate": 0.00296809973147395, - "loss": 1.2401, - "step": 855 - }, - { - "epoch": 0.06584108914698869, - "learning_rate": 0.002968025333723908, - "loss": 1.0987, - "step": 856 - }, - { - "epoch": 0.06591800630720714, - "learning_rate": 0.0029679508502543366, - "loss": 1.2755, - "step": 857 - }, - { - "epoch": 0.06599492346742558, - "learning_rate": 0.002967876281069585, - "loss": 1.6386, - "step": 858 - }, - { - "epoch": 0.06607184062764403, - "learning_rate": 0.0029678016261740076, - "loss": 1.5125, - "step": 859 - }, - { - "epoch": 0.06614875778786247, - "learning_rate": 0.002967726885571964, - "loss": 1.4287, - "step": 860 - }, - { - "epoch": 0.06622567494808092, - "learning_rate": 0.002967652059267817, - "loss": 1.1767, - "step": 861 - }, - { - "epoch": 0.06630259210829936, - "learning_rate": 0.002967577147265937, - "loss": 1.3266, - "step": 862 - }, - { - "epoch": 
0.06637950926851781, - "learning_rate": 0.0029675021495706986, - "loss": 1.2988, - "step": 863 - }, - { - "epoch": 0.06645642642873625, - "learning_rate": 0.00296742706618648, - "loss": 0.8827, - "step": 864 - }, - { - "epoch": 0.0665333435889547, - "learning_rate": 0.002967351897117665, - "loss": 1.3811, - "step": 865 - }, - { - "epoch": 0.06661026074917314, - "learning_rate": 0.0029672766423686444, - "loss": 1.1978, - "step": 866 - }, - { - "epoch": 0.06668717790939159, - "learning_rate": 0.002967201301943811, - "loss": 1.3499, - "step": 867 - }, - { - "epoch": 0.06676409506961004, - "learning_rate": 0.0029671258758475643, - "loss": 1.174, - "step": 868 - }, - { - "epoch": 0.06684101222982848, - "learning_rate": 0.0029670503640843093, - "loss": 1.0905, - "step": 869 - }, - { - "epoch": 0.06691792939004693, - "learning_rate": 0.002966974766658454, - "loss": 1.262, - "step": 870 - }, - { - "epoch": 0.06699484655026536, - "learning_rate": 0.0029668990835744137, - "loss": 0.9186, - "step": 871 - }, - { - "epoch": 0.0670717637104838, - "learning_rate": 0.0029668233148366072, - "loss": 0.9934, - "step": 872 - }, - { - "epoch": 0.06714868087070225, - "learning_rate": 0.002966747460449458, - "loss": 1.2823, - "step": 873 - }, - { - "epoch": 0.0672255980309207, - "learning_rate": 0.002966671520417397, - "loss": 1.0951, - "step": 874 - }, - { - "epoch": 0.06730251519113914, - "learning_rate": 0.0029665954947448565, - "loss": 1.2555, - "step": 875 - }, - { - "epoch": 0.06737943235135758, - "learning_rate": 0.002966519383436277, - "loss": 1.0027, - "step": 876 - }, - { - "epoch": 0.06745634951157603, - "learning_rate": 0.0029664431864961024, - "loss": 1.2581, - "step": 877 - }, - { - "epoch": 0.06753326667179448, - "learning_rate": 0.0029663669039287814, - "loss": 1.1946, - "step": 878 - }, - { - "epoch": 0.06761018383201292, - "learning_rate": 0.0029662905357387692, - "loss": 1.1934, - "step": 879 - }, - { - "epoch": 0.06768710099223137, - "learning_rate": 0.002966214081930524, - "loss": 1.2912, - "step": 880 - }, - { - "epoch": 0.06776401815244981, - "learning_rate": 0.002966137542508511, - "loss": 1.4963, - "step": 881 - }, - { - "epoch": 0.06784093531266826, - "learning_rate": 0.0029660609174771983, - "loss": 1.047, - "step": 882 - }, - { - "epoch": 0.0679178524728867, - "learning_rate": 0.0029659842068410617, - "loss": 1.3658, - "step": 883 - }, - { - "epoch": 0.06799476963310515, - "learning_rate": 0.0029659074106045784, - "loss": 1.5428, - "step": 884 - }, - { - "epoch": 0.0680716867933236, - "learning_rate": 0.0029658305287722344, - "loss": 1.0194, - "step": 885 - }, - { - "epoch": 0.06814860395354204, - "learning_rate": 0.002965753561348518, - "loss": 1.2364, - "step": 886 - }, - { - "epoch": 0.06822552111376048, - "learning_rate": 0.0029656765083379234, - "loss": 1.0498, - "step": 887 - }, - { - "epoch": 0.06830243827397893, - "learning_rate": 0.00296559936974495, - "loss": 1.0967, - "step": 888 - }, - { - "epoch": 0.06837935543419738, - "learning_rate": 0.002965522145574102, - "loss": 1.9147, - "step": 889 - }, - { - "epoch": 0.06845627259441582, - "learning_rate": 0.0029654448358298886, - "loss": 1.3101, - "step": 890 - }, - { - "epoch": 0.06853318975463425, - "learning_rate": 0.0029653674405168244, - "loss": 0.8805, - "step": 891 - }, - { - "epoch": 0.0686101069148527, - "learning_rate": 0.0029652899596394277, - "loss": 1.0904, - "step": 892 - }, - { - "epoch": 0.06868702407507114, - "learning_rate": 0.0029652123932022234, - "loss": 0.8722, - "step": 893 - }, - { - "epoch": 
0.06876394123528959, - "learning_rate": 0.00296513474120974, - "loss": 1.304, - "step": 894 - }, - { - "epoch": 0.06884085839550803, - "learning_rate": 0.0029650570036665125, - "loss": 0.849, - "step": 895 - }, - { - "epoch": 0.06891777555572648, - "learning_rate": 0.0029649791805770797, - "loss": 0.9992, - "step": 896 - }, - { - "epoch": 0.06899469271594492, - "learning_rate": 0.002964901271945986, - "loss": 1.0795, - "step": 897 - }, - { - "epoch": 0.06907160987616337, - "learning_rate": 0.00296482327777778, - "loss": 1.304, - "step": 898 - }, - { - "epoch": 0.06914852703638182, - "learning_rate": 0.0029647451980770166, - "loss": 1.0488, - "step": 899 - }, - { - "epoch": 0.06922544419660026, - "learning_rate": 0.0029646670328482543, - "loss": 1.2691, - "step": 900 - }, - { - "epoch": 0.0693023613568187, - "learning_rate": 0.0029645887820960574, - "loss": 1.3094, - "step": 901 - }, - { - "epoch": 0.06937927851703715, - "learning_rate": 0.002964510445824995, - "loss": 1.2892, - "step": 902 - }, - { - "epoch": 0.0694561956772556, - "learning_rate": 0.002964432024039642, - "loss": 1.2423, - "step": 903 - }, - { - "epoch": 0.06953311283747404, - "learning_rate": 0.0029643535167445767, - "loss": 1.2492, - "step": 904 - }, - { - "epoch": 0.06961002999769249, - "learning_rate": 0.002964274923944383, - "loss": 1.3041, - "step": 905 - }, - { - "epoch": 0.06968694715791093, - "learning_rate": 0.002964196245643651, - "loss": 1.0715, - "step": 906 - }, - { - "epoch": 0.06976386431812938, - "learning_rate": 0.0029641174818469743, - "loss": 1.0728, - "step": 907 - }, - { - "epoch": 0.06984078147834782, - "learning_rate": 0.0029640386325589514, - "loss": 1.042, - "step": 908 - }, - { - "epoch": 0.06991769863856627, - "learning_rate": 0.0029639596977841876, - "loss": 1.4161, - "step": 909 - }, - { - "epoch": 0.06999461579878472, - "learning_rate": 0.0029638806775272907, - "loss": 1.4001, - "step": 910 - }, - { - "epoch": 0.07007153295900316, - "learning_rate": 0.0029638015717928755, - "loss": 1.355, - "step": 911 - }, - { - "epoch": 0.07014845011922159, - "learning_rate": 0.0029637223805855616, - "loss": 1.1305, - "step": 912 - }, - { - "epoch": 0.07022536727944004, - "learning_rate": 0.002963643103909972, - "loss": 1.1159, - "step": 913 - }, - { - "epoch": 0.07030228443965848, - "learning_rate": 0.002963563741770736, - "loss": 1.3449, - "step": 914 - }, - { - "epoch": 0.07037920159987693, - "learning_rate": 0.002963484294172488, - "loss": 1.338, - "step": 915 - }, - { - "epoch": 0.07045611876009537, - "learning_rate": 0.002963404761119867, - "loss": 1.2533, - "step": 916 - }, - { - "epoch": 0.07053303592031382, - "learning_rate": 0.002963325142617517, - "loss": 1.0319, - "step": 917 - }, - { - "epoch": 0.07060995308053226, - "learning_rate": 0.0029632454386700866, - "loss": 1.2467, - "step": 918 - }, - { - "epoch": 0.07068687024075071, - "learning_rate": 0.0029631656492822302, - "loss": 1.2088, - "step": 919 - }, - { - "epoch": 0.07076378740096916, - "learning_rate": 0.0029630857744586065, - "loss": 1.0837, - "step": 920 - }, - { - "epoch": 0.0708407045611876, - "learning_rate": 0.00296300581420388, - "loss": 1.2227, - "step": 921 - }, - { - "epoch": 0.07091762172140605, - "learning_rate": 0.002962925768522719, - "loss": 0.9956, - "step": 922 - }, - { - "epoch": 0.07099453888162449, - "learning_rate": 0.002962845637419798, - "loss": 1.1343, - "step": 923 - }, - { - "epoch": 0.07107145604184294, - "learning_rate": 0.0029627654208997955, - "loss": 1.0709, - "step": 924 - }, - { - "epoch": 
0.07114837320206138, - "learning_rate": 0.0029626851189673955, - "loss": 1.1055, - "step": 925 - }, - { - "epoch": 0.07122529036227983, - "learning_rate": 0.002962604731627287, - "loss": 1.0965, - "step": 926 - }, - { - "epoch": 0.07130220752249827, - "learning_rate": 0.0029625242588841643, - "loss": 0.9764, - "step": 927 - }, - { - "epoch": 0.07137912468271672, - "learning_rate": 0.002962443700742726, - "loss": 1.503, - "step": 928 - }, - { - "epoch": 0.07145604184293516, - "learning_rate": 0.002962363057207675, - "loss": 1.1844, - "step": 929 - }, - { - "epoch": 0.07153295900315361, - "learning_rate": 0.002962282328283722, - "loss": 1.3772, - "step": 930 - }, - { - "epoch": 0.07160987616337205, - "learning_rate": 0.0029622015139755794, - "loss": 1.0582, - "step": 931 - }, - { - "epoch": 0.07168679332359049, - "learning_rate": 0.002962120614287967, - "loss": 1.126, - "step": 932 - }, - { - "epoch": 0.07176371048380893, - "learning_rate": 0.0029620396292256075, - "loss": 1.2031, - "step": 933 - }, - { - "epoch": 0.07184062764402738, - "learning_rate": 0.0029619585587932306, - "loss": 1.3518, - "step": 934 - }, - { - "epoch": 0.07191754480424582, - "learning_rate": 0.00296187740299557, - "loss": 0.9528, - "step": 935 - }, - { - "epoch": 0.07199446196446427, - "learning_rate": 0.0029617961618373647, - "loss": 1.2507, - "step": 936 - }, - { - "epoch": 0.07207137912468271, - "learning_rate": 0.002961714835323357, - "loss": 1.2534, - "step": 937 - }, - { - "epoch": 0.07214829628490116, - "learning_rate": 0.0029616334234582975, - "loss": 1.1605, - "step": 938 - }, - { - "epoch": 0.0722252134451196, - "learning_rate": 0.0029615519262469387, - "loss": 1.1648, - "step": 939 - }, - { - "epoch": 0.07230213060533805, - "learning_rate": 0.00296147034369404, - "loss": 1.0868, - "step": 940 - }, - { - "epoch": 0.0723790477655565, - "learning_rate": 0.0029613886758043653, - "loss": 1.1404, - "step": 941 - }, - { - "epoch": 0.07245596492577494, - "learning_rate": 0.002961306922582682, - "loss": 1.1598, - "step": 942 - }, - { - "epoch": 0.07253288208599339, - "learning_rate": 0.002961225084033765, - "loss": 0.9223, - "step": 943 - }, - { - "epoch": 0.07260979924621183, - "learning_rate": 0.002961143160162392, - "loss": 1.2376, - "step": 944 - }, - { - "epoch": 0.07268671640643028, - "learning_rate": 0.002961061150973347, - "loss": 1.1766, - "step": 945 - }, - { - "epoch": 0.07276363356664872, - "learning_rate": 0.0029609790564714194, - "loss": 1.0539, - "step": 946 - }, - { - "epoch": 0.07284055072686717, - "learning_rate": 0.002960896876661402, - "loss": 1.3398, - "step": 947 - }, - { - "epoch": 0.07291746788708561, - "learning_rate": 0.002960814611548093, - "loss": 1.0852, - "step": 948 - }, - { - "epoch": 0.07299438504730406, - "learning_rate": 0.0029607322611362973, - "loss": 1.0945, - "step": 949 - }, - { - "epoch": 0.0730713022075225, - "learning_rate": 0.0029606498254308214, - "loss": 1.0968, - "step": 950 - }, - { - "epoch": 0.07314821936774095, - "learning_rate": 0.0029605673044364807, - "loss": 0.8015, - "step": 951 - }, - { - "epoch": 0.0732251365279594, - "learning_rate": 0.0029604846981580927, - "loss": 1.1704, - "step": 952 - }, - { - "epoch": 0.07330205368817783, - "learning_rate": 0.002960402006600481, - "loss": 1.2461, - "step": 953 - }, - { - "epoch": 0.07337897084839627, - "learning_rate": 0.0029603192297684745, - "loss": 1.2593, - "step": 954 - }, - { - "epoch": 0.07345588800861472, - "learning_rate": 0.002960236367666906, - "loss": 0.9799, - "step": 955 - }, - { - "epoch": 
0.07353280516883316, - "learning_rate": 0.0029601534203006143, - "loss": 1.098, - "step": 956 - }, - { - "epoch": 0.07360972232905161, - "learning_rate": 0.002960070387674442, - "loss": 1.2879, - "step": 957 - }, - { - "epoch": 0.07368663948927005, - "learning_rate": 0.0029599872697932393, - "loss": 1.3842, - "step": 958 - }, - { - "epoch": 0.0737635566494885, - "learning_rate": 0.002959904066661857, - "loss": 1.1728, - "step": 959 - }, - { - "epoch": 0.07384047380970694, - "learning_rate": 0.0029598207782851557, - "loss": 1.1599, - "step": 960 - }, - { - "epoch": 0.07391739096992539, - "learning_rate": 0.0029597374046679975, - "loss": 1.1854, - "step": 961 - }, - { - "epoch": 0.07399430813014383, - "learning_rate": 0.0029596539458152512, - "loss": 1.1199, - "step": 962 - }, - { - "epoch": 0.07407122529036228, - "learning_rate": 0.0029595704017317897, - "loss": 1.3585, - "step": 963 - }, - { - "epoch": 0.07414814245058073, - "learning_rate": 0.002959486772422491, - "loss": 1.2377, - "step": 964 - }, - { - "epoch": 0.07422505961079917, - "learning_rate": 0.0029594030578922385, - "loss": 1.1028, - "step": 965 - }, - { - "epoch": 0.07430197677101762, - "learning_rate": 0.002959319258145921, - "loss": 1.1915, - "step": 966 - }, - { - "epoch": 0.07437889393123606, - "learning_rate": 0.002959235373188431, - "loss": 0.9092, - "step": 967 - }, - { - "epoch": 0.07445581109145451, - "learning_rate": 0.0029591514030246667, - "loss": 0.9983, - "step": 968 - }, - { - "epoch": 0.07453272825167295, - "learning_rate": 0.002959067347659531, - "loss": 1.3972, - "step": 969 - }, - { - "epoch": 0.0746096454118914, - "learning_rate": 0.002958983207097933, - "loss": 1.1719, - "step": 970 - }, - { - "epoch": 0.07468656257210984, - "learning_rate": 0.0029588989813447843, - "loss": 1.009, - "step": 971 - }, - { - "epoch": 0.07476347973232829, - "learning_rate": 0.0029588146704050042, - "loss": 1.3163, - "step": 972 - }, - { - "epoch": 0.07484039689254672, - "learning_rate": 0.002958730274283515, - "loss": 1.0569, - "step": 973 - }, - { - "epoch": 0.07491731405276517, - "learning_rate": 0.0029586457929852445, - "loss": 1.647, - "step": 974 - }, - { - "epoch": 0.07499423121298361, - "learning_rate": 0.0029585612265151263, - "loss": 1.0079, - "step": 975 - }, - { - "epoch": 0.07507114837320206, - "learning_rate": 0.0029584765748780976, - "loss": 1.3997, - "step": 976 - }, - { - "epoch": 0.0751480655334205, - "learning_rate": 0.002958391838079102, - "loss": 0.9782, - "step": 977 - }, - { - "epoch": 0.07522498269363895, - "learning_rate": 0.0029583070161230875, - "loss": 0.8678, - "step": 978 - }, - { - "epoch": 0.07530189985385739, - "learning_rate": 0.002958222109015006, - "loss": 1.0651, - "step": 979 - }, - { - "epoch": 0.07537881701407584, - "learning_rate": 0.002958137116759816, - "loss": 1.3772, - "step": 980 - }, - { - "epoch": 0.07545573417429428, - "learning_rate": 0.00295805203936248, - "loss": 1.3194, - "step": 981 - }, - { - "epoch": 0.07553265133451273, - "learning_rate": 0.0029579668768279664, - "loss": 1.5207, - "step": 982 - }, - { - "epoch": 0.07560956849473117, - "learning_rate": 0.0029578816291612464, - "loss": 1.2981, - "step": 983 - }, - { - "epoch": 0.07568648565494962, - "learning_rate": 0.0029577962963672996, - "loss": 1.4814, - "step": 984 - }, - { - "epoch": 0.07576340281516807, - "learning_rate": 0.002957710878451107, - "loss": 1.1017, - "step": 985 - }, - { - "epoch": 0.07584031997538651, - "learning_rate": 0.0029576253754176576, - "loss": 1.1641, - "step": 986 - }, - { - "epoch": 
0.07591723713560496, - "learning_rate": 0.0029575397872719433, - "loss": 1.505, - "step": 987 - }, - { - "epoch": 0.0759941542958234, - "learning_rate": 0.002957454114018962, - "loss": 1.3036, - "step": 988 - }, - { - "epoch": 0.07607107145604185, - "learning_rate": 0.0029573683556637156, - "loss": 0.8347, - "step": 989 - }, - { - "epoch": 0.07614798861626029, - "learning_rate": 0.0029572825122112126, - "loss": 1.1208, - "step": 990 - }, - { - "epoch": 0.07622490577647874, - "learning_rate": 0.0029571965836664646, - "loss": 0.9651, - "step": 991 - }, - { - "epoch": 0.07630182293669718, - "learning_rate": 0.00295711057003449, - "loss": 1.2866, - "step": 992 - }, - { - "epoch": 0.07637874009691561, - "learning_rate": 0.0029570244713203095, - "loss": 1.2155, - "step": 993 - }, - { - "epoch": 0.07645565725713406, - "learning_rate": 0.0029569382875289526, - "loss": 1.4266, - "step": 994 - }, - { - "epoch": 0.0765325744173525, - "learning_rate": 0.0029568520186654505, - "loss": 1.0505, - "step": 995 - }, - { - "epoch": 0.07660949157757095, - "learning_rate": 0.0029567656647348406, - "loss": 1.3778, - "step": 996 - }, - { - "epoch": 0.0766864087377894, - "learning_rate": 0.0029566792257421657, - "loss": 1.2593, - "step": 997 - }, - { - "epoch": 0.07676332589800784, - "learning_rate": 0.002956592701692472, - "loss": 1.1632, - "step": 998 - }, - { - "epoch": 0.07684024305822629, - "learning_rate": 0.002956506092590813, - "loss": 1.1734, - "step": 999 - }, - { - "epoch": 0.07691716021844473, - "learning_rate": 0.002956419398442245, - "loss": 1.1082, - "step": 1000 - }, - { - "epoch": 0.07699407737866318, - "learning_rate": 0.0029563326192518307, - "loss": 1.7411, - "step": 1001 - }, - { - "epoch": 0.07707099453888162, - "learning_rate": 0.0029562457550246367, - "loss": 1.5031, - "step": 1002 - }, - { - "epoch": 0.07714791169910007, - "learning_rate": 0.0029561588057657358, - "loss": 0.7835, - "step": 1003 - }, - { - "epoch": 0.07722482885931851, - "learning_rate": 0.0029560717714802046, - "loss": 1.5095, - "step": 1004 - }, - { - "epoch": 0.07730174601953696, - "learning_rate": 0.002955984652173125, - "loss": 1.4018, - "step": 1005 - }, - { - "epoch": 0.0773786631797554, - "learning_rate": 0.002955897447849584, - "loss": 1.3244, - "step": 1006 - }, - { - "epoch": 0.07745558033997385, - "learning_rate": 0.0029558101585146746, - "loss": 1.0778, - "step": 1007 - }, - { - "epoch": 0.0775324975001923, - "learning_rate": 0.002955722784173492, - "loss": 1.5623, - "step": 1008 - }, - { - "epoch": 0.07760941466041074, - "learning_rate": 0.002955635324831139, - "loss": 0.9722, - "step": 1009 - }, - { - "epoch": 0.07768633182062919, - "learning_rate": 0.0029555477804927223, - "loss": 0.7629, - "step": 1010 - }, - { - "epoch": 0.07776324898084763, - "learning_rate": 0.002955460151163354, - "loss": 1.4302, - "step": 1011 - }, - { - "epoch": 0.07784016614106608, - "learning_rate": 0.002955372436848151, - "loss": 1.8574, - "step": 1012 - }, - { - "epoch": 0.07791708330128452, - "learning_rate": 0.002955284637552234, - "loss": 1.0181, - "step": 1013 - }, - { - "epoch": 0.07799400046150295, - "learning_rate": 0.00295519675328073, - "loss": 1.1129, - "step": 1014 - }, - { - "epoch": 0.0780709176217214, - "learning_rate": 0.002955108784038772, - "loss": 1.1438, - "step": 1015 - }, - { - "epoch": 0.07814783478193985, - "learning_rate": 0.002955020729831495, - "loss": 1.0609, - "step": 1016 - }, - { - "epoch": 0.07822475194215829, - "learning_rate": 0.0029549325906640415, - "loss": 1.307, - "step": 1017 - }, - { - 
"epoch": 0.07830166910237674, - "learning_rate": 0.002954844366541558, - "loss": 0.9569, - "step": 1018 - }, - { - "epoch": 0.07837858626259518, - "learning_rate": 0.002954756057469195, - "loss": 0.955, - "step": 1019 - }, - { - "epoch": 0.07845550342281363, - "learning_rate": 0.00295466766345211, - "loss": 1.2528, - "step": 1020 - }, - { - "epoch": 0.07853242058303207, - "learning_rate": 0.002954579184495464, - "loss": 1.4933, - "step": 1021 - }, - { - "epoch": 0.07860933774325052, - "learning_rate": 0.002954490620604424, - "loss": 1.0781, - "step": 1022 - }, - { - "epoch": 0.07868625490346896, - "learning_rate": 0.0029544019717841602, - "loss": 0.9809, - "step": 1023 - }, - { - "epoch": 0.07876317206368741, - "learning_rate": 0.00295431323803985, - "loss": 0.8879, - "step": 1024 - }, - { - "epoch": 0.07884008922390585, - "learning_rate": 0.0029542244193766737, - "loss": 0.7664, - "step": 1025 - }, - { - "epoch": 0.0789170063841243, - "learning_rate": 0.0029541355157998187, - "loss": 0.8898, - "step": 1026 - }, - { - "epoch": 0.07899392354434275, - "learning_rate": 0.002954046527314475, - "loss": 1.248, - "step": 1027 - }, - { - "epoch": 0.07907084070456119, - "learning_rate": 0.0029539574539258393, - "loss": 1.0316, - "step": 1028 - }, - { - "epoch": 0.07914775786477964, - "learning_rate": 0.0029538682956391127, - "loss": 0.968, - "step": 1029 - }, - { - "epoch": 0.07922467502499808, - "learning_rate": 0.0029537790524595004, - "loss": 1.1932, - "step": 1030 - }, - { - "epoch": 0.07930159218521653, - "learning_rate": 0.0029536897243922153, - "loss": 1.1205, - "step": 1031 - }, - { - "epoch": 0.07937850934543497, - "learning_rate": 0.0029536003114424714, - "loss": 1.2685, - "step": 1032 - }, - { - "epoch": 0.07945542650565342, - "learning_rate": 0.0029535108136154904, - "loss": 1.2933, - "step": 1033 - }, - { - "epoch": 0.07953234366587185, - "learning_rate": 0.002953421230916499, - "loss": 1.051, - "step": 1034 - }, - { - "epoch": 0.0796092608260903, - "learning_rate": 0.0029533315633507263, - "loss": 1.0509, - "step": 1035 - }, - { - "epoch": 0.07968617798630874, - "learning_rate": 0.0029532418109234093, - "loss": 0.8834, - "step": 1036 - }, - { - "epoch": 0.07976309514652719, - "learning_rate": 0.0029531519736397884, - "loss": 1.2259, - "step": 1037 - }, - { - "epoch": 0.07984001230674563, - "learning_rate": 0.0029530620515051094, - "loss": 1.0971, - "step": 1038 - }, - { - "epoch": 0.07991692946696408, - "learning_rate": 0.0029529720445246226, - "loss": 1.0709, - "step": 1039 - }, - { - "epoch": 0.07999384662718252, - "learning_rate": 0.002952881952703584, - "loss": 1.0895, - "step": 1040 - }, - { - "epoch": 0.08007076378740097, - "learning_rate": 0.0029527917760472544, - "loss": 1.3178, - "step": 1041 - }, - { - "epoch": 0.08014768094761941, - "learning_rate": 0.0029527015145608985, - "loss": 1.3647, - "step": 1042 - }, - { - "epoch": 0.08022459810783786, - "learning_rate": 0.0029526111682497873, - "loss": 0.541, - "step": 1043 - }, - { - "epoch": 0.0803015152680563, - "learning_rate": 0.002952520737119196, - "loss": 1.0206, - "step": 1044 - }, - { - "epoch": 0.08037843242827475, - "learning_rate": 0.0029524302211744054, - "loss": 1.4368, - "step": 1045 - }, - { - "epoch": 0.0804553495884932, - "learning_rate": 0.0029523396204207, - "loss": 1.3588, - "step": 1046 - }, - { - "epoch": 0.08053226674871164, - "learning_rate": 0.0029522489348633714, - "loss": 1.2697, - "step": 1047 - }, - { - "epoch": 0.08060918390893008, - "learning_rate": 0.0029521581645077133, - "loss": 1.1707, - 
"step": 1048 - }, - { - "epoch": 0.08068610106914853, - "learning_rate": 0.0029520673093590267, - "loss": 1.2145, - "step": 1049 - }, - { - "epoch": 0.08076301822936698, - "learning_rate": 0.0029519763694226164, - "loss": 1.1222, - "step": 1050 - }, - { - "epoch": 0.08083993538958542, - "learning_rate": 0.0029518853447037933, - "loss": 1.0963, - "step": 1051 - }, - { - "epoch": 0.08091685254980387, - "learning_rate": 0.002951794235207871, - "loss": 1.1351, - "step": 1052 - }, - { - "epoch": 0.08099376971002231, - "learning_rate": 0.002951703040940171, - "loss": 1.0011, - "step": 1053 - }, - { - "epoch": 0.08107068687024076, - "learning_rate": 0.0029516117619060173, - "loss": 1.1586, - "step": 1054 - }, - { - "epoch": 0.08114760403045919, - "learning_rate": 0.0029515203981107397, - "loss": 0.9994, - "step": 1055 - }, - { - "epoch": 0.08122452119067763, - "learning_rate": 0.002951428949559674, - "loss": 1.0701, - "step": 1056 - }, - { - "epoch": 0.08130143835089608, - "learning_rate": 0.0029513374162581586, - "loss": 0.9321, - "step": 1057 - }, - { - "epoch": 0.08137835551111453, - "learning_rate": 0.0029512457982115393, - "loss": 1.3266, - "step": 1058 - }, - { - "epoch": 0.08145527267133297, - "learning_rate": 0.0029511540954251653, - "loss": 1.2703, - "step": 1059 - }, - { - "epoch": 0.08153218983155142, - "learning_rate": 0.002951062307904391, - "loss": 1.1536, - "step": 1060 - }, - { - "epoch": 0.08160910699176986, - "learning_rate": 0.002950970435654577, - "loss": 1.0513, - "step": 1061 - }, - { - "epoch": 0.0816860241519883, - "learning_rate": 0.0029508784786810866, - "loss": 1.0564, - "step": 1062 - }, - { - "epoch": 0.08176294131220675, - "learning_rate": 0.0029507864369892898, - "loss": 1.1673, - "step": 1063 - }, - { - "epoch": 0.0818398584724252, - "learning_rate": 0.002950694310584561, - "loss": 1.2823, - "step": 1064 - }, - { - "epoch": 0.08191677563264364, - "learning_rate": 0.002950602099472279, - "loss": 1.1043, - "step": 1065 - }, - { - "epoch": 0.08199369279286209, - "learning_rate": 0.002950509803657829, - "loss": 0.9982, - "step": 1066 - }, - { - "epoch": 0.08207060995308053, - "learning_rate": 0.0029504174231466005, - "loss": 0.8833, - "step": 1067 - }, - { - "epoch": 0.08214752711329898, - "learning_rate": 0.0029503249579439866, - "loss": 1.4851, - "step": 1068 - }, - { - "epoch": 0.08222444427351742, - "learning_rate": 0.002950232408055387, - "loss": 1.4989, - "step": 1069 - }, - { - "epoch": 0.08230136143373587, - "learning_rate": 0.0029501397734862053, - "loss": 1.3504, - "step": 1070 - }, - { - "epoch": 0.08237827859395432, - "learning_rate": 0.0029500470542418514, - "loss": 1.2241, - "step": 1071 - }, - { - "epoch": 0.08245519575417276, - "learning_rate": 0.0029499542503277384, - "loss": 1.1753, - "step": 1072 - }, - { - "epoch": 0.0825321129143912, - "learning_rate": 0.0029498613617492857, - "loss": 1.2218, - "step": 1073 - }, - { - "epoch": 0.08260903007460965, - "learning_rate": 0.0029497683885119173, - "loss": 0.9293, - "step": 1074 - }, - { - "epoch": 0.08268594723482808, - "learning_rate": 0.0029496753306210615, - "loss": 1.1212, - "step": 1075 - }, - { - "epoch": 0.08276286439504653, - "learning_rate": 0.002949582188082153, - "loss": 1.2674, - "step": 1076 - }, - { - "epoch": 0.08283978155526497, - "learning_rate": 0.002949488960900629, - "loss": 1.1751, - "step": 1077 - }, - { - "epoch": 0.08291669871548342, - "learning_rate": 0.0029493956490819345, - "loss": 1.1667, - "step": 1078 - }, - { - "epoch": 0.08299361587570186, - "learning_rate": 
0.002949302252631517, - "loss": 1.0874, - "step": 1079 - }, - { - "epoch": 0.08307053303592031, - "learning_rate": 0.002949208771554831, - "loss": 1.1288, - "step": 1080 - }, - { - "epoch": 0.08314745019613876, - "learning_rate": 0.002949115205857334, - "loss": 0.9737, - "step": 1081 - }, - { - "epoch": 0.0832243673563572, - "learning_rate": 0.00294902155554449, - "loss": 1.3464, - "step": 1082 - }, - { - "epoch": 0.08330128451657565, - "learning_rate": 0.002948927820621768, - "loss": 1.1628, - "step": 1083 - }, - { - "epoch": 0.08337820167679409, - "learning_rate": 0.00294883400109464, - "loss": 1.159, - "step": 1084 - }, - { - "epoch": 0.08345511883701254, - "learning_rate": 0.0029487400969685847, - "loss": 1.2899, - "step": 1085 - }, - { - "epoch": 0.08353203599723098, - "learning_rate": 0.0029486461082490856, - "loss": 1.4605, - "step": 1086 - }, - { - "epoch": 0.08360895315744943, - "learning_rate": 0.00294855203494163, - "loss": 1.0857, - "step": 1087 - }, - { - "epoch": 0.08368587031766787, - "learning_rate": 0.0029484578770517115, - "loss": 1.1297, - "step": 1088 - }, - { - "epoch": 0.08376278747788632, - "learning_rate": 0.0029483636345848285, - "loss": 1.0929, - "step": 1089 - }, - { - "epoch": 0.08383970463810476, - "learning_rate": 0.002948269307546483, - "loss": 1.1001, - "step": 1090 - }, - { - "epoch": 0.08391662179832321, - "learning_rate": 0.0029481748959421833, - "loss": 1.2571, - "step": 1091 - }, - { - "epoch": 0.08399353895854166, - "learning_rate": 0.0029480803997774425, - "loss": 1.1372, - "step": 1092 - }, - { - "epoch": 0.0840704561187601, - "learning_rate": 0.0029479858190577775, - "loss": 1.5051, - "step": 1093 - }, - { - "epoch": 0.08414737327897855, - "learning_rate": 0.002947891153788712, - "loss": 1.0785, - "step": 1094 - }, - { - "epoch": 0.08422429043919699, - "learning_rate": 0.002947796403975773, - "loss": 1.3196, - "step": 1095 - }, - { - "epoch": 0.08430120759941542, - "learning_rate": 0.002947701569624493, - "loss": 0.9348, - "step": 1096 - }, - { - "epoch": 0.08437812475963387, - "learning_rate": 0.0029476066507404092, - "loss": 1.3281, - "step": 1097 - }, - { - "epoch": 0.08445504191985231, - "learning_rate": 0.002947511647329065, - "loss": 1.1641, - "step": 1098 - }, - { - "epoch": 0.08453195908007076, - "learning_rate": 0.0029474165593960065, - "loss": 1.2422, - "step": 1099 - }, - { - "epoch": 0.0846088762402892, - "learning_rate": 0.0029473213869467873, - "loss": 0.9473, - "step": 1100 - }, - { - "epoch": 0.08468579340050765, - "learning_rate": 0.0029472261299869633, - "loss": 1.2083, - "step": 1101 - }, - { - "epoch": 0.0847627105607261, - "learning_rate": 0.002947130788522098, - "loss": 1.1972, - "step": 1102 - }, - { - "epoch": 0.08483962772094454, - "learning_rate": 0.0029470353625577574, - "loss": 1.3026, - "step": 1103 - }, - { - "epoch": 0.08491654488116299, - "learning_rate": 0.002946939852099514, - "loss": 1.2338, - "step": 1104 - }, - { - "epoch": 0.08499346204138143, - "learning_rate": 0.002946844257152945, - "loss": 1.2036, - "step": 1105 - }, - { - "epoch": 0.08507037920159988, - "learning_rate": 0.0029467485777236314, - "loss": 0.9109, - "step": 1106 - }, - { - "epoch": 0.08514729636181832, - "learning_rate": 0.002946652813817161, - "loss": 1.0046, - "step": 1107 - }, - { - "epoch": 0.08522421352203677, - "learning_rate": 0.0029465569654391244, - "loss": 1.2388, - "step": 1108 - }, - { - "epoch": 0.08530113068225521, - "learning_rate": 0.00294646103259512, - "loss": 1.1558, - "step": 1109 - }, - { - "epoch": 0.08537804784247366, - 
"learning_rate": 0.0029463650152907477, - "loss": 0.8834, - "step": 1110 - }, - { - "epoch": 0.0854549650026921, - "learning_rate": 0.002946268913531615, - "loss": 1.1247, - "step": 1111 - }, - { - "epoch": 0.08553188216291055, - "learning_rate": 0.0029461727273233334, - "loss": 0.9333, - "step": 1112 - }, - { - "epoch": 0.085608799323129, - "learning_rate": 0.0029460764566715193, - "loss": 1.2425, - "step": 1113 - }, - { - "epoch": 0.08568571648334744, - "learning_rate": 0.0029459801015817933, - "loss": 0.9309, - "step": 1114 - }, - { - "epoch": 0.08576263364356589, - "learning_rate": 0.0029458836620597823, - "loss": 1.3583, - "step": 1115 - }, - { - "epoch": 0.08583955080378432, - "learning_rate": 0.0029457871381111172, - "loss": 0.9936, - "step": 1116 - }, - { - "epoch": 0.08591646796400276, - "learning_rate": 0.002945690529741435, - "loss": 1.1197, - "step": 1117 - }, - { - "epoch": 0.08599338512422121, - "learning_rate": 0.002945593836956376, - "loss": 0.935, - "step": 1118 - }, - { - "epoch": 0.08607030228443965, - "learning_rate": 0.002945497059761586, - "loss": 1.1282, - "step": 1119 - }, - { - "epoch": 0.0861472194446581, - "learning_rate": 0.002945400198162716, - "loss": 1.2256, - "step": 1120 - }, - { - "epoch": 0.08622413660487654, - "learning_rate": 0.0029453032521654225, - "loss": 1.156, - "step": 1121 - }, - { - "epoch": 0.08630105376509499, - "learning_rate": 0.0029452062217753656, - "loss": 1.3209, - "step": 1122 - }, - { - "epoch": 0.08637797092531344, - "learning_rate": 0.0029451091069982115, - "loss": 1.1248, - "step": 1123 - }, - { - "epoch": 0.08645488808553188, - "learning_rate": 0.0029450119078396303, - "loss": 1.3843, - "step": 1124 - }, - { - "epoch": 0.08653180524575033, - "learning_rate": 0.002944914624305298, - "loss": 1.3654, - "step": 1125 - }, - { - "epoch": 0.08660872240596877, - "learning_rate": 0.0029448172564008946, - "loss": 1.0795, - "step": 1126 - }, - { - "epoch": 0.08668563956618722, - "learning_rate": 0.0029447198041321065, - "loss": 1.3927, - "step": 1127 - }, - { - "epoch": 0.08676255672640566, - "learning_rate": 0.002944622267504623, - "loss": 0.9433, - "step": 1128 - }, - { - "epoch": 0.08683947388662411, - "learning_rate": 0.00294452464652414, - "loss": 0.9105, - "step": 1129 - }, - { - "epoch": 0.08691639104684255, - "learning_rate": 0.0029444269411963565, - "loss": 1.0921, - "step": 1130 - }, - { - "epoch": 0.086993308207061, - "learning_rate": 0.0029443291515269796, - "loss": 1.4, - "step": 1131 - }, - { - "epoch": 0.08707022536727944, - "learning_rate": 0.002944231277521718, - "loss": 1.5666, - "step": 1132 - }, - { - "epoch": 0.08714714252749789, - "learning_rate": 0.002944133319186287, - "loss": 1.0999, - "step": 1133 - }, - { - "epoch": 0.08722405968771633, - "learning_rate": 0.002944035276526406, - "loss": 1.1352, - "step": 1134 - }, - { - "epoch": 0.08730097684793478, - "learning_rate": 0.002943937149547801, - "loss": 1.3064, - "step": 1135 - }, - { - "epoch": 0.08737789400815323, - "learning_rate": 0.0029438389382562004, - "loss": 1.0006, - "step": 1136 - }, - { - "epoch": 0.08745481116837166, - "learning_rate": 0.00294374064265734, - "loss": 1.2764, - "step": 1137 - }, - { - "epoch": 0.0875317283285901, - "learning_rate": 0.0029436422627569583, - "loss": 1.3404, - "step": 1138 - }, - { - "epoch": 0.08760864548880855, - "learning_rate": 0.002943543798560801, - "loss": 1.3129, - "step": 1139 - }, - { - "epoch": 0.087685562649027, - "learning_rate": 0.0029434452500746162, - "loss": 1.1383, - "step": 1140 - }, - { - "epoch": 
0.08776247980924544, - "learning_rate": 0.002943346617304159, - "loss": 1.2198, - "step": 1141 - }, - { - "epoch": 0.08783939696946388, - "learning_rate": 0.0029432479002551894, - "loss": 1.0944, - "step": 1142 - }, - { - "epoch": 0.08791631412968233, - "learning_rate": 0.00294314909893347, - "loss": 1.2941, - "step": 1143 - }, - { - "epoch": 0.08799323128990078, - "learning_rate": 0.0029430502133447707, - "loss": 1.426, - "step": 1144 - }, - { - "epoch": 0.08807014845011922, - "learning_rate": 0.002942951243494866, - "loss": 1.2258, - "step": 1145 - }, - { - "epoch": 0.08814706561033767, - "learning_rate": 0.002942852189389534, - "loss": 1.2612, - "step": 1146 - }, - { - "epoch": 0.08822398277055611, - "learning_rate": 0.002942753051034559, - "loss": 1.0856, - "step": 1147 - }, - { - "epoch": 0.08830089993077456, - "learning_rate": 0.0029426538284357297, - "loss": 1.2527, - "step": 1148 - }, - { - "epoch": 0.088377817090993, - "learning_rate": 0.00294255452159884, - "loss": 1.212, - "step": 1149 - }, - { - "epoch": 0.08845473425121145, - "learning_rate": 0.0029424551305296887, - "loss": 0.6959, - "step": 1150 - }, - { - "epoch": 0.08853165141142989, - "learning_rate": 0.0029423556552340786, - "loss": 1.2818, - "step": 1151 - }, - { - "epoch": 0.08860856857164834, - "learning_rate": 0.0029422560957178185, - "loss": 0.7935, - "step": 1152 - }, - { - "epoch": 0.08868548573186678, - "learning_rate": 0.0029421564519867224, - "loss": 1.2913, - "step": 1153 - }, - { - "epoch": 0.08876240289208523, - "learning_rate": 0.0029420567240466076, - "loss": 0.7006, - "step": 1154 - }, - { - "epoch": 0.08883932005230367, - "learning_rate": 0.002941956911903298, - "loss": 1.3979, - "step": 1155 - }, - { - "epoch": 0.08891623721252212, - "learning_rate": 0.0029418570155626215, - "loss": 0.8993, - "step": 1156 - }, - { - "epoch": 0.08899315437274055, - "learning_rate": 0.002941757035030412, - "loss": 1.0958, - "step": 1157 - }, - { - "epoch": 0.089070071532959, - "learning_rate": 0.002941656970312505, - "loss": 1.0649, - "step": 1158 - }, - { - "epoch": 0.08914698869317744, - "learning_rate": 0.0029415568214147467, - "loss": 1.014, - "step": 1159 - }, - { - "epoch": 0.08922390585339589, - "learning_rate": 0.002941456588342982, - "loss": 1.2312, - "step": 1160 - }, - { - "epoch": 0.08930082301361433, - "learning_rate": 0.0029413562711030658, - "loss": 1.2211, - "step": 1161 - }, - { - "epoch": 0.08937774017383278, - "learning_rate": 0.002941255869700854, - "loss": 1.1923, - "step": 1162 - }, - { - "epoch": 0.08945465733405122, - "learning_rate": 0.00294115538414221, - "loss": 1.2505, - "step": 1163 - }, - { - "epoch": 0.08953157449426967, - "learning_rate": 0.0029410548144330017, - "loss": 0.8847, - "step": 1164 - }, - { - "epoch": 0.08960849165448811, - "learning_rate": 0.0029409541605791008, - "loss": 1.242, - "step": 1165 - }, - { - "epoch": 0.08968540881470656, - "learning_rate": 0.0029408534225863845, - "loss": 1.1686, - "step": 1166 - }, - { - "epoch": 0.089762325974925, - "learning_rate": 0.002940752600460736, - "loss": 0.9645, - "step": 1167 - }, - { - "epoch": 0.08983924313514345, - "learning_rate": 0.0029406516942080403, - "loss": 1.215, - "step": 1168 - }, - { - "epoch": 0.0899161602953619, - "learning_rate": 0.0029405507038341916, - "loss": 1.117, - "step": 1169 - }, - { - "epoch": 0.08999307745558034, - "learning_rate": 0.002940449629345086, - "loss": 1.1072, - "step": 1170 - }, - { - "epoch": 0.09006999461579879, - "learning_rate": 0.002940348470746625, - "loss": 1.0377, - "step": 1171 - }, - { 
- "epoch": 0.09014691177601723, - "learning_rate": 0.002940247228044716, - "loss": 0.8495, - "step": 1172 - }, - { - "epoch": 0.09022382893623568, - "learning_rate": 0.00294014590124527, - "loss": 1.0416, - "step": 1173 - }, - { - "epoch": 0.09030074609645412, - "learning_rate": 0.002940044490354205, - "loss": 1.1154, - "step": 1174 - }, - { - "epoch": 0.09037766325667257, - "learning_rate": 0.0029399429953774403, - "loss": 1.1448, - "step": 1175 - }, - { - "epoch": 0.09045458041689101, - "learning_rate": 0.0029398414163209034, - "loss": 1.407, - "step": 1176 - }, - { - "epoch": 0.09053149757710946, - "learning_rate": 0.0029397397531905262, - "loss": 1.0101, - "step": 1177 - }, - { - "epoch": 0.09060841473732789, - "learning_rate": 0.002939638005992244, - "loss": 0.9946, - "step": 1178 - }, - { - "epoch": 0.09068533189754634, - "learning_rate": 0.002939536174731999, - "loss": 1.0516, - "step": 1179 - }, - { - "epoch": 0.09076224905776478, - "learning_rate": 0.0029394342594157357, - "loss": 1.7657, - "step": 1180 - }, - { - "epoch": 0.09083916621798323, - "learning_rate": 0.002939332260049406, - "loss": 1.4462, - "step": 1181 - }, - { - "epoch": 0.09091608337820167, - "learning_rate": 0.0029392301766389657, - "loss": 1.2188, - "step": 1182 - }, - { - "epoch": 0.09099300053842012, - "learning_rate": 0.0029391280091903755, - "loss": 0.9906, - "step": 1183 - }, - { - "epoch": 0.09106991769863856, - "learning_rate": 0.002939025757709601, - "loss": 1.3368, - "step": 1184 - }, - { - "epoch": 0.09114683485885701, - "learning_rate": 0.0029389234222026127, - "loss": 1.2898, - "step": 1185 - }, - { - "epoch": 0.09122375201907545, - "learning_rate": 0.0029388210026753863, - "loss": 1.1105, - "step": 1186 - }, - { - "epoch": 0.0913006691792939, - "learning_rate": 0.002938718499133902, - "loss": 1.0106, - "step": 1187 - }, - { - "epoch": 0.09137758633951235, - "learning_rate": 0.0029386159115841453, - "loss": 1.1443, - "step": 1188 - }, - { - "epoch": 0.09145450349973079, - "learning_rate": 0.0029385132400321055, - "loss": 1.2002, - "step": 1189 - }, - { - "epoch": 0.09153142065994924, - "learning_rate": 0.0029384104844837793, - "loss": 1.2615, - "step": 1190 - }, - { - "epoch": 0.09160833782016768, - "learning_rate": 0.0029383076449451657, - "loss": 1.1345, - "step": 1191 - }, - { - "epoch": 0.09168525498038613, - "learning_rate": 0.0029382047214222694, - "loss": 1.4055, - "step": 1192 - }, - { - "epoch": 0.09176217214060457, - "learning_rate": 0.0029381017139211004, - "loss": 1.4962, - "step": 1193 - }, - { - "epoch": 0.09183908930082302, - "learning_rate": 0.002937998622447674, - "loss": 1.1178, - "step": 1194 - }, - { - "epoch": 0.09191600646104146, - "learning_rate": 0.0029378954470080094, - "loss": 1.0983, - "step": 1195 - }, - { - "epoch": 0.09199292362125991, - "learning_rate": 0.0029377921876081307, - "loss": 0.9965, - "step": 1196 - }, - { - "epoch": 0.09206984078147835, - "learning_rate": 0.002937688844254068, - "loss": 1.1954, - "step": 1197 - }, - { - "epoch": 0.09214675794169679, - "learning_rate": 0.002937585416951856, - "loss": 1.6167, - "step": 1198 - }, - { - "epoch": 0.09222367510191523, - "learning_rate": 0.0029374819057075324, - "loss": 1.2936, - "step": 1199 - }, - { - "epoch": 0.09230059226213368, - "learning_rate": 0.0029373783105271427, - "loss": 0.9425, - "step": 1200 - }, - { - "epoch": 0.09237750942235212, - "learning_rate": 0.002937274631416735, - "loss": 1.5481, - "step": 1201 - }, - { - "epoch": 0.09245442658257057, - "learning_rate": 0.002937170868382364, - "loss": 
1.1663, - "step": 1202 - }, - { - "epoch": 0.09253134374278901, - "learning_rate": 0.002937067021430088, - "loss": 1.0448, - "step": 1203 - }, - { - "epoch": 0.09260826090300746, - "learning_rate": 0.0029369630905659713, - "loss": 1.3691, - "step": 1204 - }, - { - "epoch": 0.0926851780632259, - "learning_rate": 0.002936859075796082, - "loss": 1.1063, - "step": 1205 - }, - { - "epoch": 0.09276209522344435, - "learning_rate": 0.0029367549771264936, - "loss": 1.2637, - "step": 1206 - }, - { - "epoch": 0.0928390123836628, - "learning_rate": 0.002936650794563285, - "loss": 1.298, - "step": 1207 - }, - { - "epoch": 0.09291592954388124, - "learning_rate": 0.0029365465281125393, - "loss": 1.0207, - "step": 1208 - }, - { - "epoch": 0.09299284670409969, - "learning_rate": 0.002936442177780344, - "loss": 0.8481, - "step": 1209 - }, - { - "epoch": 0.09306976386431813, - "learning_rate": 0.0029363377435727934, - "loss": 1.1319, - "step": 1210 - }, - { - "epoch": 0.09314668102453658, - "learning_rate": 0.0029362332254959855, - "loss": 1.1559, - "step": 1211 - }, - { - "epoch": 0.09322359818475502, - "learning_rate": 0.002936128623556022, - "loss": 1.5641, - "step": 1212 - }, - { - "epoch": 0.09330051534497347, - "learning_rate": 0.002936023937759012, - "loss": 1.5606, - "step": 1213 - }, - { - "epoch": 0.09337743250519191, - "learning_rate": 0.0029359191681110675, - "loss": 1.3077, - "step": 1214 - }, - { - "epoch": 0.09345434966541036, - "learning_rate": 0.0029358143146183064, - "loss": 1.3394, - "step": 1215 - }, - { - "epoch": 0.0935312668256288, - "learning_rate": 0.002935709377286851, - "loss": 1.0419, - "step": 1216 - }, - { - "epoch": 0.09360818398584725, - "learning_rate": 0.002935604356122829, - "loss": 1.3052, - "step": 1217 - }, - { - "epoch": 0.0936851011460657, - "learning_rate": 0.002935499251132372, - "loss": 1.0781, - "step": 1218 - }, - { - "epoch": 0.09376201830628413, - "learning_rate": 0.0029353940623216184, - "loss": 1.1302, - "step": 1219 - }, - { - "epoch": 0.09383893546650257, - "learning_rate": 0.002935288789696709, - "loss": 1.2968, - "step": 1220 - }, - { - "epoch": 0.09391585262672102, - "learning_rate": 0.0029351834332637912, - "loss": 1.3628, - "step": 1221 - }, - { - "epoch": 0.09399276978693946, - "learning_rate": 0.002935077993029018, - "loss": 1.2263, - "step": 1222 - }, - { - "epoch": 0.09406968694715791, - "learning_rate": 0.002934972468998544, - "loss": 1.4475, - "step": 1223 - }, - { - "epoch": 0.09414660410737635, - "learning_rate": 0.0029348668611785325, - "loss": 1.2735, - "step": 1224 - }, - { - "epoch": 0.0942235212675948, - "learning_rate": 0.0029347611695751496, - "loss": 1.124, - "step": 1225 - }, - { - "epoch": 0.09430043842781324, - "learning_rate": 0.0029346553941945667, - "loss": 1.5155, - "step": 1226 - }, - { - "epoch": 0.09437735558803169, - "learning_rate": 0.0029345495350429603, - "loss": 1.2345, - "step": 1227 - }, - { - "epoch": 0.09445427274825013, - "learning_rate": 0.002934443592126512, - "loss": 1.0955, - "step": 1228 - }, - { - "epoch": 0.09453118990846858, - "learning_rate": 0.002934337565451406, - "loss": 1.3493, - "step": 1229 - }, - { - "epoch": 0.09460810706868703, - "learning_rate": 0.002934231455023836, - "loss": 1.1968, - "step": 1230 - }, - { - "epoch": 0.09468502422890547, - "learning_rate": 0.0029341252608499962, - "loss": 1.3625, - "step": 1231 - }, - { - "epoch": 0.09476194138912392, - "learning_rate": 0.0029340189829360876, - "loss": 1.2965, - "step": 1232 - }, - { - "epoch": 0.09483885854934236, - "learning_rate": 
0.0029339126212883167, - "loss": 1.1667, - "step": 1233 - }, - { - "epoch": 0.0949157757095608, - "learning_rate": 0.002933806175912893, - "loss": 1.4469, - "step": 1234 - }, - { - "epoch": 0.09499269286977925, - "learning_rate": 0.0029336996468160323, - "loss": 0.9501, - "step": 1235 - }, - { - "epoch": 0.0950696100299977, - "learning_rate": 0.0029335930340039554, - "loss": 1.132, - "step": 1236 - }, - { - "epoch": 0.09514652719021614, - "learning_rate": 0.002933486337482887, - "loss": 1.2609, - "step": 1237 - }, - { - "epoch": 0.09522344435043459, - "learning_rate": 0.0029333795572590573, - "loss": 1.0248, - "step": 1238 - }, - { - "epoch": 0.09530036151065302, - "learning_rate": 0.0029332726933387018, - "loss": 1.0056, - "step": 1239 - }, - { - "epoch": 0.09537727867087147, - "learning_rate": 0.0029331657457280593, - "loss": 1.3739, - "step": 1240 - }, - { - "epoch": 0.09545419583108991, - "learning_rate": 0.002933058714433376, - "loss": 1.1242, - "step": 1241 - }, - { - "epoch": 0.09553111299130836, - "learning_rate": 0.0029329515994609007, - "loss": 1.238, - "step": 1242 - }, - { - "epoch": 0.0956080301515268, - "learning_rate": 0.002932844400816888, - "loss": 1.7216, - "step": 1243 - }, - { - "epoch": 0.09568494731174525, - "learning_rate": 0.0029327371185075973, - "loss": 1.3232, - "step": 1244 - }, - { - "epoch": 0.09576186447196369, - "learning_rate": 0.0029326297525392934, - "loss": 0.8693, - "step": 1245 - }, - { - "epoch": 0.09583878163218214, - "learning_rate": 0.002932522302918245, - "loss": 1.1943, - "step": 1246 - }, - { - "epoch": 0.09591569879240058, - "learning_rate": 0.0029324147696507268, - "loss": 1.289, - "step": 1247 - }, - { - "epoch": 0.09599261595261903, - "learning_rate": 0.002932307152743017, - "loss": 0.98, - "step": 1248 - }, - { - "epoch": 0.09606953311283747, - "learning_rate": 0.0029321994522013995, - "loss": 1.4621, - "step": 1249 - }, - { - "epoch": 0.09614645027305592, - "learning_rate": 0.002932091668032164, - "loss": 1.1523, - "step": 1250 - }, - { - "epoch": 0.09622336743327436, - "learning_rate": 0.0029319838002416034, - "loss": 1.1686, - "step": 1251 - }, - { - "epoch": 0.09630028459349281, - "learning_rate": 0.0029318758488360163, - "loss": 1.2622, - "step": 1252 - }, - { - "epoch": 0.09637720175371126, - "learning_rate": 0.002931767813821706, - "loss": 1.2453, - "step": 1253 - }, - { - "epoch": 0.0964541189139297, - "learning_rate": 0.002931659695204981, - "loss": 1.4202, - "step": 1254 - }, - { - "epoch": 0.09653103607414815, - "learning_rate": 0.002931551492992154, - "loss": 1.1503, - "step": 1255 - }, - { - "epoch": 0.09660795323436659, - "learning_rate": 0.002931443207189544, - "loss": 1.1302, - "step": 1256 - }, - { - "epoch": 0.09668487039458504, - "learning_rate": 0.002931334837803473, - "loss": 1.2203, - "step": 1257 - }, - { - "epoch": 0.09676178755480348, - "learning_rate": 0.002931226384840269, - "loss": 1.2188, - "step": 1258 - }, - { - "epoch": 0.09683870471502193, - "learning_rate": 0.002931117848306265, - "loss": 1.0731, - "step": 1259 - }, - { - "epoch": 0.09691562187524036, - "learning_rate": 0.002931009228207798, - "loss": 1.2517, - "step": 1260 - }, - { - "epoch": 0.0969925390354588, - "learning_rate": 0.0029309005245512117, - "loss": 1.1722, - "step": 1261 - }, - { - "epoch": 0.09706945619567725, - "learning_rate": 0.0029307917373428516, - "loss": 1.2239, - "step": 1262 - }, - { - "epoch": 0.0971463733558957, - "learning_rate": 0.002930682866589071, - "loss": 0.8266, - "step": 1263 - }, - { - "epoch": 0.09722329051611414, - 
"learning_rate": 0.002930573912296227, - "loss": 1.2538, - "step": 1264 - }, - { - "epoch": 0.09730020767633259, - "learning_rate": 0.002930464874470681, - "loss": 1.2797, - "step": 1265 - }, - { - "epoch": 0.09737712483655103, - "learning_rate": 0.0029303557531188008, - "loss": 1.3732, - "step": 1266 - }, - { - "epoch": 0.09745404199676948, - "learning_rate": 0.0029302465482469574, - "loss": 1.1501, - "step": 1267 - }, - { - "epoch": 0.09753095915698792, - "learning_rate": 0.0029301372598615275, - "loss": 1.3214, - "step": 1268 - }, - { - "epoch": 0.09760787631720637, - "learning_rate": 0.002930027887968892, - "loss": 0.9647, - "step": 1269 - }, - { - "epoch": 0.09768479347742481, - "learning_rate": 0.002929918432575438, - "loss": 1.1907, - "step": 1270 - }, - { - "epoch": 0.09776171063764326, - "learning_rate": 0.002929808893687556, - "loss": 0.8753, - "step": 1271 - }, - { - "epoch": 0.0978386277978617, - "learning_rate": 0.0029296992713116433, - "loss": 1.0652, - "step": 1272 - }, - { - "epoch": 0.09791554495808015, - "learning_rate": 0.0029295895654541004, - "loss": 0.9914, - "step": 1273 - }, - { - "epoch": 0.0979924621182986, - "learning_rate": 0.0029294797761213326, - "loss": 1.0565, - "step": 1274 - }, - { - "epoch": 0.09806937927851704, - "learning_rate": 0.0029293699033197504, - "loss": 1.1092, - "step": 1275 - }, - { - "epoch": 0.09814629643873549, - "learning_rate": 0.0029292599470557706, - "loss": 1.1121, - "step": 1276 - }, - { - "epoch": 0.09822321359895393, - "learning_rate": 0.0029291499073358126, - "loss": 1.0894, - "step": 1277 - }, - { - "epoch": 0.09830013075917238, - "learning_rate": 0.002929039784166302, - "loss": 0.7796, - "step": 1278 - }, - { - "epoch": 0.09837704791939082, - "learning_rate": 0.0029289295775536696, - "loss": 1.0671, - "step": 1279 - }, - { - "epoch": 0.09845396507960925, - "learning_rate": 0.0029288192875043494, - "loss": 0.8831, - "step": 1280 - }, - { - "epoch": 0.0985308822398277, - "learning_rate": 0.0029287089140247824, - "loss": 0.9901, - "step": 1281 - }, - { - "epoch": 0.09860779940004614, - "learning_rate": 0.0029285984571214125, - "loss": 1.1922, - "step": 1282 - }, - { - "epoch": 0.09868471656026459, - "learning_rate": 0.0029284879168006896, - "loss": 1.2796, - "step": 1283 - }, - { - "epoch": 0.09876163372048304, - "learning_rate": 0.0029283772930690692, - "loss": 0.9585, - "step": 1284 - }, - { - "epoch": 0.09883855088070148, - "learning_rate": 0.0029282665859330097, - "loss": 1.2213, - "step": 1285 - }, - { - "epoch": 0.09891546804091993, - "learning_rate": 0.002928155795398976, - "loss": 1.2803, - "step": 1286 - }, - { - "epoch": 0.09899238520113837, - "learning_rate": 0.0029280449214734365, - "loss": 1.4555, - "step": 1287 - }, - { - "epoch": 0.09906930236135682, - "learning_rate": 0.002927933964162866, - "loss": 1.3525, - "step": 1288 - }, - { - "epoch": 0.09914621952157526, - "learning_rate": 0.0029278229234737433, - "loss": 1.4479, - "step": 1289 - }, - { - "epoch": 0.09922313668179371, - "learning_rate": 0.002927711799412552, - "loss": 0.7878, - "step": 1290 - }, - { - "epoch": 0.09930005384201215, - "learning_rate": 0.002927600591985781, - "loss": 0.7441, - "step": 1291 - }, - { - "epoch": 0.0993769710022306, - "learning_rate": 0.0029274893011999236, - "loss": 1.5054, - "step": 1292 - }, - { - "epoch": 0.09945388816244904, - "learning_rate": 0.0029273779270614776, - "loss": 1.2258, - "step": 1293 - }, - { - "epoch": 0.09953080532266749, - "learning_rate": 0.002927266469576947, - "loss": 1.2355, - "step": 1294 - }, - { - 
"epoch": 0.09960772248288594, - "learning_rate": 0.0029271549287528406, - "loss": 0.9092, - "step": 1295 - }, - { - "epoch": 0.09968463964310438, - "learning_rate": 0.00292704330459567, - "loss": 1.1802, - "step": 1296 - }, - { - "epoch": 0.09976155680332283, - "learning_rate": 0.002926931597111954, - "loss": 1.2831, - "step": 1297 - }, - { - "epoch": 0.09983847396354127, - "learning_rate": 0.0029268198063082144, - "loss": 1.1867, - "step": 1298 - }, - { - "epoch": 0.09991539112375972, - "learning_rate": 0.0029267079321909795, - "loss": 1.1651, - "step": 1299 - }, - { - "epoch": 0.09999230828397816, - "learning_rate": 0.0029265959747667813, - "loss": 1.1498, - "step": 1300 - }, - { - "epoch": 0.1000692254441966, - "learning_rate": 0.002926483934042158, - "loss": 1.1721, - "step": 1301 - }, - { - "epoch": 0.10014614260441504, - "learning_rate": 0.0029263718100236507, - "loss": 1.0412, - "step": 1302 - }, - { - "epoch": 0.10022305976463348, - "learning_rate": 0.0029262596027178067, - "loss": 1.5262, - "step": 1303 - }, - { - "epoch": 0.10029997692485193, - "learning_rate": 0.0029261473121311786, - "loss": 1.2988, - "step": 1304 - }, - { - "epoch": 0.10037689408507038, - "learning_rate": 0.002926034938270322, - "loss": 0.9277, - "step": 1305 - }, - { - "epoch": 0.10045381124528882, - "learning_rate": 0.0029259224811418, - "loss": 1.4692, - "step": 1306 - }, - { - "epoch": 0.10053072840550727, - "learning_rate": 0.0029258099407521777, - "loss": 1.1353, - "step": 1307 - }, - { - "epoch": 0.10060764556572571, - "learning_rate": 0.0029256973171080274, - "loss": 1.0995, - "step": 1308 - }, - { - "epoch": 0.10068456272594416, - "learning_rate": 0.0029255846102159244, - "loss": 1.1535, - "step": 1309 - }, - { - "epoch": 0.1007614798861626, - "learning_rate": 0.002925471820082451, - "loss": 1.4002, - "step": 1310 - }, - { - "epoch": 0.10083839704638105, - "learning_rate": 0.0029253589467141917, - "loss": 1.2245, - "step": 1311 - }, - { - "epoch": 0.1009153142065995, - "learning_rate": 0.0029252459901177386, - "loss": 1.2784, - "step": 1312 - }, - { - "epoch": 0.10099223136681794, - "learning_rate": 0.0029251329502996866, - "loss": 1.3721, - "step": 1313 - }, - { - "epoch": 0.10106914852703638, - "learning_rate": 0.0029250198272666363, - "loss": 1.288, - "step": 1314 - }, - { - "epoch": 0.10114606568725483, - "learning_rate": 0.002924906621025193, - "loss": 1.091, - "step": 1315 - }, - { - "epoch": 0.10122298284747328, - "learning_rate": 0.0029247933315819674, - "loss": 0.844, - "step": 1316 - }, - { - "epoch": 0.10129990000769172, - "learning_rate": 0.0029246799589435738, - "loss": 0.9985, - "step": 1317 - }, - { - "epoch": 0.10137681716791017, - "learning_rate": 0.0029245665031166334, - "loss": 1.2714, - "step": 1318 - }, - { - "epoch": 0.10145373432812861, - "learning_rate": 0.00292445296410777, - "loss": 1.3008, - "step": 1319 - }, - { - "epoch": 0.10153065148834706, - "learning_rate": 0.0029243393419236133, - "loss": 1.233, - "step": 1320 - }, - { - "epoch": 0.10160756864856549, - "learning_rate": 0.002924225636570798, - "loss": 0.8218, - "step": 1321 - }, - { - "epoch": 0.10168448580878393, - "learning_rate": 0.0029241118480559636, - "loss": 1.3125, - "step": 1322 - }, - { - "epoch": 0.10176140296900238, - "learning_rate": 0.002923997976385754, - "loss": 1.3914, - "step": 1323 - }, - { - "epoch": 0.10183832012922082, - "learning_rate": 0.0029238840215668185, - "loss": 1.1458, - "step": 1324 - }, - { - "epoch": 0.10191523728943927, - "learning_rate": 0.0029237699836058115, - "loss": 1.2458, - 
"step": 1325 - }, - { - "epoch": 0.10199215444965772, - "learning_rate": 0.0029236558625093906, - "loss": 1.1755, - "step": 1326 - }, - { - "epoch": 0.10206907160987616, - "learning_rate": 0.002923541658284221, - "loss": 1.1569, - "step": 1327 - }, - { - "epoch": 0.1021459887700946, - "learning_rate": 0.00292342737093697, - "loss": 1.127, - "step": 1328 - }, - { - "epoch": 0.10222290593031305, - "learning_rate": 0.0029233130004743114, - "loss": 1.0872, - "step": 1329 - }, - { - "epoch": 0.1022998230905315, - "learning_rate": 0.0029231985469029232, - "loss": 1.2195, - "step": 1330 - }, - { - "epoch": 0.10237674025074994, - "learning_rate": 0.002923084010229489, - "loss": 1.3099, - "step": 1331 - }, - { - "epoch": 0.10245365741096839, - "learning_rate": 0.002922969390460696, - "loss": 1.4398, - "step": 1332 - }, - { - "epoch": 0.10253057457118683, - "learning_rate": 0.0029228546876032373, - "loss": 0.9453, - "step": 1333 - }, - { - "epoch": 0.10260749173140528, - "learning_rate": 0.002922739901663811, - "loss": 1.2617, - "step": 1334 - }, - { - "epoch": 0.10268440889162372, - "learning_rate": 0.0029226250326491185, - "loss": 1.1086, - "step": 1335 - }, - { - "epoch": 0.10276132605184217, - "learning_rate": 0.002922510080565868, - "loss": 1.055, - "step": 1336 - }, - { - "epoch": 0.10283824321206061, - "learning_rate": 0.0029223950454207713, - "loss": 1.0562, - "step": 1337 - }, - { - "epoch": 0.10291516037227906, - "learning_rate": 0.002922279927220546, - "loss": 0.9574, - "step": 1338 - }, - { - "epoch": 0.1029920775324975, - "learning_rate": 0.002922164725971913, - "loss": 0.9332, - "step": 1339 - }, - { - "epoch": 0.10306899469271595, - "learning_rate": 0.0029220494416815996, - "loss": 1.2528, - "step": 1340 - }, - { - "epoch": 0.1031459118529344, - "learning_rate": 0.002921934074356337, - "loss": 1.23, - "step": 1341 - }, - { - "epoch": 0.10322282901315283, - "learning_rate": 0.002921818624002862, - "loss": 1.1342, - "step": 1342 - }, - { - "epoch": 0.10329974617337127, - "learning_rate": 0.002921703090627916, - "loss": 1.157, - "step": 1343 - }, - { - "epoch": 0.10337666333358972, - "learning_rate": 0.002921587474238245, - "loss": 0.9476, - "step": 1344 - }, - { - "epoch": 0.10345358049380816, - "learning_rate": 0.0029214717748405994, - "loss": 1.3534, - "step": 1345 - }, - { - "epoch": 0.10353049765402661, - "learning_rate": 0.0029213559924417356, - "loss": 0.888, - "step": 1346 - }, - { - "epoch": 0.10360741481424506, - "learning_rate": 0.002921240127048414, - "loss": 0.6896, - "step": 1347 - }, - { - "epoch": 0.1036843319744635, - "learning_rate": 0.0029211241786674005, - "loss": 1.3125, - "step": 1348 - }, - { - "epoch": 0.10376124913468195, - "learning_rate": 0.0029210081473054652, - "loss": 1.1898, - "step": 1349 - }, - { - "epoch": 0.10383816629490039, - "learning_rate": 0.002920892032969383, - "loss": 1.3149, - "step": 1350 - }, - { - "epoch": 0.10391508345511884, - "learning_rate": 0.0029207758356659336, - "loss": 1.0003, - "step": 1351 - }, - { - "epoch": 0.10399200061533728, - "learning_rate": 0.0029206595554019037, - "loss": 1.3983, - "step": 1352 - }, - { - "epoch": 0.10406891777555573, - "learning_rate": 0.002920543192184081, - "loss": 1.2183, - "step": 1353 - }, - { - "epoch": 0.10414583493577417, - "learning_rate": 0.002920426746019261, - "loss": 0.9984, - "step": 1354 - }, - { - "epoch": 0.10422275209599262, - "learning_rate": 0.002920310216914243, - "loss": 1.2361, - "step": 1355 - }, - { - "epoch": 0.10429966925621106, - "learning_rate": 0.002920193604875831, - 
"loss": 1.258, - "step": 1356 - }, - { - "epoch": 0.10437658641642951, - "learning_rate": 0.0029200769099108344, - "loss": 0.6998, - "step": 1357 - }, - { - "epoch": 0.10445350357664795, - "learning_rate": 0.0029199601320260673, - "loss": 1.3332, - "step": 1358 - }, - { - "epoch": 0.1045304207368664, - "learning_rate": 0.002919843271228348, - "loss": 1.24, - "step": 1359 - }, - { - "epoch": 0.10460733789708485, - "learning_rate": 0.0029197263275245007, - "loss": 1.3677, - "step": 1360 - }, - { - "epoch": 0.10468425505730329, - "learning_rate": 0.0029196093009213535, - "loss": 1.3422, - "step": 1361 - }, - { - "epoch": 0.10476117221752172, - "learning_rate": 0.0029194921914257395, - "loss": 0.9152, - "step": 1362 - }, - { - "epoch": 0.10483808937774017, - "learning_rate": 0.002919374999044497, - "loss": 1.4853, - "step": 1363 - }, - { - "epoch": 0.10491500653795861, - "learning_rate": 0.002919257723784469, - "loss": 0.9956, - "step": 1364 - }, - { - "epoch": 0.10499192369817706, - "learning_rate": 0.0029191403656525037, - "loss": 1.1749, - "step": 1365 - }, - { - "epoch": 0.1050688408583955, - "learning_rate": 0.0029190229246554536, - "loss": 1.02, - "step": 1366 - }, - { - "epoch": 0.10514575801861395, - "learning_rate": 0.002918905400800176, - "loss": 1.0859, - "step": 1367 - }, - { - "epoch": 0.1052226751788324, - "learning_rate": 0.0029187877940935326, - "loss": 1.2528, - "step": 1368 - }, - { - "epoch": 0.10529959233905084, - "learning_rate": 0.002918670104542392, - "loss": 1.1351, - "step": 1369 - }, - { - "epoch": 0.10537650949926929, - "learning_rate": 0.0029185523321536254, - "loss": 0.9711, - "step": 1370 - }, - { - "epoch": 0.10545342665948773, - "learning_rate": 0.0029184344769341095, - "loss": 1.2338, - "step": 1371 - }, - { - "epoch": 0.10553034381970618, - "learning_rate": 0.0029183165388907265, - "loss": 0.9078, - "step": 1372 - }, - { - "epoch": 0.10560726097992462, - "learning_rate": 0.0029181985180303624, - "loss": 0.9939, - "step": 1373 - }, - { - "epoch": 0.10568417814014307, - "learning_rate": 0.002918080414359909, - "loss": 0.8116, - "step": 1374 - }, - { - "epoch": 0.10576109530036151, - "learning_rate": 0.0029179622278862625, - "loss": 1.3753, - "step": 1375 - }, - { - "epoch": 0.10583801246057996, - "learning_rate": 0.0029178439586163233, - "loss": 1.0247, - "step": 1376 - }, - { - "epoch": 0.1059149296207984, - "learning_rate": 0.002917725606556998, - "loss": 1.6761, - "step": 1377 - }, - { - "epoch": 0.10599184678101685, - "learning_rate": 0.0029176071717151968, - "loss": 0.9708, - "step": 1378 - }, - { - "epoch": 0.1060687639412353, - "learning_rate": 0.0029174886540978356, - "loss": 1.2245, - "step": 1379 - }, - { - "epoch": 0.10614568110145374, - "learning_rate": 0.0029173700537118348, - "loss": 1.2728, - "step": 1380 - }, - { - "epoch": 0.10622259826167219, - "learning_rate": 0.002917251370564119, - "loss": 1.1181, - "step": 1381 - }, - { - "epoch": 0.10629951542189062, - "learning_rate": 0.002917132604661619, - "loss": 0.8526, - "step": 1382 - }, - { - "epoch": 0.10637643258210906, - "learning_rate": 0.002917013756011269, - "loss": 1.185, - "step": 1383 - }, - { - "epoch": 0.10645334974232751, - "learning_rate": 0.002916894824620009, - "loss": 1.3072, - "step": 1384 - }, - { - "epoch": 0.10653026690254595, - "learning_rate": 0.0029167758104947845, - "loss": 1.5782, - "step": 1385 - }, - { - "epoch": 0.1066071840627644, - "learning_rate": 0.0029166567136425427, - "loss": 1.4001, - "step": 1386 - }, - { - "epoch": 0.10668410122298284, - "learning_rate": 
0.0029165375340702393, - "loss": 0.7133, - "step": 1387 - }, - { - "epoch": 0.10676101838320129, - "learning_rate": 0.002916418271784833, - "loss": 1.149, - "step": 1388 - }, - { - "epoch": 0.10683793554341973, - "learning_rate": 0.0029162989267932883, - "loss": 1.1076, - "step": 1389 - }, - { - "epoch": 0.10691485270363818, - "learning_rate": 0.0029161794991025723, - "loss": 1.0589, - "step": 1390 - }, - { - "epoch": 0.10699176986385663, - "learning_rate": 0.00291605998871966, - "loss": 0.9592, - "step": 1391 - }, - { - "epoch": 0.10706868702407507, - "learning_rate": 0.002915940395651529, - "loss": 1.1013, - "step": 1392 - }, - { - "epoch": 0.10714560418429352, - "learning_rate": 0.002915820719905163, - "loss": 1.2462, - "step": 1393 - }, - { - "epoch": 0.10722252134451196, - "learning_rate": 0.002915700961487549, - "loss": 0.9328, - "step": 1394 - }, - { - "epoch": 0.10729943850473041, - "learning_rate": 0.0029155811204056806, - "loss": 1.3122, - "step": 1395 - }, - { - "epoch": 0.10737635566494885, - "learning_rate": 0.0029154611966665557, - "loss": 1.1443, - "step": 1396 - }, - { - "epoch": 0.1074532728251673, - "learning_rate": 0.002915341190277176, - "loss": 0.9814, - "step": 1397 - }, - { - "epoch": 0.10753018998538574, - "learning_rate": 0.0029152211012445496, - "loss": 1.0536, - "step": 1398 - }, - { - "epoch": 0.10760710714560419, - "learning_rate": 0.002915100929575688, - "loss": 1.1837, - "step": 1399 - }, - { - "epoch": 0.10768402430582263, - "learning_rate": 0.002914980675277608, - "loss": 1.275, - "step": 1400 - }, - { - "epoch": 0.10776094146604108, - "learning_rate": 0.002914860338357332, - "loss": 1.1234, - "step": 1401 - }, - { - "epoch": 0.10783785862625953, - "learning_rate": 0.002914739918821886, - "loss": 1.1729, - "step": 1402 - }, - { - "epoch": 0.10791477578647796, - "learning_rate": 0.002914619416678302, - "loss": 0.9903, - "step": 1403 - }, - { - "epoch": 0.1079916929466964, - "learning_rate": 0.002914498831933616, - "loss": 1.1748, - "step": 1404 - }, - { - "epoch": 0.10806861010691485, - "learning_rate": 0.002914378164594869, - "loss": 1.4379, - "step": 1405 - }, - { - "epoch": 0.10814552726713329, - "learning_rate": 0.002914257414669107, - "loss": 0.8967, - "step": 1406 - }, - { - "epoch": 0.10822244442735174, - "learning_rate": 0.0029141365821633808, - "loss": 0.9649, - "step": 1407 - }, - { - "epoch": 0.10829936158757018, - "learning_rate": 0.0029140156670847454, - "loss": 1.4865, - "step": 1408 - }, - { - "epoch": 0.10837627874778863, - "learning_rate": 0.0029138946694402617, - "loss": 0.5231, - "step": 1409 - }, - { - "epoch": 0.10845319590800707, - "learning_rate": 0.002913773589236995, - "loss": 1.3454, - "step": 1410 - }, - { - "epoch": 0.10853011306822552, - "learning_rate": 0.0029136524264820147, - "loss": 1.2543, - "step": 1411 - }, - { - "epoch": 0.10860703022844397, - "learning_rate": 0.0029135311811823955, - "loss": 1.2049, - "step": 1412 - }, - { - "epoch": 0.10868394738866241, - "learning_rate": 0.0029134098533452183, - "loss": 1.1168, - "step": 1413 - }, - { - "epoch": 0.10876086454888086, - "learning_rate": 0.0029132884429775665, - "loss": 0.8141, - "step": 1414 - }, - { - "epoch": 0.1088377817090993, - "learning_rate": 0.00291316695008653, - "loss": 1.2728, - "step": 1415 - }, - { - "epoch": 0.10891469886931775, - "learning_rate": 0.0029130453746792017, - "loss": 0.832, - "step": 1416 - }, - { - "epoch": 0.10899161602953619, - "learning_rate": 0.0029129237167626817, - "loss": 1.3316, - "step": 1417 - }, - { - "epoch": 0.10906853318975464, - 
"learning_rate": 0.0029128019763440733, - "loss": 1.3856, - "step": 1418 - }, - { - "epoch": 0.10914545034997308, - "learning_rate": 0.0029126801534304853, - "loss": 1.1343, - "step": 1419 - }, - { - "epoch": 0.10922236751019153, - "learning_rate": 0.002912558248029031, - "loss": 0.9837, - "step": 1420 - }, - { - "epoch": 0.10929928467040997, - "learning_rate": 0.002912436260146828, - "loss": 1.0679, - "step": 1421 - }, - { - "epoch": 0.10937620183062842, - "learning_rate": 0.002912314189791, - "loss": 1.2493, - "step": 1422 - }, - { - "epoch": 0.10945311899084685, - "learning_rate": 0.0029121920369686747, - "loss": 0.9199, - "step": 1423 - }, - { - "epoch": 0.1095300361510653, - "learning_rate": 0.0029120698016869844, - "loss": 1.4005, - "step": 1424 - }, - { - "epoch": 0.10960695331128374, - "learning_rate": 0.002911947483953067, - "loss": 1.2883, - "step": 1425 - }, - { - "epoch": 0.10968387047150219, - "learning_rate": 0.0029118250837740643, - "loss": 1.1278, - "step": 1426 - }, - { - "epoch": 0.10976078763172063, - "learning_rate": 0.0029117026011571236, - "loss": 1.1546, - "step": 1427 - }, - { - "epoch": 0.10983770479193908, - "learning_rate": 0.002911580036109397, - "loss": 1.0478, - "step": 1428 - }, - { - "epoch": 0.10991462195215752, - "learning_rate": 0.0029114573886380407, - "loss": 1.1066, - "step": 1429 - }, - { - "epoch": 0.10999153911237597, - "learning_rate": 0.0029113346587502165, - "loss": 1.1094, - "step": 1430 - }, - { - "epoch": 0.11006845627259441, - "learning_rate": 0.002911211846453091, - "loss": 0.882, - "step": 1431 - }, - { - "epoch": 0.11014537343281286, - "learning_rate": 0.0029110889517538346, - "loss": 1.0255, - "step": 1432 - }, - { - "epoch": 0.1102222905930313, - "learning_rate": 0.002910965974659624, - "loss": 1.2265, - "step": 1433 - }, - { - "epoch": 0.11029920775324975, - "learning_rate": 0.00291084291517764, - "loss": 1.204, - "step": 1434 - }, - { - "epoch": 0.1103761249134682, - "learning_rate": 0.0029107197733150673, - "loss": 1.0782, - "step": 1435 - }, - { - "epoch": 0.11045304207368664, - "learning_rate": 0.002910596549079097, - "loss": 0.9163, - "step": 1436 - }, - { - "epoch": 0.11052995923390509, - "learning_rate": 0.002910473242476924, - "loss": 1.0607, - "step": 1437 - }, - { - "epoch": 0.11060687639412353, - "learning_rate": 0.002910349853515749, - "loss": 1.1778, - "step": 1438 - }, - { - "epoch": 0.11068379355434198, - "learning_rate": 0.0029102263822027756, - "loss": 1.3244, - "step": 1439 - }, - { - "epoch": 0.11076071071456042, - "learning_rate": 0.002910102828545214, - "loss": 1.2257, - "step": 1440 - }, - { - "epoch": 0.11083762787477887, - "learning_rate": 0.0029099791925502787, - "loss": 1.2334, - "step": 1441 - }, - { - "epoch": 0.11091454503499731, - "learning_rate": 0.0029098554742251893, - "loss": 1.2848, - "step": 1442 - }, - { - "epoch": 0.11099146219521576, - "learning_rate": 0.002909731673577169, - "loss": 0.6049, - "step": 1443 - }, - { - "epoch": 0.11106837935543419, - "learning_rate": 0.002909607790613448, - "loss": 0.5912, - "step": 1444 - }, - { - "epoch": 0.11114529651565264, - "learning_rate": 0.002909483825341258, - "loss": 0.964, - "step": 1445 - }, - { - "epoch": 0.11122221367587108, - "learning_rate": 0.0029093597777678387, - "loss": 0.9893, - "step": 1446 - }, - { - "epoch": 0.11129913083608953, - "learning_rate": 0.0029092356479004334, - "loss": 1.3322, - "step": 1447 - }, - { - "epoch": 0.11137604799630797, - "learning_rate": 0.00290911143574629, - "loss": 1.5149, - "step": 1448 - }, - { - "epoch": 
0.11145296515652642, - "learning_rate": 0.0029089871413126613, - "loss": 1.0509, - "step": 1449 - }, - { - "epoch": 0.11152988231674486, - "learning_rate": 0.002908862764606805, - "loss": 1.0904, - "step": 1450 - }, - { - "epoch": 0.11160679947696331, - "learning_rate": 0.0029087383056359832, - "loss": 1.0722, - "step": 1451 - }, - { - "epoch": 0.11168371663718175, - "learning_rate": 0.002908613764407464, - "loss": 1.4654, - "step": 1452 - }, - { - "epoch": 0.1117606337974002, - "learning_rate": 0.0029084891409285186, - "loss": 0.9479, - "step": 1453 - }, - { - "epoch": 0.11183755095761864, - "learning_rate": 0.002908364435206425, - "loss": 1.4954, - "step": 1454 - }, - { - "epoch": 0.11191446811783709, - "learning_rate": 0.002908239647248464, - "loss": 1.2428, - "step": 1455 - }, - { - "epoch": 0.11199138527805554, - "learning_rate": 0.0029081147770619225, - "loss": 0.7893, - "step": 1456 - }, - { - "epoch": 0.11206830243827398, - "learning_rate": 0.0029079898246540917, - "loss": 1.233, - "step": 1457 - }, - { - "epoch": 0.11214521959849243, - "learning_rate": 0.0029078647900322675, - "loss": 1.2769, - "step": 1458 - }, - { - "epoch": 0.11222213675871087, - "learning_rate": 0.002907739673203751, - "loss": 0.9771, - "step": 1459 - }, - { - "epoch": 0.11229905391892932, - "learning_rate": 0.0029076144741758477, - "loss": 1.0689, - "step": 1460 - }, - { - "epoch": 0.11237597107914776, - "learning_rate": 0.0029074891929558684, - "loss": 1.2342, - "step": 1461 - }, - { - "epoch": 0.11245288823936621, - "learning_rate": 0.0029073638295511284, - "loss": 1.099, - "step": 1462 - }, - { - "epoch": 0.11252980539958465, - "learning_rate": 0.0029072383839689477, - "loss": 1.1543, - "step": 1463 - }, - { - "epoch": 0.11260672255980309, - "learning_rate": 0.002907112856216651, - "loss": 1.1191, - "step": 1464 - }, - { - "epoch": 0.11268363972002153, - "learning_rate": 0.0029069872463015674, - "loss": 1.0351, - "step": 1465 - }, - { - "epoch": 0.11276055688023998, - "learning_rate": 0.002906861554231033, - "loss": 1.2029, - "step": 1466 - }, - { - "epoch": 0.11283747404045842, - "learning_rate": 0.0029067357800123864, - "loss": 1.2132, - "step": 1467 - }, - { - "epoch": 0.11291439120067687, - "learning_rate": 0.002906609923652972, - "loss": 0.8952, - "step": 1468 - }, - { - "epoch": 0.11299130836089531, - "learning_rate": 0.002906483985160137, - "loss": 1.1924, - "step": 1469 - }, - { - "epoch": 0.11306822552111376, - "learning_rate": 0.0029063579645412373, - "loss": 0.6819, - "step": 1470 - }, - { - "epoch": 0.1131451426813322, - "learning_rate": 0.0029062318618036298, - "loss": 1.1736, - "step": 1471 - }, - { - "epoch": 0.11322205984155065, - "learning_rate": 0.0029061056769546785, - "loss": 1.297, - "step": 1472 - }, - { - "epoch": 0.1132989770017691, - "learning_rate": 0.0029059794100017512, - "loss": 1.1583, - "step": 1473 - }, - { - "epoch": 0.11337589416198754, - "learning_rate": 0.002905853060952221, - "loss": 1.4155, - "step": 1474 - }, - { - "epoch": 0.11345281132220598, - "learning_rate": 0.0029057266298134657, - "loss": 1.332, - "step": 1475 - }, - { - "epoch": 0.11352972848242443, - "learning_rate": 0.0029056001165928672, - "loss": 1.2283, - "step": 1476 - }, - { - "epoch": 0.11360664564264288, - "learning_rate": 0.002905473521297813, - "loss": 0.999, - "step": 1477 - }, - { - "epoch": 0.11368356280286132, - "learning_rate": 0.002905346843935695, - "loss": 1.3447, - "step": 1478 - }, - { - "epoch": 0.11376047996307977, - "learning_rate": 0.0029052200845139102, - "loss": 1.4668, - "step": 
1479 - }, - { - "epoch": 0.11383739712329821, - "learning_rate": 0.0029050932430398605, - "loss": 1.0701, - "step": 1480 - }, - { - "epoch": 0.11391431428351666, - "learning_rate": 0.0029049663195209515, - "loss": 1.2138, - "step": 1481 - }, - { - "epoch": 0.1139912314437351, - "learning_rate": 0.0029048393139645954, - "loss": 1.2256, - "step": 1482 - }, - { - "epoch": 0.11406814860395355, - "learning_rate": 0.0029047122263782072, - "loss": 1.254, - "step": 1483 - }, - { - "epoch": 0.114145065764172, - "learning_rate": 0.0029045850567692086, - "loss": 0.9742, - "step": 1484 - }, - { - "epoch": 0.11422198292439042, - "learning_rate": 0.0029044578051450245, - "loss": 1.1314, - "step": 1485 - }, - { - "epoch": 0.11429890008460887, - "learning_rate": 0.002904330471513085, - "loss": 0.9521, - "step": 1486 - }, - { - "epoch": 0.11437581724482732, - "learning_rate": 0.002904203055880826, - "loss": 1.2706, - "step": 1487 - }, - { - "epoch": 0.11445273440504576, - "learning_rate": 0.0029040755582556877, - "loss": 0.8659, - "step": 1488 - }, - { - "epoch": 0.1145296515652642, - "learning_rate": 0.002903947978645114, - "loss": 1.2421, - "step": 1489 - }, - { - "epoch": 0.11460656872548265, - "learning_rate": 0.0029038203170565543, - "loss": 1.1359, - "step": 1490 - }, - { - "epoch": 0.1146834858857011, - "learning_rate": 0.0029036925734974637, - "loss": 0.8697, - "step": 1491 - }, - { - "epoch": 0.11476040304591954, - "learning_rate": 0.0029035647479753006, - "loss": 1.4948, - "step": 1492 - }, - { - "epoch": 0.11483732020613799, - "learning_rate": 0.0029034368404975293, - "loss": 1.2606, - "step": 1493 - }, - { - "epoch": 0.11491423736635643, - "learning_rate": 0.002903308851071618, - "loss": 0.9511, - "step": 1494 - }, - { - "epoch": 0.11499115452657488, - "learning_rate": 0.0029031807797050405, - "loss": 1.1019, - "step": 1495 - }, - { - "epoch": 0.11506807168679332, - "learning_rate": 0.002903052626405275, - "loss": 1.3968, - "step": 1496 - }, - { - "epoch": 0.11514498884701177, - "learning_rate": 0.002902924391179804, - "loss": 0.9133, - "step": 1497 - }, - { - "epoch": 0.11522190600723022, - "learning_rate": 0.0029027960740361162, - "loss": 1.3496, - "step": 1498 - }, - { - "epoch": 0.11529882316744866, - "learning_rate": 0.002902667674981703, - "loss": 0.7661, - "step": 1499 - }, - { - "epoch": 0.1153757403276671, - "learning_rate": 0.0029025391940240633, - "loss": 1.001, - "step": 1500 - }, - { - "epoch": 0.11545265748788555, - "learning_rate": 0.0029024106311706984, - "loss": 1.0003, - "step": 1501 - }, - { - "epoch": 0.115529574648104, - "learning_rate": 0.0029022819864291147, - "loss": 1.336, - "step": 1502 - }, - { - "epoch": 0.11560649180832244, - "learning_rate": 0.0029021532598068243, - "loss": 1.4581, - "step": 1503 - }, - { - "epoch": 0.11568340896854089, - "learning_rate": 0.0029020244513113438, - "loss": 1.194, - "step": 1504 - }, - { - "epoch": 0.11576032612875932, - "learning_rate": 0.002901895560950195, - "loss": 0.9644, - "step": 1505 - }, - { - "epoch": 0.11583724328897776, - "learning_rate": 0.0029017665887309026, - "loss": 1.1786, - "step": 1506 - }, - { - "epoch": 0.11591416044919621, - "learning_rate": 0.002901637534660999, - "loss": 1.0125, - "step": 1507 - }, - { - "epoch": 0.11599107760941466, - "learning_rate": 0.0029015083987480182, - "loss": 1.2137, - "step": 1508 - }, - { - "epoch": 0.1160679947696331, - "learning_rate": 0.002901379180999502, - "loss": 1.1543, - "step": 1509 - }, - { - "epoch": 0.11614491192985155, - "learning_rate": 0.0029012498814229943, - "loss": 
1.0397, - "step": 1510 - }, - { - "epoch": 0.11622182909006999, - "learning_rate": 0.0029011205000260463, - "loss": 1.5104, - "step": 1511 - }, - { - "epoch": 0.11629874625028844, - "learning_rate": 0.002900991036816212, - "loss": 1.2308, - "step": 1512 - }, - { - "epoch": 0.11637566341050688, - "learning_rate": 0.0029008614918010506, - "loss": 1.1723, - "step": 1513 - }, - { - "epoch": 0.11645258057072533, - "learning_rate": 0.0029007318649881273, - "loss": 1.0011, - "step": 1514 - }, - { - "epoch": 0.11652949773094377, - "learning_rate": 0.0029006021563850104, - "loss": 1.0574, - "step": 1515 - }, - { - "epoch": 0.11660641489116222, - "learning_rate": 0.0029004723659992733, - "loss": 1.2372, - "step": 1516 - }, - { - "epoch": 0.11668333205138066, - "learning_rate": 0.002900342493838496, - "loss": 1.1541, - "step": 1517 - }, - { - "epoch": 0.11676024921159911, - "learning_rate": 0.0029002125399102607, - "loss": 1.0897, - "step": 1518 - }, - { - "epoch": 0.11683716637181756, - "learning_rate": 0.002900082504222156, - "loss": 1.3226, - "step": 1519 - }, - { - "epoch": 0.116914083532036, - "learning_rate": 0.0028999523867817745, - "loss": 1.8294, - "step": 1520 - }, - { - "epoch": 0.11699100069225445, - "learning_rate": 0.002899822187596715, - "loss": 1.4568, - "step": 1521 - }, - { - "epoch": 0.11706791785247289, - "learning_rate": 0.002899691906674578, - "loss": 1.2607, - "step": 1522 - }, - { - "epoch": 0.11714483501269134, - "learning_rate": 0.0028995615440229722, - "loss": 1.2988, - "step": 1523 - }, - { - "epoch": 0.11722175217290978, - "learning_rate": 0.0028994310996495096, - "loss": 1.024, - "step": 1524 - }, - { - "epoch": 0.11729866933312823, - "learning_rate": 0.0028993005735618066, - "loss": 1.2166, - "step": 1525 - }, - { - "epoch": 0.11737558649334666, - "learning_rate": 0.0028991699657674847, - "loss": 1.3122, - "step": 1526 - }, - { - "epoch": 0.1174525036535651, - "learning_rate": 0.00289903927627417, - "loss": 1.1074, - "step": 1527 - }, - { - "epoch": 0.11752942081378355, - "learning_rate": 0.0028989085050894945, - "loss": 1.1349, - "step": 1528 - }, - { - "epoch": 0.117606337974002, - "learning_rate": 0.002898777652221093, - "loss": 0.9543, - "step": 1529 - }, - { - "epoch": 0.11768325513422044, - "learning_rate": 0.002898646717676607, - "loss": 1.1482, - "step": 1530 - }, - { - "epoch": 0.11776017229443889, - "learning_rate": 0.0028985157014636814, - "loss": 1.4885, - "step": 1531 - }, - { - "epoch": 0.11783708945465733, - "learning_rate": 0.002898384603589966, - "loss": 1.0584, - "step": 1532 - }, - { - "epoch": 0.11791400661487578, - "learning_rate": 0.0028982534240631166, - "loss": 1.3822, - "step": 1533 - }, - { - "epoch": 0.11799092377509422, - "learning_rate": 0.0028981221628907924, - "loss": 1.074, - "step": 1534 - }, - { - "epoch": 0.11806784093531267, - "learning_rate": 0.002897990820080659, - "loss": 1.5324, - "step": 1535 - }, - { - "epoch": 0.11814475809553111, - "learning_rate": 0.0028978593956403833, - "loss": 0.8464, - "step": 1536 - }, - { - "epoch": 0.11822167525574956, - "learning_rate": 0.0028977278895776413, - "loss": 1.0793, - "step": 1537 - }, - { - "epoch": 0.118298592415968, - "learning_rate": 0.002897596301900111, - "loss": 1.324, - "step": 1538 - }, - { - "epoch": 0.11837550957618645, - "learning_rate": 0.0028974646326154763, - "loss": 1.1548, - "step": 1539 - }, - { - "epoch": 0.1184524267364049, - "learning_rate": 0.002897332881731425, - "loss": 1.3605, - "step": 1540 - }, - { - "epoch": 0.11852934389662334, - "learning_rate": 
0.0028972010492556506, - "loss": 1.192, - "step": 1541 - }, - { - "epoch": 0.11860626105684179, - "learning_rate": 0.002897069135195851, - "loss": 0.6856, - "step": 1542 - }, - { - "epoch": 0.11868317821706023, - "learning_rate": 0.0028969371395597284, - "loss": 1.0507, - "step": 1543 - }, - { - "epoch": 0.11876009537727868, - "learning_rate": 0.0028968050623549907, - "loss": 1.3073, - "step": 1544 - }, - { - "epoch": 0.11883701253749712, - "learning_rate": 0.002896672903589349, - "loss": 1.2736, - "step": 1545 - }, - { - "epoch": 0.11891392969771555, - "learning_rate": 0.0028965406632705214, - "loss": 1.3829, - "step": 1546 - }, - { - "epoch": 0.118990846857934, - "learning_rate": 0.002896408341406229, - "loss": 1.2508, - "step": 1547 - }, - { - "epoch": 0.11906776401815244, - "learning_rate": 0.0028962759380041984, - "loss": 1.176, - "step": 1548 - }, - { - "epoch": 0.11914468117837089, - "learning_rate": 0.00289614345307216, - "loss": 1.1064, - "step": 1549 - }, - { - "epoch": 0.11922159833858934, - "learning_rate": 0.002896010886617851, - "loss": 1.2934, - "step": 1550 - }, - { - "epoch": 0.11929851549880778, - "learning_rate": 0.002895878238649011, - "loss": 0.9442, - "step": 1551 - }, - { - "epoch": 0.11937543265902623, - "learning_rate": 0.002895745509173386, - "loss": 1.0226, - "step": 1552 - }, - { - "epoch": 0.11945234981924467, - "learning_rate": 0.002895612698198726, - "loss": 1.0044, - "step": 1553 - }, - { - "epoch": 0.11952926697946312, - "learning_rate": 0.0028954798057327863, - "loss": 1.4746, - "step": 1554 - }, - { - "epoch": 0.11960618413968156, - "learning_rate": 0.0028953468317833267, - "loss": 1.0465, - "step": 1555 - }, - { - "epoch": 0.11968310129990001, - "learning_rate": 0.002895213776358111, - "loss": 0.9802, - "step": 1556 - }, - { - "epoch": 0.11976001846011845, - "learning_rate": 0.002895080639464909, - "loss": 0.9997, - "step": 1557 - }, - { - "epoch": 0.1198369356203369, - "learning_rate": 0.0028949474211114943, - "loss": 0.9378, - "step": 1558 - }, - { - "epoch": 0.11991385278055534, - "learning_rate": 0.002894814121305646, - "loss": 1.0979, - "step": 1559 - }, - { - "epoch": 0.11999076994077379, - "learning_rate": 0.0028946807400551475, - "loss": 1.1207, - "step": 1560 - }, - { - "epoch": 0.12006768710099223, - "learning_rate": 0.0028945472773677875, - "loss": 1.6021, - "step": 1561 - }, - { - "epoch": 0.12014460426121068, - "learning_rate": 0.0028944137332513583, - "loss": 1.1666, - "step": 1562 - }, - { - "epoch": 0.12022152142142913, - "learning_rate": 0.0028942801077136574, - "loss": 1.1044, - "step": 1563 - }, - { - "epoch": 0.12029843858164757, - "learning_rate": 0.0028941464007624885, - "loss": 1.0011, - "step": 1564 - }, - { - "epoch": 0.12037535574186602, - "learning_rate": 0.002894012612405659, - "loss": 1.0823, - "step": 1565 - }, - { - "epoch": 0.12045227290208446, - "learning_rate": 0.0028938787426509792, - "loss": 1.0855, - "step": 1566 - }, - { - "epoch": 0.1205291900623029, - "learning_rate": 0.002893744791506267, - "loss": 1.0098, - "step": 1567 - }, - { - "epoch": 0.12060610722252134, - "learning_rate": 0.0028936107589793447, - "loss": 1.3746, - "step": 1568 - }, - { - "epoch": 0.12068302438273978, - "learning_rate": 0.0028934766450780373, - "loss": 1.0474, - "step": 1569 - }, - { - "epoch": 0.12075994154295823, - "learning_rate": 0.0028933424498101762, - "loss": 0.7657, - "step": 1570 - }, - { - "epoch": 0.12083685870317667, - "learning_rate": 0.0028932081731835976, - "loss": 1.2252, - "step": 1571 - }, - { - "epoch": 
0.12091377586339512, - "learning_rate": 0.0028930738152061417, - "loss": 1.1658, - "step": 1572 - }, - { - "epoch": 0.12099069302361357, - "learning_rate": 0.002892939375885654, - "loss": 1.1699, - "step": 1573 - }, - { - "epoch": 0.12106761018383201, - "learning_rate": 0.002892804855229984, - "loss": 1.1706, - "step": 1574 - }, - { - "epoch": 0.12114452734405046, - "learning_rate": 0.0028926702532469876, - "loss": 1.1747, - "step": 1575 - }, - { - "epoch": 0.1212214445042689, - "learning_rate": 0.0028925355699445233, - "loss": 1.2883, - "step": 1576 - }, - { - "epoch": 0.12129836166448735, - "learning_rate": 0.002892400805330456, - "loss": 1.0269, - "step": 1577 - }, - { - "epoch": 0.12137527882470579, - "learning_rate": 0.0028922659594126546, - "loss": 1.1479, - "step": 1578 - }, - { - "epoch": 0.12145219598492424, - "learning_rate": 0.002892131032198992, - "loss": 1.1261, - "step": 1579 - }, - { - "epoch": 0.12152911314514268, - "learning_rate": 0.0028919960236973487, - "loss": 1.2145, - "step": 1580 - }, - { - "epoch": 0.12160603030536113, - "learning_rate": 0.0028918609339156067, - "loss": 1.2964, - "step": 1581 - }, - { - "epoch": 0.12168294746557957, - "learning_rate": 0.002891725762861654, - "loss": 0.8897, - "step": 1582 - }, - { - "epoch": 0.12175986462579802, - "learning_rate": 0.0028915905105433835, - "loss": 1.0423, - "step": 1583 - }, - { - "epoch": 0.12183678178601647, - "learning_rate": 0.0028914551769686925, - "loss": 1.2013, - "step": 1584 - }, - { - "epoch": 0.12191369894623491, - "learning_rate": 0.0028913197621454846, - "loss": 1.2252, - "step": 1585 - }, - { - "epoch": 0.12199061610645336, - "learning_rate": 0.0028911842660816647, - "loss": 1.0297, - "step": 1586 - }, - { - "epoch": 0.12206753326667179, - "learning_rate": 0.0028910486887851463, - "loss": 1.1314, - "step": 1587 - }, - { - "epoch": 0.12214445042689023, - "learning_rate": 0.002890913030263845, - "loss": 1.1527, - "step": 1588 - }, - { - "epoch": 0.12222136758710868, - "learning_rate": 0.0028907772905256823, - "loss": 1.4608, - "step": 1589 - }, - { - "epoch": 0.12229828474732712, - "learning_rate": 0.0028906414695785846, - "loss": 1.0999, - "step": 1590 - }, - { - "epoch": 0.12237520190754557, - "learning_rate": 0.0028905055674304813, - "loss": 1.2517, - "step": 1591 - }, - { - "epoch": 0.12245211906776401, - "learning_rate": 0.0028903695840893093, - "loss": 1.0303, - "step": 1592 - }, - { - "epoch": 0.12252903622798246, - "learning_rate": 0.0028902335195630084, - "loss": 1.153, - "step": 1593 - }, - { - "epoch": 0.1226059533882009, - "learning_rate": 0.002890097373859523, - "loss": 1.199, - "step": 1594 - }, - { - "epoch": 0.12268287054841935, - "learning_rate": 0.0028899611469868037, - "loss": 1.2185, - "step": 1595 - }, - { - "epoch": 0.1227597877086378, - "learning_rate": 0.002889824838952804, - "loss": 0.7877, - "step": 1596 - }, - { - "epoch": 0.12283670486885624, - "learning_rate": 0.002889688449765484, - "loss": 1.426, - "step": 1597 - }, - { - "epoch": 0.12291362202907469, - "learning_rate": 0.0028895519794328067, - "loss": 1.2392, - "step": 1598 - }, - { - "epoch": 0.12299053918929313, - "learning_rate": 0.0028894154279627414, - "loss": 1.1887, - "step": 1599 - }, - { - "epoch": 0.12306745634951158, - "learning_rate": 0.0028892787953632613, - "loss": 0.842, - "step": 1600 - }, - { - "epoch": 0.12314437350973002, - "learning_rate": 0.0028891420816423443, - "loss": 1.1089, - "step": 1601 - }, - { - "epoch": 0.12322129066994847, - "learning_rate": 0.0028890052868079737, - "loss": 1.207, - "step": 
1602 - }, - { - "epoch": 0.12329820783016691, - "learning_rate": 0.0028888684108681366, - "loss": 1.1223, - "step": 1603 - }, - { - "epoch": 0.12337512499038536, - "learning_rate": 0.0028887314538308256, - "loss": 1.1937, - "step": 1604 - }, - { - "epoch": 0.1234520421506038, - "learning_rate": 0.0028885944157040376, - "loss": 1.0141, - "step": 1605 - }, - { - "epoch": 0.12352895931082225, - "learning_rate": 0.002888457296495774, - "loss": 1.2021, - "step": 1606 - }, - { - "epoch": 0.1236058764710407, - "learning_rate": 0.0028883200962140426, - "loss": 1.2874, - "step": 1607 - }, - { - "epoch": 0.12368279363125913, - "learning_rate": 0.0028881828148668537, - "loss": 1.3708, - "step": 1608 - }, - { - "epoch": 0.12375971079147757, - "learning_rate": 0.0028880454524622236, - "loss": 1.1228, - "step": 1609 - }, - { - "epoch": 0.12383662795169602, - "learning_rate": 0.002887908009008173, - "loss": 0.7563, - "step": 1610 - }, - { - "epoch": 0.12391354511191446, - "learning_rate": 0.0028877704845127273, - "loss": 1.0775, - "step": 1611 - }, - { - "epoch": 0.12399046227213291, - "learning_rate": 0.0028876328789839167, - "loss": 0.855, - "step": 1612 - }, - { - "epoch": 0.12406737943235135, - "learning_rate": 0.002887495192429776, - "loss": 1.0989, - "step": 1613 - }, - { - "epoch": 0.1241442965925698, - "learning_rate": 0.002887357424858345, - "loss": 1.3884, - "step": 1614 - }, - { - "epoch": 0.12422121375278825, - "learning_rate": 0.0028872195762776683, - "loss": 0.7945, - "step": 1615 - }, - { - "epoch": 0.12429813091300669, - "learning_rate": 0.0028870816466957942, - "loss": 1.2136, - "step": 1616 - }, - { - "epoch": 0.12437504807322514, - "learning_rate": 0.0028869436361207775, - "loss": 1.1295, - "step": 1617 - }, - { - "epoch": 0.12445196523344358, - "learning_rate": 0.0028868055445606766, - "loss": 1.2467, - "step": 1618 - }, - { - "epoch": 0.12452888239366203, - "learning_rate": 0.0028866673720235546, - "loss": 1.2908, - "step": 1619 - }, - { - "epoch": 0.12460579955388047, - "learning_rate": 0.0028865291185174798, - "loss": 1.2547, - "step": 1620 - }, - { - "epoch": 0.12468271671409892, - "learning_rate": 0.0028863907840505246, - "loss": 1.543, - "step": 1621 - }, - { - "epoch": 0.12475963387431736, - "learning_rate": 0.0028862523686307663, - "loss": 1.0831, - "step": 1622 - }, - { - "epoch": 0.12483655103453581, - "learning_rate": 0.002886113872266288, - "loss": 1.2436, - "step": 1623 - }, - { - "epoch": 0.12491346819475425, - "learning_rate": 0.0028859752949651762, - "loss": 1.5413, - "step": 1624 - }, - { - "epoch": 0.1249903853549727, - "learning_rate": 0.002885836636735522, - "loss": 1.1666, - "step": 1625 - }, - { - "epoch": 0.12506730251519113, - "learning_rate": 0.0028856978975854222, - "loss": 1.1694, - "step": 1626 - }, - { - "epoch": 0.1251442196754096, - "learning_rate": 0.0028855590775229783, - "loss": 0.9153, - "step": 1627 - }, - { - "epoch": 0.12522113683562802, - "learning_rate": 0.002885420176556296, - "loss": 1.4449, - "step": 1628 - }, - { - "epoch": 0.12529805399584648, - "learning_rate": 0.0028852811946934857, - "loss": 1.1671, - "step": 1629 - }, - { - "epoch": 0.1253749711560649, - "learning_rate": 0.002885142131942663, - "loss": 1.253, - "step": 1630 - }, - { - "epoch": 0.12545188831628337, - "learning_rate": 0.0028850029883119473, - "loss": 1.1113, - "step": 1631 - }, - { - "epoch": 0.1255288054765018, - "learning_rate": 0.0028848637638094637, - "loss": 0.9715, - "step": 1632 - }, - { - "epoch": 0.12560572263672026, - "learning_rate": 0.0028847244584433414, - 
"loss": 0.9045, - "step": 1633 - }, - { - "epoch": 0.1256826397969387, - "learning_rate": 0.0028845850722217154, - "loss": 1.2711, - "step": 1634 - }, - { - "epoch": 0.12575955695715715, - "learning_rate": 0.0028844456051527237, - "loss": 0.9732, - "step": 1635 - }, - { - "epoch": 0.12583647411737559, - "learning_rate": 0.0028843060572445105, - "loss": 1.3341, - "step": 1636 - }, - { - "epoch": 0.12591339127759404, - "learning_rate": 0.0028841664285052236, - "loss": 0.8971, - "step": 1637 - }, - { - "epoch": 0.12599030843781248, - "learning_rate": 0.0028840267189430167, - "loss": 1.0697, - "step": 1638 - }, - { - "epoch": 0.1260672255980309, - "learning_rate": 0.002883886928566047, - "loss": 0.9462, - "step": 1639 - }, - { - "epoch": 0.12614414275824937, - "learning_rate": 0.0028837470573824775, - "loss": 0.4884, - "step": 1640 - }, - { - "epoch": 0.1262210599184678, - "learning_rate": 0.0028836071054004755, - "loss": 1.8954, - "step": 1641 - }, - { - "epoch": 0.12629797707868626, - "learning_rate": 0.0028834670726282123, - "loss": 0.9863, - "step": 1642 - }, - { - "epoch": 0.1263748942389047, - "learning_rate": 0.002883326959073865, - "loss": 1.015, - "step": 1643 - }, - { - "epoch": 0.12645181139912315, - "learning_rate": 0.0028831867647456143, - "loss": 1.1744, - "step": 1644 - }, - { - "epoch": 0.12652872855934158, - "learning_rate": 0.002883046489651648, - "loss": 0.8641, - "step": 1645 - }, - { - "epoch": 0.12660564571956004, - "learning_rate": 0.0028829061338001547, - "loss": 1.0122, - "step": 1646 - }, - { - "epoch": 0.12668256287977847, - "learning_rate": 0.0028827656971993315, - "loss": 1.2379, - "step": 1647 - }, - { - "epoch": 0.12675948003999693, - "learning_rate": 0.0028826251798573783, - "loss": 0.9072, - "step": 1648 - }, - { - "epoch": 0.12683639720021536, - "learning_rate": 0.0028824845817825005, - "loss": 0.8084, - "step": 1649 - }, - { - "epoch": 0.12691331436043382, - "learning_rate": 0.002882343902982906, - "loss": 1.1727, - "step": 1650 - }, - { - "epoch": 0.12699023152065225, - "learning_rate": 0.0028822031434668108, - "loss": 1.1908, - "step": 1651 - }, - { - "epoch": 0.1270671486808707, - "learning_rate": 0.0028820623032424337, - "loss": 1.2055, - "step": 1652 - }, - { - "epoch": 0.12714406584108914, - "learning_rate": 0.0028819213823179984, - "loss": 1.3214, - "step": 1653 - }, - { - "epoch": 0.1272209830013076, - "learning_rate": 0.002881780380701733, - "loss": 1.1241, - "step": 1654 - }, - { - "epoch": 0.12729790016152603, - "learning_rate": 0.0028816392984018714, - "loss": 0.9759, - "step": 1655 - }, - { - "epoch": 0.1273748173217445, - "learning_rate": 0.0028814981354266514, - "loss": 1.2301, - "step": 1656 - }, - { - "epoch": 0.12745173448196292, - "learning_rate": 0.0028813568917843155, - "loss": 1.2305, - "step": 1657 - }, - { - "epoch": 0.12752865164218138, - "learning_rate": 0.0028812155674831103, - "loss": 0.9721, - "step": 1658 - }, - { - "epoch": 0.12760556880239982, - "learning_rate": 0.0028810741625312896, - "loss": 1.0428, - "step": 1659 - }, - { - "epoch": 0.12768248596261825, - "learning_rate": 0.002880932676937109, - "loss": 1.1266, - "step": 1660 - }, - { - "epoch": 0.1277594031228367, - "learning_rate": 0.00288079111070883, - "loss": 1.5436, - "step": 1661 - }, - { - "epoch": 0.12783632028305514, - "learning_rate": 0.002880649463854719, - "loss": 1.0405, - "step": 1662 - }, - { - "epoch": 0.1279132374432736, - "learning_rate": 0.002880507736383047, - "loss": 1.3491, - "step": 1663 - }, - { - "epoch": 0.12799015460349203, - "learning_rate": 
0.0028803659283020895, - "loss": 1.2567, - "step": 1664 - }, - { - "epoch": 0.1280670717637105, - "learning_rate": 0.0028802240396201273, - "loss": 1.1244, - "step": 1665 - }, - { - "epoch": 0.12814398892392892, - "learning_rate": 0.0028800820703454447, - "loss": 1.061, - "step": 1666 - }, - { - "epoch": 0.12822090608414738, - "learning_rate": 0.0028799400204863315, - "loss": 1.0545, - "step": 1667 - }, - { - "epoch": 0.1282978232443658, - "learning_rate": 0.0028797978900510822, - "loss": 0.9163, - "step": 1668 - }, - { - "epoch": 0.12837474040458427, - "learning_rate": 0.0028796556790479967, - "loss": 1.3919, - "step": 1669 - }, - { - "epoch": 0.1284516575648027, - "learning_rate": 0.0028795133874853778, - "loss": 1.1179, - "step": 1670 - }, - { - "epoch": 0.12852857472502116, - "learning_rate": 0.0028793710153715347, - "loss": 1.1639, - "step": 1671 - }, - { - "epoch": 0.1286054918852396, - "learning_rate": 0.0028792285627147808, - "loss": 1.3275, - "step": 1672 - }, - { - "epoch": 0.12868240904545805, - "learning_rate": 0.002879086029523433, - "loss": 1.1799, - "step": 1673 - }, - { - "epoch": 0.12875932620567648, - "learning_rate": 0.0028789434158058153, - "loss": 1.23, - "step": 1674 - }, - { - "epoch": 0.12883624336589494, - "learning_rate": 0.0028788007215702544, - "loss": 1.0948, - "step": 1675 - }, - { - "epoch": 0.12891316052611337, - "learning_rate": 0.002878657946825082, - "loss": 1.1994, - "step": 1676 - }, - { - "epoch": 0.12899007768633183, - "learning_rate": 0.0028785150915786356, - "loss": 1.4292, - "step": 1677 - }, - { - "epoch": 0.12906699484655026, - "learning_rate": 0.0028783721558392566, - "loss": 1.3145, - "step": 1678 - }, - { - "epoch": 0.1291439120067687, - "learning_rate": 0.00287822913961529, - "loss": 1.2676, - "step": 1679 - }, - { - "epoch": 0.12922082916698716, - "learning_rate": 0.0028780860429150883, - "loss": 1.4908, - "step": 1680 - }, - { - "epoch": 0.1292977463272056, - "learning_rate": 0.0028779428657470063, - "loss": 1.2185, - "step": 1681 - }, - { - "epoch": 0.12937466348742405, - "learning_rate": 0.002877799608119404, - "loss": 1.4401, - "step": 1682 - }, - { - "epoch": 0.12945158064764248, - "learning_rate": 0.002877656270040647, - "loss": 1.0149, - "step": 1683 - }, - { - "epoch": 0.12952849780786094, - "learning_rate": 0.0028775128515191053, - "loss": 0.9654, - "step": 1684 - }, - { - "epoch": 0.12960541496807937, - "learning_rate": 0.0028773693525631513, - "loss": 1.1157, - "step": 1685 - }, - { - "epoch": 0.12968233212829783, - "learning_rate": 0.002877225773181166, - "loss": 0.9185, - "step": 1686 - }, - { - "epoch": 0.12975924928851626, - "learning_rate": 0.0028770821133815325, - "loss": 1.1889, - "step": 1687 - }, - { - "epoch": 0.12983616644873472, - "learning_rate": 0.002876938373172639, - "loss": 1.491, - "step": 1688 - }, - { - "epoch": 0.12991308360895315, - "learning_rate": 0.002876794552562879, - "loss": 1.3061, - "step": 1689 - }, - { - "epoch": 0.1299900007691716, - "learning_rate": 0.00287665065156065, - "loss": 1.2791, - "step": 1690 - }, - { - "epoch": 0.13006691792939004, - "learning_rate": 0.0028765066701743553, - "loss": 1.3298, - "step": 1691 - }, - { - "epoch": 0.1301438350896085, - "learning_rate": 0.002876362608412401, - "loss": 1.3656, - "step": 1692 - }, - { - "epoch": 0.13022075224982693, - "learning_rate": 0.0028762184662832, - "loss": 1.0401, - "step": 1693 - }, - { - "epoch": 0.1302976694100454, - "learning_rate": 0.0028760742437951682, - "loss": 1.1322, - "step": 1694 - }, - { - "epoch": 0.13037458657026382, - 
"learning_rate": 0.002875929940956727, - "loss": 1.0038, - "step": 1695 - }, - { - "epoch": 0.13045150373048228, - "learning_rate": 0.002875785557776303, - "loss": 0.963, - "step": 1696 - }, - { - "epoch": 0.1305284208907007, - "learning_rate": 0.0028756410942623266, - "loss": 0.9686, - "step": 1697 - }, - { - "epoch": 0.13060533805091917, - "learning_rate": 0.002875496550423233, - "loss": 1.0261, - "step": 1698 - }, - { - "epoch": 0.1306822552111376, - "learning_rate": 0.0028753519262674616, - "loss": 1.0338, - "step": 1699 - }, - { - "epoch": 0.13075917237135604, - "learning_rate": 0.0028752072218034587, - "loss": 1.2176, - "step": 1700 - }, - { - "epoch": 0.1308360895315745, - "learning_rate": 0.002875062437039672, - "loss": 1.1064, - "step": 1701 - }, - { - "epoch": 0.13091300669179293, - "learning_rate": 0.0028749175719845576, - "loss": 1.1406, - "step": 1702 - }, - { - "epoch": 0.1309899238520114, - "learning_rate": 0.002874772626646573, - "loss": 1.3195, - "step": 1703 - }, - { - "epoch": 0.13106684101222982, - "learning_rate": 0.0028746276010341815, - "loss": 0.963, - "step": 1704 - }, - { - "epoch": 0.13114375817244828, - "learning_rate": 0.0028744824951558523, - "loss": 0.9756, - "step": 1705 - }, - { - "epoch": 0.1312206753326667, - "learning_rate": 0.002874337309020057, - "loss": 0.9419, - "step": 1706 - }, - { - "epoch": 0.13129759249288517, - "learning_rate": 0.002874192042635275, - "loss": 0.9682, - "step": 1707 - }, - { - "epoch": 0.1313745096531036, - "learning_rate": 0.0028740466960099865, - "loss": 1.1678, - "step": 1708 - }, - { - "epoch": 0.13145142681332206, - "learning_rate": 0.00287390126915268, - "loss": 1.7509, - "step": 1709 - }, - { - "epoch": 0.1315283439735405, - "learning_rate": 0.0028737557620718466, - "loss": 0.9172, - "step": 1710 - }, - { - "epoch": 0.13160526113375895, - "learning_rate": 0.0028736101747759823, - "loss": 1.3299, - "step": 1711 - }, - { - "epoch": 0.13168217829397738, - "learning_rate": 0.0028734645072735886, - "loss": 1.1382, - "step": 1712 - }, - { - "epoch": 0.13175909545419584, - "learning_rate": 0.00287331875957317, - "loss": 1.2588, - "step": 1713 - }, - { - "epoch": 0.13183601261441427, - "learning_rate": 0.002873172931683239, - "loss": 0.9852, - "step": 1714 - }, - { - "epoch": 0.13191292977463273, - "learning_rate": 0.002873027023612309, - "loss": 1.3062, - "step": 1715 - }, - { - "epoch": 0.13198984693485116, - "learning_rate": 0.0028728810353689, - "loss": 0.9516, - "step": 1716 - }, - { - "epoch": 0.13206676409506962, - "learning_rate": 0.0028727349669615365, - "loss": 1.0101, - "step": 1717 - }, - { - "epoch": 0.13214368125528805, - "learning_rate": 0.0028725888183987478, - "loss": 0.9942, - "step": 1718 - }, - { - "epoch": 0.1322205984155065, - "learning_rate": 0.0028724425896890678, - "loss": 1.1523, - "step": 1719 - }, - { - "epoch": 0.13229751557572494, - "learning_rate": 0.0028722962808410343, - "loss": 1.1402, - "step": 1720 - }, - { - "epoch": 0.13237443273594338, - "learning_rate": 0.002872149891863191, - "loss": 1.0075, - "step": 1721 - }, - { - "epoch": 0.13245134989616184, - "learning_rate": 0.002872003422764085, - "loss": 1.3004, - "step": 1722 - }, - { - "epoch": 0.13252826705638027, - "learning_rate": 0.00287185687355227, - "loss": 1.0971, - "step": 1723 - }, - { - "epoch": 0.13260518421659873, - "learning_rate": 0.002871710244236302, - "loss": 1.4052, - "step": 1724 - }, - { - "epoch": 0.13268210137681716, - "learning_rate": 0.0028715635348247437, - "loss": 1.1598, - "step": 1725 - }, - { - "epoch": 
0.13275901853703562, - "learning_rate": 0.0028714167453261605, - "loss": 1.0552, - "step": 1726 - }, - { - "epoch": 0.13283593569725405, - "learning_rate": 0.0028712698757491248, - "loss": 1.0522, - "step": 1727 - }, - { - "epoch": 0.1329128528574725, - "learning_rate": 0.002871122926102212, - "loss": 1.2255, - "step": 1728 - }, - { - "epoch": 0.13298977001769094, - "learning_rate": 0.0028709758963940024, - "loss": 0.9807, - "step": 1729 - }, - { - "epoch": 0.1330666871779094, - "learning_rate": 0.0028708287866330813, - "loss": 1.1592, - "step": 1730 - }, - { - "epoch": 0.13314360433812783, - "learning_rate": 0.0028706815968280387, - "loss": 1.0734, - "step": 1731 - }, - { - "epoch": 0.1332205214983463, - "learning_rate": 0.0028705343269874693, - "loss": 1.2188, - "step": 1732 - }, - { - "epoch": 0.13329743865856472, - "learning_rate": 0.002870386977119972, - "loss": 1.4174, - "step": 1733 - }, - { - "epoch": 0.13337435581878318, - "learning_rate": 0.0028702395472341513, - "loss": 1.3331, - "step": 1734 - }, - { - "epoch": 0.1334512729790016, - "learning_rate": 0.0028700920373386156, - "loss": 1.1743, - "step": 1735 - }, - { - "epoch": 0.13352819013922007, - "learning_rate": 0.0028699444474419773, - "loss": 1.0421, - "step": 1736 - }, - { - "epoch": 0.1336051072994385, - "learning_rate": 0.0028697967775528554, - "loss": 1.0958, - "step": 1737 - }, - { - "epoch": 0.13368202445965696, - "learning_rate": 0.0028696490276798716, - "loss": 1.1462, - "step": 1738 - }, - { - "epoch": 0.1337589416198754, - "learning_rate": 0.0028695011978316543, - "loss": 1.2942, - "step": 1739 - }, - { - "epoch": 0.13383585878009385, - "learning_rate": 0.0028693532880168346, - "loss": 1.1019, - "step": 1740 - }, - { - "epoch": 0.13391277594031228, - "learning_rate": 0.0028692052982440494, - "loss": 1.1589, - "step": 1741 - }, - { - "epoch": 0.13398969310053072, - "learning_rate": 0.0028690572285219396, - "loss": 1.3864, - "step": 1742 - }, - { - "epoch": 0.13406661026074917, - "learning_rate": 0.0028689090788591516, - "loss": 1.0047, - "step": 1743 - }, - { - "epoch": 0.1341435274209676, - "learning_rate": 0.0028687608492643357, - "loss": 1.1152, - "step": 1744 - }, - { - "epoch": 0.13422044458118607, - "learning_rate": 0.0028686125397461475, - "loss": 1.1046, - "step": 1745 - }, - { - "epoch": 0.1342973617414045, - "learning_rate": 0.002868464150313247, - "loss": 1.3783, - "step": 1746 - }, - { - "epoch": 0.13437427890162296, - "learning_rate": 0.0028683156809742975, - "loss": 1.0825, - "step": 1747 - }, - { - "epoch": 0.1344511960618414, - "learning_rate": 0.0028681671317379704, - "loss": 1.1973, - "step": 1748 - }, - { - "epoch": 0.13452811322205985, - "learning_rate": 0.002868018502612938, - "loss": 0.8653, - "step": 1749 - }, - { - "epoch": 0.13460503038227828, - "learning_rate": 0.0028678697936078803, - "loss": 1.3479, - "step": 1750 - }, - { - "epoch": 0.13468194754249674, - "learning_rate": 0.0028677210047314793, - "loss": 0.9689, - "step": 1751 - }, - { - "epoch": 0.13475886470271517, - "learning_rate": 0.0028675721359924236, - "loss": 1.2804, - "step": 1752 - }, - { - "epoch": 0.13483578186293363, - "learning_rate": 0.002867423187399405, - "loss": 1.017, - "step": 1753 - }, - { - "epoch": 0.13491269902315206, - "learning_rate": 0.0028672741589611224, - "loss": 0.9328, - "step": 1754 - }, - { - "epoch": 0.13498961618337052, - "learning_rate": 0.0028671250506862762, - "loss": 1.1762, - "step": 1755 - }, - { - "epoch": 0.13506653334358895, - "learning_rate": 0.002866975862583574, - "loss": 1.1972, - "step": 
1756 - }, - { - "epoch": 0.1351434505038074, - "learning_rate": 0.0028668265946617255, - "loss": 1.0895, - "step": 1757 - }, - { - "epoch": 0.13522036766402584, - "learning_rate": 0.0028666772469294484, - "loss": 1.2591, - "step": 1758 - }, - { - "epoch": 0.1352972848242443, - "learning_rate": 0.0028665278193954623, - "loss": 1.5245, - "step": 1759 - }, - { - "epoch": 0.13537420198446273, - "learning_rate": 0.0028663783120684927, - "loss": 1.2559, - "step": 1760 - }, - { - "epoch": 0.13545111914468116, - "learning_rate": 0.0028662287249572697, - "loss": 1.2404, - "step": 1761 - }, - { - "epoch": 0.13552803630489962, - "learning_rate": 0.0028660790580705277, - "loss": 0.9549, - "step": 1762 - }, - { - "epoch": 0.13560495346511806, - "learning_rate": 0.002865929311417005, - "loss": 0.9313, - "step": 1763 - }, - { - "epoch": 0.13568187062533651, - "learning_rate": 0.002865779485005447, - "loss": 1.2338, - "step": 1764 - }, - { - "epoch": 0.13575878778555495, - "learning_rate": 0.0028656295788446013, - "loss": 1.2663, - "step": 1765 - }, - { - "epoch": 0.1358357049457734, - "learning_rate": 0.002865479592943221, - "loss": 1.0019, - "step": 1766 - }, - { - "epoch": 0.13591262210599184, - "learning_rate": 0.0028653295273100648, - "loss": 1.5426, - "step": 1767 - }, - { - "epoch": 0.1359895392662103, - "learning_rate": 0.0028651793819538937, - "loss": 1.3661, - "step": 1768 - }, - { - "epoch": 0.13606645642642873, - "learning_rate": 0.0028650291568834765, - "loss": 1.0231, - "step": 1769 - }, - { - "epoch": 0.1361433735866472, - "learning_rate": 0.002864878852107584, - "loss": 1.4282, - "step": 1770 - }, - { - "epoch": 0.13622029074686562, - "learning_rate": 0.0028647284676349928, - "loss": 1.0069, - "step": 1771 - }, - { - "epoch": 0.13629720790708408, - "learning_rate": 0.002864578003474484, - "loss": 1.207, - "step": 1772 - }, - { - "epoch": 0.1363741250673025, - "learning_rate": 0.0028644274596348438, - "loss": 1.0187, - "step": 1773 - }, - { - "epoch": 0.13645104222752097, - "learning_rate": 0.0028642768361248623, - "loss": 1.1986, - "step": 1774 - }, - { - "epoch": 0.1365279593877394, - "learning_rate": 0.0028641261329533344, - "loss": 1.0283, - "step": 1775 - }, - { - "epoch": 0.13660487654795786, - "learning_rate": 0.00286397535012906, - "loss": 1.2079, - "step": 1776 - }, - { - "epoch": 0.1366817937081763, - "learning_rate": 0.0028638244876608428, - "loss": 0.8502, - "step": 1777 - }, - { - "epoch": 0.13675871086839475, - "learning_rate": 0.002863673545557493, - "loss": 0.8472, - "step": 1778 - }, - { - "epoch": 0.13683562802861318, - "learning_rate": 0.0028635225238278234, - "loss": 1.2772, - "step": 1779 - }, - { - "epoch": 0.13691254518883164, - "learning_rate": 0.002863371422480653, - "loss": 1.1688, - "step": 1780 - }, - { - "epoch": 0.13698946234905007, - "learning_rate": 0.002863220241524804, - "loss": 0.7301, - "step": 1781 - }, - { - "epoch": 0.1370663795092685, - "learning_rate": 0.0028630689809691044, - "loss": 1.0963, - "step": 1782 - }, - { - "epoch": 0.13714329666948696, - "learning_rate": 0.002862917640822387, - "loss": 0.9698, - "step": 1783 - }, - { - "epoch": 0.1372202138297054, - "learning_rate": 0.0028627662210934874, - "loss": 1.1856, - "step": 1784 - }, - { - "epoch": 0.13729713098992385, - "learning_rate": 0.0028626147217912477, - "loss": 0.9043, - "step": 1785 - }, - { - "epoch": 0.13737404815014229, - "learning_rate": 0.002862463142924515, - "loss": 1.1654, - "step": 1786 - }, - { - "epoch": 0.13745096531036075, - "learning_rate": 0.0028623114845021396, - "loss": 
1.2161, - "step": 1787 - }, - { - "epoch": 0.13752788247057918, - "learning_rate": 0.0028621597465329765, - "loss": 1.1165, - "step": 1788 - }, - { - "epoch": 0.13760479963079764, - "learning_rate": 0.0028620079290258863, - "loss": 1.0131, - "step": 1789 - }, - { - "epoch": 0.13768171679101607, - "learning_rate": 0.002861856031989733, - "loss": 0.8552, - "step": 1790 - }, - { - "epoch": 0.13775863395123453, - "learning_rate": 0.0028617040554333876, - "loss": 1.0169, - "step": 1791 - }, - { - "epoch": 0.13783555111145296, - "learning_rate": 0.0028615519993657234, - "loss": 1.3486, - "step": 1792 - }, - { - "epoch": 0.13791246827167142, - "learning_rate": 0.002861399863795618, - "loss": 0.9617, - "step": 1793 - }, - { - "epoch": 0.13798938543188985, - "learning_rate": 0.002861247648731957, - "loss": 0.9184, - "step": 1794 - }, - { - "epoch": 0.1380663025921083, - "learning_rate": 0.0028610953541836257, - "loss": 1.1691, - "step": 1795 - }, - { - "epoch": 0.13814321975232674, - "learning_rate": 0.002860942980159519, - "loss": 1.2551, - "step": 1796 - }, - { - "epoch": 0.1382201369125452, - "learning_rate": 0.0028607905266685336, - "loss": 1.2939, - "step": 1797 - }, - { - "epoch": 0.13829705407276363, - "learning_rate": 0.0028606379937195707, - "loss": 1.2948, - "step": 1798 - }, - { - "epoch": 0.1383739712329821, - "learning_rate": 0.0028604853813215376, - "loss": 0.8462, - "step": 1799 - }, - { - "epoch": 0.13845088839320052, - "learning_rate": 0.002860332689483345, - "loss": 1.3494, - "step": 1800 - }, - { - "epoch": 0.13852780555341898, - "learning_rate": 0.0028601799182139086, - "loss": 0.9626, - "step": 1801 - }, - { - "epoch": 0.1386047227136374, - "learning_rate": 0.00286002706752215, - "loss": 1.0662, - "step": 1802 - }, - { - "epoch": 0.13868163987385584, - "learning_rate": 0.002859874137416993, - "loss": 1.2386, - "step": 1803 - }, - { - "epoch": 0.1387585570340743, - "learning_rate": 0.002859721127907368, - "loss": 1.5507, - "step": 1804 - }, - { - "epoch": 0.13883547419429274, - "learning_rate": 0.0028595680390022093, - "loss": 1.2978, - "step": 1805 - }, - { - "epoch": 0.1389123913545112, - "learning_rate": 0.002859414870710456, - "loss": 1.1999, - "step": 1806 - }, - { - "epoch": 0.13898930851472963, - "learning_rate": 0.0028592616230410516, - "loss": 1.1199, - "step": 1807 - }, - { - "epoch": 0.13906622567494809, - "learning_rate": 0.0028591082960029443, - "loss": 0.9053, - "step": 1808 - }, - { - "epoch": 0.13914314283516652, - "learning_rate": 0.0028589548896050874, - "loss": 1.2272, - "step": 1809 - }, - { - "epoch": 0.13922005999538498, - "learning_rate": 0.002858801403856438, - "loss": 1.04, - "step": 1810 - }, - { - "epoch": 0.1392969771556034, - "learning_rate": 0.0028586478387659588, - "loss": 1.0117, - "step": 1811 - }, - { - "epoch": 0.13937389431582187, - "learning_rate": 0.0028584941943426163, - "loss": 1.0784, - "step": 1812 - }, - { - "epoch": 0.1394508114760403, - "learning_rate": 0.002858340470595382, - "loss": 0.914, - "step": 1813 - }, - { - "epoch": 0.13952772863625876, - "learning_rate": 0.002858186667533232, - "loss": 1.0431, - "step": 1814 - }, - { - "epoch": 0.1396046457964772, - "learning_rate": 0.0028580327851651464, - "loss": 1.0706, - "step": 1815 - }, - { - "epoch": 0.13968156295669565, - "learning_rate": 0.0028578788235001117, - "loss": 1.2811, - "step": 1816 - }, - { - "epoch": 0.13975848011691408, - "learning_rate": 0.0028577247825471177, - "loss": 1.2036, - "step": 1817 - }, - { - "epoch": 0.13983539727713254, - "learning_rate": 
0.002857570662315158, - "loss": 1.3338, - "step": 1818 - }, - { - "epoch": 0.13991231443735097, - "learning_rate": 0.002857416462813233, - "loss": 1.1993, - "step": 1819 - }, - { - "epoch": 0.13998923159756943, - "learning_rate": 0.0028572621840503454, - "loss": 1.1732, - "step": 1820 - }, - { - "epoch": 0.14006614875778786, - "learning_rate": 0.002857107826035505, - "loss": 1.1578, - "step": 1821 - }, - { - "epoch": 0.14014306591800632, - "learning_rate": 0.0028569533887777237, - "loss": 1.3077, - "step": 1822 - }, - { - "epoch": 0.14021998307822475, - "learning_rate": 0.0028567988722860206, - "loss": 0.9864, - "step": 1823 - }, - { - "epoch": 0.14029690023844318, - "learning_rate": 0.002856644276569417, - "loss": 1.3024, - "step": 1824 - }, - { - "epoch": 0.14037381739866164, - "learning_rate": 0.00285648960163694, - "loss": 1.3467, - "step": 1825 - }, - { - "epoch": 0.14045073455888007, - "learning_rate": 0.002856334847497622, - "loss": 1.1539, - "step": 1826 - }, - { - "epoch": 0.14052765171909853, - "learning_rate": 0.002856180014160498, - "loss": 1.0856, - "step": 1827 - }, - { - "epoch": 0.14060456887931697, - "learning_rate": 0.0028560251016346103, - "loss": 1.2987, - "step": 1828 - }, - { - "epoch": 0.14068148603953542, - "learning_rate": 0.0028558701099290033, - "loss": 1.3895, - "step": 1829 - }, - { - "epoch": 0.14075840319975386, - "learning_rate": 0.0028557150390527276, - "loss": 1.4298, - "step": 1830 - }, - { - "epoch": 0.14083532035997232, - "learning_rate": 0.002855559889014838, - "loss": 1.4291, - "step": 1831 - }, - { - "epoch": 0.14091223752019075, - "learning_rate": 0.0028554046598243937, - "loss": 1.0573, - "step": 1832 - }, - { - "epoch": 0.1409891546804092, - "learning_rate": 0.0028552493514904585, - "loss": 1.2162, - "step": 1833 - }, - { - "epoch": 0.14106607184062764, - "learning_rate": 0.0028550939640221017, - "loss": 0.8612, - "step": 1834 - }, - { - "epoch": 0.1411429890008461, - "learning_rate": 0.002854938497428396, - "loss": 0.9214, - "step": 1835 - }, - { - "epoch": 0.14121990616106453, - "learning_rate": 0.002854782951718419, - "loss": 1.0045, - "step": 1836 - }, - { - "epoch": 0.141296823321283, - "learning_rate": 0.0028546273269012537, - "loss": 1.2892, - "step": 1837 - }, - { - "epoch": 0.14137374048150142, - "learning_rate": 0.002854471622985987, - "loss": 1.2209, - "step": 1838 - }, - { - "epoch": 0.14145065764171988, - "learning_rate": 0.0028543158399817106, - "loss": 1.263, - "step": 1839 - }, - { - "epoch": 0.1415275748019383, - "learning_rate": 0.0028541599778975205, - "loss": 1.2222, - "step": 1840 - }, - { - "epoch": 0.14160449196215677, - "learning_rate": 0.002854004036742519, - "loss": 1.1956, - "step": 1841 - }, - { - "epoch": 0.1416814091223752, - "learning_rate": 0.00285384801652581, - "loss": 1.1176, - "step": 1842 - }, - { - "epoch": 0.14175832628259363, - "learning_rate": 0.0028536919172565045, - "loss": 0.9716, - "step": 1843 - }, - { - "epoch": 0.1418352434428121, - "learning_rate": 0.0028535357389437173, - "loss": 1.0649, - "step": 1844 - }, - { - "epoch": 0.14191216060303052, - "learning_rate": 0.0028533794815965674, - "loss": 0.9432, - "step": 1845 - }, - { - "epoch": 0.14198907776324898, - "learning_rate": 0.0028532231452241797, - "loss": 0.5686, - "step": 1846 - }, - { - "epoch": 0.14206599492346741, - "learning_rate": 0.0028530667298356815, - "loss": 0.9443, - "step": 1847 - }, - { - "epoch": 0.14214291208368587, - "learning_rate": 0.002852910235440207, - "loss": 1.2505, - "step": 1848 - }, - { - "epoch": 0.1422198292439043, - 
"learning_rate": 0.0028527536620468943, - "loss": 1.3604, - "step": 1849 - }, - { - "epoch": 0.14229674640412276, - "learning_rate": 0.002852597009664885, - "loss": 1.1274, - "step": 1850 - }, - { - "epoch": 0.1423736635643412, - "learning_rate": 0.002852440278303327, - "loss": 1.2805, - "step": 1851 - }, - { - "epoch": 0.14245058072455966, - "learning_rate": 0.0028522834679713715, - "loss": 1.4961, - "step": 1852 - }, - { - "epoch": 0.1425274978847781, - "learning_rate": 0.0028521265786781752, - "loss": 1.5282, - "step": 1853 - }, - { - "epoch": 0.14260441504499655, - "learning_rate": 0.0028519696104328984, - "loss": 1.2071, - "step": 1854 - }, - { - "epoch": 0.14268133220521498, - "learning_rate": 0.0028518125632447074, - "loss": 0.8997, - "step": 1855 - }, - { - "epoch": 0.14275824936543344, - "learning_rate": 0.0028516554371227723, - "loss": 1.5829, - "step": 1856 - }, - { - "epoch": 0.14283516652565187, - "learning_rate": 0.0028514982320762667, - "loss": 1.0073, - "step": 1857 - }, - { - "epoch": 0.14291208368587033, - "learning_rate": 0.0028513409481143713, - "loss": 0.9761, - "step": 1858 - }, - { - "epoch": 0.14298900084608876, - "learning_rate": 0.0028511835852462696, - "loss": 1.5369, - "step": 1859 - }, - { - "epoch": 0.14306591800630722, - "learning_rate": 0.0028510261434811506, - "loss": 1.4548, - "step": 1860 - }, - { - "epoch": 0.14314283516652565, - "learning_rate": 0.0028508686228282064, - "loss": 0.9244, - "step": 1861 - }, - { - "epoch": 0.1432197523267441, - "learning_rate": 0.002850711023296635, - "loss": 1.0911, - "step": 1862 - }, - { - "epoch": 0.14329666948696254, - "learning_rate": 0.0028505533448956407, - "loss": 1.191, - "step": 1863 - }, - { - "epoch": 0.14337358664718097, - "learning_rate": 0.0028503955876344285, - "loss": 1.3685, - "step": 1864 - }, - { - "epoch": 0.14345050380739943, - "learning_rate": 0.00285023775152221, - "loss": 1.1106, - "step": 1865 - }, - { - "epoch": 0.14352742096761786, - "learning_rate": 0.002850079836568203, - "loss": 0.9043, - "step": 1866 - }, - { - "epoch": 0.14360433812783632, - "learning_rate": 0.0028499218427816267, - "loss": 1.0367, - "step": 1867 - }, - { - "epoch": 0.14368125528805475, - "learning_rate": 0.002849763770171707, - "loss": 0.9066, - "step": 1868 - }, - { - "epoch": 0.1437581724482732, - "learning_rate": 0.0028496056187476745, - "loss": 1.5269, - "step": 1869 - }, - { - "epoch": 0.14383508960849165, - "learning_rate": 0.002849447388518763, - "loss": 1.2532, - "step": 1870 - }, - { - "epoch": 0.1439120067687101, - "learning_rate": 0.0028492890794942125, - "loss": 0.57, - "step": 1871 - }, - { - "epoch": 0.14398892392892854, - "learning_rate": 0.0028491306916832663, - "loss": 1.0754, - "step": 1872 - }, - { - "epoch": 0.144065841089147, - "learning_rate": 0.002848972225095173, - "loss": 1.199, - "step": 1873 - }, - { - "epoch": 0.14414275824936543, - "learning_rate": 0.0028488136797391855, - "loss": 1.037, - "step": 1874 - }, - { - "epoch": 0.1442196754095839, - "learning_rate": 0.0028486550556245617, - "loss": 1.0015, - "step": 1875 - }, - { - "epoch": 0.14429659256980232, - "learning_rate": 0.0028484963527605637, - "loss": 1.2186, - "step": 1876 - }, - { - "epoch": 0.14437350973002078, - "learning_rate": 0.002848337571156458, - "loss": 1.2143, - "step": 1877 - }, - { - "epoch": 0.1444504268902392, - "learning_rate": 0.0028481787108215165, - "loss": 1.2161, - "step": 1878 - }, - { - "epoch": 0.14452734405045767, - "learning_rate": 0.0028480197717650154, - "loss": 0.712, - "step": 1879 - }, - { - "epoch": 
0.1446042612106761, - "learning_rate": 0.002847860753996234, - "loss": 1.2457, - "step": 1880 - }, - { - "epoch": 0.14468117837089456, - "learning_rate": 0.002847701657524459, - "loss": 1.331, - "step": 1881 - }, - { - "epoch": 0.144758095531113, - "learning_rate": 0.0028475424823589802, - "loss": 1.4052, - "step": 1882 - }, - { - "epoch": 0.14483501269133145, - "learning_rate": 0.0028473832285090908, - "loss": 1.0963, - "step": 1883 - }, - { - "epoch": 0.14491192985154988, - "learning_rate": 0.002847223895984091, - "loss": 1.1275, - "step": 1884 - }, - { - "epoch": 0.1449888470117683, - "learning_rate": 0.0028470644847932835, - "loss": 1.5927, - "step": 1885 - }, - { - "epoch": 0.14506576417198677, - "learning_rate": 0.002846904994945977, - "loss": 1.0431, - "step": 1886 - }, - { - "epoch": 0.1451426813322052, - "learning_rate": 0.002846745426451484, - "loss": 1.37, - "step": 1887 - }, - { - "epoch": 0.14521959849242366, - "learning_rate": 0.0028465857793191223, - "loss": 1.3338, - "step": 1888 - }, - { - "epoch": 0.1452965156526421, - "learning_rate": 0.002846426053558213, - "loss": 0.9425, - "step": 1889 - }, - { - "epoch": 0.14537343281286055, - "learning_rate": 0.0028462662491780836, - "loss": 1.2258, - "step": 1890 - }, - { - "epoch": 0.14545034997307899, - "learning_rate": 0.0028461063661880653, - "loss": 0.8685, - "step": 1891 - }, - { - "epoch": 0.14552726713329744, - "learning_rate": 0.0028459464045974925, - "loss": 1.0388, - "step": 1892 - }, - { - "epoch": 0.14560418429351588, - "learning_rate": 0.0028457863644157067, - "loss": 1.2992, - "step": 1893 - }, - { - "epoch": 0.14568110145373434, - "learning_rate": 0.002845626245652053, - "loss": 1.0715, - "step": 1894 - }, - { - "epoch": 0.14575801861395277, - "learning_rate": 0.00284546604831588, - "loss": 1.5183, - "step": 1895 - }, - { - "epoch": 0.14583493577417123, - "learning_rate": 0.0028453057724165424, - "loss": 1.0368, - "step": 1896 - }, - { - "epoch": 0.14591185293438966, - "learning_rate": 0.0028451454179633986, - "loss": 1.0643, - "step": 1897 - }, - { - "epoch": 0.14598877009460812, - "learning_rate": 0.0028449849849658117, - "loss": 1.4871, - "step": 1898 - }, - { - "epoch": 0.14606568725482655, - "learning_rate": 0.0028448244734331505, - "loss": 0.7954, - "step": 1899 - }, - { - "epoch": 0.146142604415045, - "learning_rate": 0.0028446638833747864, - "loss": 0.9552, - "step": 1900 - }, - { - "epoch": 0.14621952157526344, - "learning_rate": 0.0028445032148000965, - "loss": 1.2515, - "step": 1901 - }, - { - "epoch": 0.1462964387354819, - "learning_rate": 0.002844342467718463, - "loss": 1.0473, - "step": 1902 - }, - { - "epoch": 0.14637335589570033, - "learning_rate": 0.0028441816421392718, - "loss": 0.8347, - "step": 1903 - }, - { - "epoch": 0.1464502730559188, - "learning_rate": 0.0028440207380719135, - "loss": 1.0339, - "step": 1904 - }, - { - "epoch": 0.14652719021613722, - "learning_rate": 0.0028438597555257837, - "loss": 1.388, - "step": 1905 - }, - { - "epoch": 0.14660410737635565, - "learning_rate": 0.002843698694510282, - "loss": 0.9474, - "step": 1906 - }, - { - "epoch": 0.1466810245365741, - "learning_rate": 0.0028435375550348134, - "loss": 1.3463, - "step": 1907 - }, - { - "epoch": 0.14675794169679254, - "learning_rate": 0.002843376337108787, - "loss": 1.4996, - "step": 1908 - }, - { - "epoch": 0.146834858857011, - "learning_rate": 0.002843215040741616, - "loss": 1.5202, - "step": 1909 - }, - { - "epoch": 0.14691177601722943, - "learning_rate": 0.002843053665942719, - "loss": 0.8599, - "step": 1910 - }, - { 
- "epoch": 0.1469886931774479, - "learning_rate": 0.0028428922127215184, - "loss": 0.9895, - "step": 1911 - }, - { - "epoch": 0.14706561033766632, - "learning_rate": 0.002842730681087442, - "loss": 1.2132, - "step": 1912 - }, - { - "epoch": 0.14714252749788478, - "learning_rate": 0.002842569071049922, - "loss": 1.0408, - "step": 1913 - }, - { - "epoch": 0.14721944465810322, - "learning_rate": 0.002842407382618395, - "loss": 0.9303, - "step": 1914 - }, - { - "epoch": 0.14729636181832167, - "learning_rate": 0.0028422456158023017, - "loss": 1.5299, - "step": 1915 - }, - { - "epoch": 0.1473732789785401, - "learning_rate": 0.0028420837706110876, - "loss": 1.1313, - "step": 1916 - }, - { - "epoch": 0.14745019613875857, - "learning_rate": 0.002841921847054204, - "loss": 1.3461, - "step": 1917 - }, - { - "epoch": 0.147527113298977, - "learning_rate": 0.002841759845141105, - "loss": 1.2145, - "step": 1918 - }, - { - "epoch": 0.14760403045919546, - "learning_rate": 0.0028415977648812503, - "loss": 1.4611, - "step": 1919 - }, - { - "epoch": 0.1476809476194139, - "learning_rate": 0.002841435606284104, - "loss": 0.866, - "step": 1920 - }, - { - "epoch": 0.14775786477963235, - "learning_rate": 0.0028412733693591345, - "loss": 1.0392, - "step": 1921 - }, - { - "epoch": 0.14783478193985078, - "learning_rate": 0.002841111054115815, - "loss": 1.1667, - "step": 1922 - }, - { - "epoch": 0.14791169910006924, - "learning_rate": 0.0028409486605636236, - "loss": 1.4217, - "step": 1923 - }, - { - "epoch": 0.14798861626028767, - "learning_rate": 0.002840786188712042, - "loss": 1.1517, - "step": 1924 - }, - { - "epoch": 0.1480655334205061, - "learning_rate": 0.0028406236385705584, - "loss": 0.9707, - "step": 1925 - }, - { - "epoch": 0.14814245058072456, - "learning_rate": 0.0028404610101486626, - "loss": 1.2975, - "step": 1926 - }, - { - "epoch": 0.148219367740943, - "learning_rate": 0.002840298303455852, - "loss": 1.2556, - "step": 1927 - }, - { - "epoch": 0.14829628490116145, - "learning_rate": 0.0028401355185016263, - "loss": 1.1761, - "step": 1928 - }, - { - "epoch": 0.14837320206137988, - "learning_rate": 0.002839972655295491, - "loss": 0.9831, - "step": 1929 - }, - { - "epoch": 0.14845011922159834, - "learning_rate": 0.002839809713846956, - "loss": 1.1987, - "step": 1930 - }, - { - "epoch": 0.14852703638181677, - "learning_rate": 0.0028396466941655357, - "loss": 1.0997, - "step": 1931 - }, - { - "epoch": 0.14860395354203523, - "learning_rate": 0.002839483596260748, - "loss": 1.0292, - "step": 1932 - }, - { - "epoch": 0.14868087070225366, - "learning_rate": 0.002839320420142118, - "loss": 1.0209, - "step": 1933 - }, - { - "epoch": 0.14875778786247212, - "learning_rate": 0.0028391571658191723, - "loss": 1.2296, - "step": 1934 - }, - { - "epoch": 0.14883470502269056, - "learning_rate": 0.0028389938333014445, - "loss": 1.1078, - "step": 1935 - }, - { - "epoch": 0.14891162218290901, - "learning_rate": 0.002838830422598471, - "loss": 0.9301, - "step": 1936 - }, - { - "epoch": 0.14898853934312745, - "learning_rate": 0.002838666933719794, - "loss": 1.3642, - "step": 1937 - }, - { - "epoch": 0.1490654565033459, - "learning_rate": 0.0028385033666749595, - "loss": 1.2228, - "step": 1938 - }, - { - "epoch": 0.14914237366356434, - "learning_rate": 0.002838339721473518, - "loss": 1.2052, - "step": 1939 - }, - { - "epoch": 0.1492192908237828, - "learning_rate": 0.0028381759981250263, - "loss": 1.1428, - "step": 1940 - }, - { - "epoch": 0.14929620798400123, - "learning_rate": 0.0028380121966390427, - "loss": 1.3212, - "step": 
1941 - }, - { - "epoch": 0.1493731251442197, - "learning_rate": 0.0028378483170251327, - "loss": 0.8909, - "step": 1942 - }, - { - "epoch": 0.14945004230443812, - "learning_rate": 0.0028376843592928655, - "loss": 1.0285, - "step": 1943 - }, - { - "epoch": 0.14952695946465658, - "learning_rate": 0.002837520323451814, - "loss": 0.9375, - "step": 1944 - }, - { - "epoch": 0.149603876624875, - "learning_rate": 0.002837356209511557, - "loss": 1.2237, - "step": 1945 - }, - { - "epoch": 0.14968079378509344, - "learning_rate": 0.0028371920174816777, - "loss": 1.1826, - "step": 1946 - }, - { - "epoch": 0.1497577109453119, - "learning_rate": 0.0028370277473717628, - "loss": 1.1488, - "step": 1947 - }, - { - "epoch": 0.14983462810553033, - "learning_rate": 0.0028368633991914037, - "loss": 1.0076, - "step": 1948 - }, - { - "epoch": 0.1499115452657488, - "learning_rate": 0.0028366989729501977, - "loss": 0.9484, - "step": 1949 - }, - { - "epoch": 0.14998846242596722, - "learning_rate": 0.0028365344686577462, - "loss": 1.055, - "step": 1950 - }, - { - "epoch": 0.15006537958618568, - "learning_rate": 0.0028363698863236534, - "loss": 1.0813, - "step": 1951 - }, - { - "epoch": 0.1501422967464041, - "learning_rate": 0.0028362052259575305, - "loss": 1.2597, - "step": 1952 - }, - { - "epoch": 0.15021921390662257, - "learning_rate": 0.002836040487568992, - "loss": 1.1157, - "step": 1953 - }, - { - "epoch": 0.150296131066841, - "learning_rate": 0.002835875671167657, - "loss": 1.2661, - "step": 1954 - }, - { - "epoch": 0.15037304822705946, - "learning_rate": 0.002835710776763149, - "loss": 1.3811, - "step": 1955 - }, - { - "epoch": 0.1504499653872779, - "learning_rate": 0.002835545804365097, - "loss": 1.2374, - "step": 1956 - }, - { - "epoch": 0.15052688254749635, - "learning_rate": 0.0028353807539831335, - "loss": 1.2996, - "step": 1957 - }, - { - "epoch": 0.15060379970771479, - "learning_rate": 0.002835215625626896, - "loss": 1.39, - "step": 1958 - }, - { - "epoch": 0.15068071686793325, - "learning_rate": 0.002835050419306027, - "loss": 0.9618, - "step": 1959 - }, - { - "epoch": 0.15075763402815168, - "learning_rate": 0.0028348851350301725, - "loss": 0.8978, - "step": 1960 - }, - { - "epoch": 0.15083455118837014, - "learning_rate": 0.0028347197728089838, - "loss": 1.002, - "step": 1961 - }, - { - "epoch": 0.15091146834858857, - "learning_rate": 0.0028345543326521163, - "loss": 1.0203, - "step": 1962 - }, - { - "epoch": 0.15098838550880703, - "learning_rate": 0.002834388814569231, - "loss": 1.1797, - "step": 1963 - }, - { - "epoch": 0.15106530266902546, - "learning_rate": 0.0028342232185699914, - "loss": 1.3316, - "step": 1964 - }, - { - "epoch": 0.15114221982924392, - "learning_rate": 0.002834057544664068, - "loss": 0.8223, - "step": 1965 - }, - { - "epoch": 0.15121913698946235, - "learning_rate": 0.002833891792861134, - "loss": 1.2784, - "step": 1966 - }, - { - "epoch": 0.15129605414968078, - "learning_rate": 0.002833725963170868, - "loss": 0.9315, - "step": 1967 - }, - { - "epoch": 0.15137297130989924, - "learning_rate": 0.002833560055602954, - "loss": 0.9381, - "step": 1968 - }, - { - "epoch": 0.15144988847011767, - "learning_rate": 0.0028333940701670774, - "loss": 0.8995, - "step": 1969 - }, - { - "epoch": 0.15152680563033613, - "learning_rate": 0.002833228006872932, - "loss": 1.3886, - "step": 1970 - }, - { - "epoch": 0.15160372279055456, - "learning_rate": 0.0028330618657302136, - "loss": 1.0036, - "step": 1971 - }, - { - "epoch": 0.15168063995077302, - "learning_rate": 0.0028328956467486236, - "loss": 
1.2779, - "step": 1972 - }, - { - "epoch": 0.15175755711099145, - "learning_rate": 0.0028327293499378677, - "loss": 1.2057, - "step": 1973 - }, - { - "epoch": 0.1518344742712099, - "learning_rate": 0.002832562975307656, - "loss": 1.0431, - "step": 1974 - }, - { - "epoch": 0.15191139143142834, - "learning_rate": 0.0028323965228677036, - "loss": 1.0746, - "step": 1975 - }, - { - "epoch": 0.1519883085916468, - "learning_rate": 0.00283222999262773, - "loss": 1.0914, - "step": 1976 - }, - { - "epoch": 0.15206522575186524, - "learning_rate": 0.0028320633845974577, - "loss": 1.291, - "step": 1977 - }, - { - "epoch": 0.1521421429120837, - "learning_rate": 0.002831896698786617, - "loss": 1.1502, - "step": 1978 - }, - { - "epoch": 0.15221906007230213, - "learning_rate": 0.00283172993520494, - "loss": 1.2315, - "step": 1979 - }, - { - "epoch": 0.15229597723252059, - "learning_rate": 0.0028315630938621633, - "loss": 1.1341, - "step": 1980 - }, - { - "epoch": 0.15237289439273902, - "learning_rate": 0.0028313961747680307, - "loss": 1.3974, - "step": 1981 - }, - { - "epoch": 0.15244981155295748, - "learning_rate": 0.0028312291779322876, - "loss": 1.0439, - "step": 1982 - }, - { - "epoch": 0.1525267287131759, - "learning_rate": 0.0028310621033646854, - "loss": 1.0077, - "step": 1983 - }, - { - "epoch": 0.15260364587339437, - "learning_rate": 0.00283089495107498, - "loss": 1.6118, - "step": 1984 - }, - { - "epoch": 0.1526805630336128, - "learning_rate": 0.0028307277210729313, - "loss": 0.9113, - "step": 1985 - }, - { - "epoch": 0.15275748019383123, - "learning_rate": 0.002830560413368304, - "loss": 1.3026, - "step": 1986 - }, - { - "epoch": 0.1528343973540497, - "learning_rate": 0.002830393027970868, - "loss": 1.131, - "step": 1987 - }, - { - "epoch": 0.15291131451426812, - "learning_rate": 0.002830225564890396, - "loss": 1.2247, - "step": 1988 - }, - { - "epoch": 0.15298823167448658, - "learning_rate": 0.002830058024136667, - "loss": 1.2465, - "step": 1989 - }, - { - "epoch": 0.153065148834705, - "learning_rate": 0.002829890405719464, - "loss": 1.0329, - "step": 1990 - }, - { - "epoch": 0.15314206599492347, - "learning_rate": 0.0028297227096485736, - "loss": 1.2137, - "step": 1991 - }, - { - "epoch": 0.1532189831551419, - "learning_rate": 0.0028295549359337894, - "loss": 1.1542, - "step": 1992 - }, - { - "epoch": 0.15329590031536036, - "learning_rate": 0.002829387084584906, - "loss": 1.127, - "step": 1993 - }, - { - "epoch": 0.1533728174755788, - "learning_rate": 0.0028292191556117258, - "loss": 0.8747, - "step": 1994 - }, - { - "epoch": 0.15344973463579725, - "learning_rate": 0.002829051149024054, - "loss": 1.1015, - "step": 1995 - }, - { - "epoch": 0.15352665179601568, - "learning_rate": 0.0028288830648317, - "loss": 0.9848, - "step": 1996 - }, - { - "epoch": 0.15360356895623414, - "learning_rate": 0.0028287149030444786, - "loss": 1.3574, - "step": 1997 - }, - { - "epoch": 0.15368048611645257, - "learning_rate": 0.00282854666367221, - "loss": 1.0259, - "step": 1998 - }, - { - "epoch": 0.15375740327667103, - "learning_rate": 0.002828378346724717, - "loss": 1.081, - "step": 1999 - }, - { - "epoch": 0.15383432043688947, - "learning_rate": 0.0028282099522118275, - "loss": 0.9127, - "step": 2000 - }, - { - "epoch": 0.15391123759710792, - "learning_rate": 0.002828041480143375, - "loss": 1.1192, - "step": 2001 - }, - { - "epoch": 0.15398815475732636, - "learning_rate": 0.0028278729305291966, - "loss": 1.2785, - "step": 2002 - }, - { - "epoch": 0.15406507191754482, - "learning_rate": 0.0028277043033791338, - 
"loss": 1.0425, - "step": 2003 - }, - { - "epoch": 0.15414198907776325, - "learning_rate": 0.0028275355987030333, - "loss": 1.3098, - "step": 2004 - }, - { - "epoch": 0.1542189062379817, - "learning_rate": 0.002827366816510745, - "loss": 1.0905, - "step": 2005 - }, - { - "epoch": 0.15429582339820014, - "learning_rate": 0.0028271979568121252, - "loss": 1.1252, - "step": 2006 - }, - { - "epoch": 0.15437274055841857, - "learning_rate": 0.0028270290196170337, - "loss": 1.1481, - "step": 2007 - }, - { - "epoch": 0.15444965771863703, - "learning_rate": 0.0028268600049353343, - "loss": 1.1522, - "step": 2008 - }, - { - "epoch": 0.15452657487885546, - "learning_rate": 0.002826690912776897, - "loss": 1.1175, - "step": 2009 - }, - { - "epoch": 0.15460349203907392, - "learning_rate": 0.0028265217431515943, - "loss": 0.8834, - "step": 2010 - }, - { - "epoch": 0.15468040919929235, - "learning_rate": 0.0028263524960693044, - "loss": 1.1786, - "step": 2011 - }, - { - "epoch": 0.1547573263595108, - "learning_rate": 0.0028261831715399107, - "loss": 1.085, - "step": 2012 - }, - { - "epoch": 0.15483424351972924, - "learning_rate": 0.0028260137695732984, - "loss": 1.3008, - "step": 2013 - }, - { - "epoch": 0.1549111606799477, - "learning_rate": 0.0028258442901793607, - "loss": 1.0226, - "step": 2014 - }, - { - "epoch": 0.15498807784016613, - "learning_rate": 0.0028256747333679933, - "loss": 1.5447, - "step": 2015 - }, - { - "epoch": 0.1550649950003846, - "learning_rate": 0.0028255050991490963, - "loss": 1.1576, - "step": 2016 - }, - { - "epoch": 0.15514191216060302, - "learning_rate": 0.0028253353875325755, - "loss": 0.6988, - "step": 2017 - }, - { - "epoch": 0.15521882932082148, - "learning_rate": 0.00282516559852834, - "loss": 1.0037, - "step": 2018 - }, - { - "epoch": 0.15529574648103991, - "learning_rate": 0.0028249957321463044, - "loss": 1.1871, - "step": 2019 - }, - { - "epoch": 0.15537266364125837, - "learning_rate": 0.002824825788396387, - "loss": 0.9937, - "step": 2020 - }, - { - "epoch": 0.1554495808014768, - "learning_rate": 0.0028246557672885106, - "loss": 1.0193, - "step": 2021 - }, - { - "epoch": 0.15552649796169526, - "learning_rate": 0.0028244856688326037, - "loss": 1.2292, - "step": 2022 - }, - { - "epoch": 0.1556034151219137, - "learning_rate": 0.0028243154930385986, - "loss": 1.3331, - "step": 2023 - }, - { - "epoch": 0.15568033228213216, - "learning_rate": 0.002824145239916432, - "loss": 1.2292, - "step": 2024 - }, - { - "epoch": 0.1557572494423506, - "learning_rate": 0.002823974909476044, - "loss": 1.4199, - "step": 2025 - }, - { - "epoch": 0.15583416660256905, - "learning_rate": 0.0028238045017273815, - "loss": 1.2479, - "step": 2026 - }, - { - "epoch": 0.15591108376278748, - "learning_rate": 0.0028236340166803947, - "loss": 1.1911, - "step": 2027 - }, - { - "epoch": 0.1559880009230059, - "learning_rate": 0.0028234634543450377, - "loss": 0.9725, - "step": 2028 - }, - { - "epoch": 0.15606491808322437, - "learning_rate": 0.002823292814731271, - "loss": 1.4719, - "step": 2029 - }, - { - "epoch": 0.1561418352434428, - "learning_rate": 0.0028231220978490574, - "loss": 0.932, - "step": 2030 - }, - { - "epoch": 0.15621875240366126, - "learning_rate": 0.002822951303708366, - "loss": 1.1865, - "step": 2031 - }, - { - "epoch": 0.1562956695638797, - "learning_rate": 0.0028227804323191687, - "loss": 1.0766, - "step": 2032 - }, - { - "epoch": 0.15637258672409815, - "learning_rate": 0.0028226094836914435, - "loss": 1.0572, - "step": 2033 - }, - { - "epoch": 0.15644950388431658, - "learning_rate": 
0.002822438457835172, - "loss": 1.0668, - "step": 2034 - }, - { - "epoch": 0.15652642104453504, - "learning_rate": 0.002822267354760341, - "loss": 1.2249, - "step": 2035 - }, - { - "epoch": 0.15660333820475347, - "learning_rate": 0.002822096174476941, - "loss": 1.1557, - "step": 2036 - }, - { - "epoch": 0.15668025536497193, - "learning_rate": 0.0028219249169949678, - "loss": 1.1815, - "step": 2037 - }, - { - "epoch": 0.15675717252519036, - "learning_rate": 0.0028217535823244206, - "loss": 0.876, - "step": 2038 - }, - { - "epoch": 0.15683408968540882, - "learning_rate": 0.0028215821704753044, - "loss": 1.365, - "step": 2039 - }, - { - "epoch": 0.15691100684562725, - "learning_rate": 0.002821410681457628, - "loss": 1.1182, - "step": 2040 - }, - { - "epoch": 0.1569879240058457, - "learning_rate": 0.0028212391152814048, - "loss": 0.9862, - "step": 2041 - }, - { - "epoch": 0.15706484116606415, - "learning_rate": 0.0028210674719566526, - "loss": 1.2278, - "step": 2042 - }, - { - "epoch": 0.1571417583262826, - "learning_rate": 0.002820895751493394, - "loss": 1.8615, - "step": 2043 - }, - { - "epoch": 0.15721867548650104, - "learning_rate": 0.002820723953901656, - "loss": 1.2583, - "step": 2044 - }, - { - "epoch": 0.1572955926467195, - "learning_rate": 0.0028205520791914694, - "loss": 0.9442, - "step": 2045 - }, - { - "epoch": 0.15737250980693793, - "learning_rate": 0.0028203801273728713, - "loss": 1.3713, - "step": 2046 - }, - { - "epoch": 0.1574494269671564, - "learning_rate": 0.0028202080984559012, - "loss": 1.2726, - "step": 2047 - }, - { - "epoch": 0.15752634412737482, - "learning_rate": 0.002820035992450604, - "loss": 1.0752, - "step": 2048 - }, - { - "epoch": 0.15760326128759325, - "learning_rate": 0.00281986380936703, - "loss": 1.2511, - "step": 2049 - }, - { - "epoch": 0.1576801784478117, - "learning_rate": 0.002819691549215233, - "loss": 1.3435, - "step": 2050 - }, - { - "epoch": 0.15775709560803014, - "learning_rate": 0.00281951921200527, - "loss": 1.2046, - "step": 2051 - }, - { - "epoch": 0.1578340127682486, - "learning_rate": 0.0028193467977472055, - "loss": 1.1837, - "step": 2052 - }, - { - "epoch": 0.15791092992846703, - "learning_rate": 0.002819174306451107, - "loss": 1.4107, - "step": 2053 - }, - { - "epoch": 0.1579878470886855, - "learning_rate": 0.002819001738127045, - "loss": 1.3604, - "step": 2054 - }, - { - "epoch": 0.15806476424890392, - "learning_rate": 0.002818829092785098, - "loss": 1.2017, - "step": 2055 - }, - { - "epoch": 0.15814168140912238, - "learning_rate": 0.002818656370435345, - "loss": 1.2019, - "step": 2056 - }, - { - "epoch": 0.1582185985693408, - "learning_rate": 0.0028184835710878724, - "loss": 0.9851, - "step": 2057 - }, - { - "epoch": 0.15829551572955927, - "learning_rate": 0.00281831069475277, - "loss": 0.898, - "step": 2058 - }, - { - "epoch": 0.1583724328897777, - "learning_rate": 0.0028181377414401325, - "loss": 0.919, - "step": 2059 - }, - { - "epoch": 0.15844935004999616, - "learning_rate": 0.002817964711160058, - "loss": 1.5117, - "step": 2060 - }, - { - "epoch": 0.1585262672102146, - "learning_rate": 0.0028177916039226506, - "loss": 1.1425, - "step": 2061 - }, - { - "epoch": 0.15860318437043305, - "learning_rate": 0.0028176184197380182, - "loss": 1.2193, - "step": 2062 - }, - { - "epoch": 0.15868010153065149, - "learning_rate": 0.0028174451586162738, - "loss": 0.95, - "step": 2063 - }, - { - "epoch": 0.15875701869086994, - "learning_rate": 0.002817271820567533, - "loss": 1.0872, - "step": 2064 - }, - { - "epoch": 0.15883393585108838, - 
"learning_rate": 0.002817098405601917, - "loss": 1.1749, - "step": 2065 - }, - { - "epoch": 0.15891085301130684, - "learning_rate": 0.0028169249137295536, - "loss": 1.0426, - "step": 2066 - }, - { - "epoch": 0.15898777017152527, - "learning_rate": 0.0028167513449605715, - "loss": 1.4273, - "step": 2067 - }, - { - "epoch": 0.1590646873317437, - "learning_rate": 0.0028165776993051062, - "loss": 0.7961, - "step": 2068 - }, - { - "epoch": 0.15914160449196216, - "learning_rate": 0.002816403976773297, - "loss": 1.0494, - "step": 2069 - }, - { - "epoch": 0.1592185216521806, - "learning_rate": 0.0028162301773752875, - "loss": 1.3031, - "step": 2070 - }, - { - "epoch": 0.15929543881239905, - "learning_rate": 0.002816056301121226, - "loss": 0.9875, - "step": 2071 - }, - { - "epoch": 0.15937235597261748, - "learning_rate": 0.0028158823480212663, - "loss": 1.3561, - "step": 2072 - }, - { - "epoch": 0.15944927313283594, - "learning_rate": 0.002815708318085564, - "loss": 1.1828, - "step": 2073 - }, - { - "epoch": 0.15952619029305437, - "learning_rate": 0.0028155342113242825, - "loss": 1.1529, - "step": 2074 - }, - { - "epoch": 0.15960310745327283, - "learning_rate": 0.002815360027747587, - "loss": 1.2199, - "step": 2075 - }, - { - "epoch": 0.15968002461349126, - "learning_rate": 0.002815185767365649, - "loss": 1.6978, - "step": 2076 - }, - { - "epoch": 0.15975694177370972, - "learning_rate": 0.0028150114301886426, - "loss": 1.0705, - "step": 2077 - }, - { - "epoch": 0.15983385893392815, - "learning_rate": 0.0028148370162267493, - "loss": 1.4204, - "step": 2078 - }, - { - "epoch": 0.1599107760941466, - "learning_rate": 0.0028146625254901517, - "loss": 1.2823, - "step": 2079 - }, - { - "epoch": 0.15998769325436504, - "learning_rate": 0.0028144879579890393, - "loss": 1.0765, - "step": 2080 - }, - { - "epoch": 0.1600646104145835, - "learning_rate": 0.0028143133137336056, - "loss": 1.034, - "step": 2081 - }, - { - "epoch": 0.16014152757480193, - "learning_rate": 0.002814138592734047, - "loss": 1.1956, - "step": 2082 - }, - { - "epoch": 0.1602184447350204, - "learning_rate": 0.0028139637950005667, - "loss": 1.0665, - "step": 2083 - }, - { - "epoch": 0.16029536189523882, - "learning_rate": 0.002813788920543371, - "loss": 1.1069, - "step": 2084 - }, - { - "epoch": 0.16037227905545728, - "learning_rate": 0.002813613969372671, - "loss": 1.3421, - "step": 2085 - }, - { - "epoch": 0.16044919621567572, - "learning_rate": 0.002813438941498682, - "loss": 1.2892, - "step": 2086 - }, - { - "epoch": 0.16052611337589417, - "learning_rate": 0.002813263836931625, - "loss": 1.2271, - "step": 2087 - }, - { - "epoch": 0.1606030305361126, - "learning_rate": 0.002813088655681724, - "loss": 0.9485, - "step": 2088 - }, - { - "epoch": 0.16067994769633104, - "learning_rate": 0.0028129133977592073, - "loss": 1.8525, - "step": 2089 - }, - { - "epoch": 0.1607568648565495, - "learning_rate": 0.002812738063174309, - "loss": 1.0006, - "step": 2090 - }, - { - "epoch": 0.16083378201676793, - "learning_rate": 0.0028125626519372674, - "loss": 1.1479, - "step": 2091 - }, - { - "epoch": 0.1609106991769864, - "learning_rate": 0.002812387164058325, - "loss": 0.7483, - "step": 2092 - }, - { - "epoch": 0.16098761633720482, - "learning_rate": 0.002812211599547728, - "loss": 1.3081, - "step": 2093 - }, - { - "epoch": 0.16106453349742328, - "learning_rate": 0.002812035958415728, - "loss": 1.1483, - "step": 2094 - }, - { - "epoch": 0.1611414506576417, - "learning_rate": 0.0028118602406725815, - "loss": 1.334, - "step": 2095 - }, - { - "epoch": 
0.16121836781786017, - "learning_rate": 0.002811684446328548, - "loss": 1.0362, - "step": 2096 - }, - { - "epoch": 0.1612952849780786, - "learning_rate": 0.0028115085753938926, - "loss": 0.8121, - "step": 2097 - }, - { - "epoch": 0.16137220213829706, - "learning_rate": 0.0028113326278788845, - "loss": 1.4638, - "step": 2098 - }, - { - "epoch": 0.1614491192985155, - "learning_rate": 0.002811156603793798, - "loss": 1.3577, - "step": 2099 - }, - { - "epoch": 0.16152603645873395, - "learning_rate": 0.002810980503148911, - "loss": 1.1596, - "step": 2100 - }, - { - "epoch": 0.16160295361895238, - "learning_rate": 0.0028108043259545057, - "loss": 1.3218, - "step": 2101 - }, - { - "epoch": 0.16167987077917084, - "learning_rate": 0.0028106280722208703, - "loss": 1.068, - "step": 2102 - }, - { - "epoch": 0.16175678793938927, - "learning_rate": 0.0028104517419582954, - "loss": 1.2523, - "step": 2103 - }, - { - "epoch": 0.16183370509960773, - "learning_rate": 0.0028102753351770776, - "loss": 1.2487, - "step": 2104 - }, - { - "epoch": 0.16191062225982616, - "learning_rate": 0.0028100988518875176, - "loss": 1.0539, - "step": 2105 - }, - { - "epoch": 0.16198753942004462, - "learning_rate": 0.0028099222920999205, - "loss": 0.9122, - "step": 2106 - }, - { - "epoch": 0.16206445658026306, - "learning_rate": 0.002809745655824595, - "loss": 1.2692, - "step": 2107 - }, - { - "epoch": 0.16214137374048151, - "learning_rate": 0.002809568943071856, - "loss": 1.0734, - "step": 2108 - }, - { - "epoch": 0.16221829090069995, - "learning_rate": 0.002809392153852022, - "loss": 1.3023, - "step": 2109 - }, - { - "epoch": 0.16229520806091838, - "learning_rate": 0.002809215288175415, - "loss": 1.0414, - "step": 2110 - }, - { - "epoch": 0.16237212522113684, - "learning_rate": 0.0028090383460523624, - "loss": 1.6666, - "step": 2111 - }, - { - "epoch": 0.16244904238135527, - "learning_rate": 0.0028088613274931977, - "loss": 1.0372, - "step": 2112 - }, - { - "epoch": 0.16252595954157373, - "learning_rate": 0.0028086842325082554, - "loss": 1.187, - "step": 2113 - }, - { - "epoch": 0.16260287670179216, - "learning_rate": 0.002808507061107877, - "loss": 1.1738, - "step": 2114 - }, - { - "epoch": 0.16267979386201062, - "learning_rate": 0.0028083298133024072, - "loss": 0.9106, - "step": 2115 - }, - { - "epoch": 0.16275671102222905, - "learning_rate": 0.0028081524891021966, - "loss": 1.1799, - "step": 2116 - }, - { - "epoch": 0.1628336281824475, - "learning_rate": 0.002807975088517599, - "loss": 1.3328, - "step": 2117 - }, - { - "epoch": 0.16291054534266594, - "learning_rate": 0.002807797611558972, - "loss": 1.0491, - "step": 2118 - }, - { - "epoch": 0.1629874625028844, - "learning_rate": 0.00280762005823668, - "loss": 1.1215, - "step": 2119 - }, - { - "epoch": 0.16306437966310283, - "learning_rate": 0.0028074424285610905, - "loss": 0.8615, - "step": 2120 - }, - { - "epoch": 0.1631412968233213, - "learning_rate": 0.0028072647225425747, - "loss": 1.1726, - "step": 2121 - }, - { - "epoch": 0.16321821398353972, - "learning_rate": 0.002807086940191509, - "loss": 1.2703, - "step": 2122 - }, - { - "epoch": 0.16329513114375818, - "learning_rate": 0.0028069090815182746, - "loss": 1.3984, - "step": 2123 - }, - { - "epoch": 0.1633720483039766, - "learning_rate": 0.0028067311465332576, - "loss": 0.8942, - "step": 2124 - }, - { - "epoch": 0.16344896546419507, - "learning_rate": 0.0028065531352468465, - "loss": 1.2872, - "step": 2125 - }, - { - "epoch": 0.1635258826244135, - "learning_rate": 0.002806375047669437, - "loss": 1.2614, - "step": 2126 - 
}, - { - "epoch": 0.16360279978463196, - "learning_rate": 0.002806196883811426, - "loss": 1.0975, - "step": 2127 - }, - { - "epoch": 0.1636797169448504, - "learning_rate": 0.002806018643683218, - "loss": 1.0155, - "step": 2128 - }, - { - "epoch": 0.16375663410506885, - "learning_rate": 0.0028058403272952204, - "loss": 1.3019, - "step": 2129 - }, - { - "epoch": 0.16383355126528729, - "learning_rate": 0.002805661934657845, - "loss": 1.2465, - "step": 2130 - }, - { - "epoch": 0.16391046842550572, - "learning_rate": 0.002805483465781509, - "loss": 1.0104, - "step": 2131 - }, - { - "epoch": 0.16398738558572418, - "learning_rate": 0.0028053049206766324, - "loss": 0.8387, - "step": 2132 - }, - { - "epoch": 0.1640643027459426, - "learning_rate": 0.002805126299353641, - "loss": 1.1378, - "step": 2133 - }, - { - "epoch": 0.16414121990616107, - "learning_rate": 0.002804947601822965, - "loss": 1.3031, - "step": 2134 - }, - { - "epoch": 0.1642181370663795, - "learning_rate": 0.002804768828095039, - "loss": 1.0428, - "step": 2135 - }, - { - "epoch": 0.16429505422659796, - "learning_rate": 0.0028045899781803008, - "loss": 1.4127, - "step": 2136 - }, - { - "epoch": 0.1643719713868164, - "learning_rate": 0.002804411052089194, - "loss": 1.2058, - "step": 2137 - }, - { - "epoch": 0.16444888854703485, - "learning_rate": 0.0028042320498321673, - "loss": 1.3751, - "step": 2138 - }, - { - "epoch": 0.16452580570725328, - "learning_rate": 0.002804052971419671, - "loss": 0.6894, - "step": 2139 - }, - { - "epoch": 0.16460272286747174, - "learning_rate": 0.002803873816862163, - "loss": 1.0824, - "step": 2140 - }, - { - "epoch": 0.16467964002769017, - "learning_rate": 0.002803694586170104, - "loss": 0.9984, - "step": 2141 - }, - { - "epoch": 0.16475655718790863, - "learning_rate": 0.0028035152793539596, - "loss": 1.0274, - "step": 2142 - }, - { - "epoch": 0.16483347434812706, - "learning_rate": 0.0028033358964241993, - "loss": 1.2265, - "step": 2143 - }, - { - "epoch": 0.16491039150834552, - "learning_rate": 0.0028031564373912983, - "loss": 1.3405, - "step": 2144 - }, - { - "epoch": 0.16498730866856395, - "learning_rate": 0.002802976902265734, - "loss": 1.0912, - "step": 2145 - }, - { - "epoch": 0.1650642258287824, - "learning_rate": 0.0028027972910579907, - "loss": 1.3862, - "step": 2146 - }, - { - "epoch": 0.16514114298900084, - "learning_rate": 0.002802617603778556, - "loss": 1.3538, - "step": 2147 - }, - { - "epoch": 0.1652180601492193, - "learning_rate": 0.0028024378404379216, - "loss": 1.4892, - "step": 2148 - }, - { - "epoch": 0.16529497730943774, - "learning_rate": 0.002802258001046585, - "loss": 1.4131, - "step": 2149 - }, - { - "epoch": 0.16537189446965617, - "learning_rate": 0.002802078085615046, - "loss": 1.2851, - "step": 2150 - }, - { - "epoch": 0.16544881162987463, - "learning_rate": 0.0028018980941538103, - "loss": 1.3294, - "step": 2151 - }, - { - "epoch": 0.16552572879009306, - "learning_rate": 0.0028017180266733882, - "loss": 1.2027, - "step": 2152 - }, - { - "epoch": 0.16560264595031152, - "learning_rate": 0.002801537883184294, - "loss": 1.0872, - "step": 2153 - }, - { - "epoch": 0.16567956311052995, - "learning_rate": 0.0028013576636970465, - "loss": 1.2291, - "step": 2154 - }, - { - "epoch": 0.1657564802707484, - "learning_rate": 0.0028011773682221686, - "loss": 1.332, - "step": 2155 - }, - { - "epoch": 0.16583339743096684, - "learning_rate": 0.0028009969967701886, - "loss": 0.7623, - "step": 2156 - }, - { - "epoch": 0.1659103145911853, - "learning_rate": 0.0028008165493516378, - "loss": 1.2742, 
- "step": 2157 - }, - { - "epoch": 0.16598723175140373, - "learning_rate": 0.002800636025977053, - "loss": 0.6873, - "step": 2158 - }, - { - "epoch": 0.1660641489116222, - "learning_rate": 0.002800455426656975, - "loss": 1.072, - "step": 2159 - }, - { - "epoch": 0.16614106607184062, - "learning_rate": 0.0028002747514019495, - "loss": 1.1598, - "step": 2160 - }, - { - "epoch": 0.16621798323205908, - "learning_rate": 0.002800094000222526, - "loss": 1.0351, - "step": 2161 - }, - { - "epoch": 0.1662949003922775, - "learning_rate": 0.002799913173129259, - "loss": 1.3774, - "step": 2162 - }, - { - "epoch": 0.16637181755249597, - "learning_rate": 0.0027997322701327067, - "loss": 1.1639, - "step": 2163 - }, - { - "epoch": 0.1664487347127144, - "learning_rate": 0.0027995512912434334, - "loss": 1.3926, - "step": 2164 - }, - { - "epoch": 0.16652565187293286, - "learning_rate": 0.0027993702364720054, - "loss": 1.5027, - "step": 2165 - }, - { - "epoch": 0.1666025690331513, - "learning_rate": 0.002799189105828995, - "loss": 1.4819, - "step": 2166 - }, - { - "epoch": 0.16667948619336975, - "learning_rate": 0.002799007899324979, - "loss": 1.1601, - "step": 2167 - }, - { - "epoch": 0.16675640335358818, - "learning_rate": 0.002798826616970538, - "loss": 1.0782, - "step": 2168 - }, - { - "epoch": 0.16683332051380664, - "learning_rate": 0.002798645258776257, - "loss": 1.1136, - "step": 2169 - }, - { - "epoch": 0.16691023767402507, - "learning_rate": 0.002798463824752726, - "loss": 1.3599, - "step": 2170 - }, - { - "epoch": 0.1669871548342435, - "learning_rate": 0.0027982823149105386, - "loss": 1.3108, - "step": 2171 - }, - { - "epoch": 0.16706407199446197, - "learning_rate": 0.0027981007292602945, - "loss": 0.9383, - "step": 2172 - }, - { - "epoch": 0.1671409891546804, - "learning_rate": 0.0027979190678125957, - "loss": 1.1164, - "step": 2173 - }, - { - "epoch": 0.16721790631489886, - "learning_rate": 0.00279773733057805, - "loss": 1.2451, - "step": 2174 - }, - { - "epoch": 0.1672948234751173, - "learning_rate": 0.0027975555175672687, - "loss": 0.9756, - "step": 2175 - }, - { - "epoch": 0.16737174063533575, - "learning_rate": 0.0027973736287908685, - "loss": 1.1213, - "step": 2176 - }, - { - "epoch": 0.16744865779555418, - "learning_rate": 0.0027971916642594704, - "loss": 1.0953, - "step": 2177 - }, - { - "epoch": 0.16752557495577264, - "learning_rate": 0.002797009623983699, - "loss": 1.499, - "step": 2178 - }, - { - "epoch": 0.16760249211599107, - "learning_rate": 0.0027968275079741844, - "loss": 1.0063, - "step": 2179 - }, - { - "epoch": 0.16767940927620953, - "learning_rate": 0.002796645316241559, - "loss": 1.2216, - "step": 2180 - }, - { - "epoch": 0.16775632643642796, - "learning_rate": 0.0027964630487964633, - "loss": 1.3844, - "step": 2181 - }, - { - "epoch": 0.16783324359664642, - "learning_rate": 0.0027962807056495383, - "loss": 1.2136, - "step": 2182 - }, - { - "epoch": 0.16791016075686485, - "learning_rate": 0.002796098286811432, - "loss": 1.3936, - "step": 2183 - }, - { - "epoch": 0.1679870779170833, - "learning_rate": 0.002795915792292797, - "loss": 1.2363, - "step": 2184 - }, - { - "epoch": 0.16806399507730174, - "learning_rate": 0.0027957332221042867, - "loss": 1.2134, - "step": 2185 - }, - { - "epoch": 0.1681409122375202, - "learning_rate": 0.002795550576256564, - "loss": 1.7939, - "step": 2186 - }, - { - "epoch": 0.16821782939773863, - "learning_rate": 0.0027953678547602934, - "loss": 1.2313, - "step": 2187 - }, - { - "epoch": 0.1682947465579571, - "learning_rate": 0.002795185057626143, - 
"loss": 1.0828, - "step": 2188 - }, - { - "epoch": 0.16837166371817552, - "learning_rate": 0.0027950021848647875, - "loss": 1.3092, - "step": 2189 - }, - { - "epoch": 0.16844858087839398, - "learning_rate": 0.002794819236486905, - "loss": 1.3984, - "step": 2190 - }, - { - "epoch": 0.16852549803861241, - "learning_rate": 0.0027946362125031783, - "loss": 1.1199, - "step": 2191 - }, - { - "epoch": 0.16860241519883085, - "learning_rate": 0.0027944531129242933, - "loss": 0.9542, - "step": 2192 - }, - { - "epoch": 0.1686793323590493, - "learning_rate": 0.002794269937760942, - "loss": 1.0747, - "step": 2193 - }, - { - "epoch": 0.16875624951926774, - "learning_rate": 0.00279408668702382, - "loss": 1.178, - "step": 2194 - }, - { - "epoch": 0.1688331666794862, - "learning_rate": 0.002793903360723628, - "loss": 1.1669, - "step": 2195 - }, - { - "epoch": 0.16891008383970463, - "learning_rate": 0.0027937199588710703, - "loss": 0.9588, - "step": 2196 - }, - { - "epoch": 0.1689870009999231, - "learning_rate": 0.0027935364814768555, - "loss": 1.1285, - "step": 2197 - }, - { - "epoch": 0.16906391816014152, - "learning_rate": 0.0027933529285516985, - "loss": 0.8729, - "step": 2198 - }, - { - "epoch": 0.16914083532035998, - "learning_rate": 0.002793169300106315, - "loss": 1.1388, - "step": 2199 - }, - { - "epoch": 0.1692177524805784, - "learning_rate": 0.002792985596151429, - "loss": 1.0768, - "step": 2200 - }, - { - "epoch": 0.16929466964079687, - "learning_rate": 0.002792801816697766, - "loss": 0.9755, - "step": 2201 - }, - { - "epoch": 0.1693715868010153, - "learning_rate": 0.002792617961756058, - "loss": 0.8531, - "step": 2202 - }, - { - "epoch": 0.16944850396123376, - "learning_rate": 0.00279243403133704, - "loss": 1.5626, - "step": 2203 - }, - { - "epoch": 0.1695254211214522, - "learning_rate": 0.0027922500254514517, - "loss": 1.0973, - "step": 2204 - }, - { - "epoch": 0.16960233828167065, - "learning_rate": 0.002792065944110038, - "loss": 0.9663, - "step": 2205 - }, - { - "epoch": 0.16967925544188908, - "learning_rate": 0.0027918817873235475, - "loss": 1.0623, - "step": 2206 - }, - { - "epoch": 0.16975617260210754, - "learning_rate": 0.0027916975551027325, - "loss": 1.3159, - "step": 2207 - }, - { - "epoch": 0.16983308976232597, - "learning_rate": 0.0027915132474583514, - "loss": 1.1395, - "step": 2208 - }, - { - "epoch": 0.16991000692254443, - "learning_rate": 0.0027913288644011657, - "loss": 1.2279, - "step": 2209 - }, - { - "epoch": 0.16998692408276286, - "learning_rate": 0.0027911444059419413, - "loss": 1.2001, - "step": 2210 - }, - { - "epoch": 0.17006384124298132, - "learning_rate": 0.0027909598720914502, - "loss": 0.9281, - "step": 2211 - }, - { - "epoch": 0.17014075840319975, - "learning_rate": 0.002790775262860466, - "loss": 0.7931, - "step": 2212 - }, - { - "epoch": 0.17021767556341819, - "learning_rate": 0.0027905905782597703, - "loss": 0.9781, - "step": 2213 - }, - { - "epoch": 0.17029459272363665, - "learning_rate": 0.002790405818300145, - "loss": 0.8806, - "step": 2214 - }, - { - "epoch": 0.17037150988385508, - "learning_rate": 0.002790220982992379, - "loss": 1.2641, - "step": 2215 - }, - { - "epoch": 0.17044842704407354, - "learning_rate": 0.002790036072347265, - "loss": 1.3273, - "step": 2216 - }, - { - "epoch": 0.17052534420429197, - "learning_rate": 0.0027898510863756006, - "loss": 0.8347, - "step": 2217 - }, - { - "epoch": 0.17060226136451043, - "learning_rate": 0.002789666025088187, - "loss": 1.4892, - "step": 2218 - }, - { - "epoch": 0.17067917852472886, - "learning_rate": 
0.00278948088849583, - "loss": 0.8616, - "step": 2219 - }, - { - "epoch": 0.17075609568494732, - "learning_rate": 0.00278929567660934, - "loss": 1.0284, - "step": 2220 - }, - { - "epoch": 0.17083301284516575, - "learning_rate": 0.002789110389439532, - "loss": 0.8165, - "step": 2221 - }, - { - "epoch": 0.1709099300053842, - "learning_rate": 0.0027889250269972246, - "loss": 0.7955, - "step": 2222 - }, - { - "epoch": 0.17098684716560264, - "learning_rate": 0.0027887395892932415, - "loss": 1.1538, - "step": 2223 - }, - { - "epoch": 0.1710637643258211, - "learning_rate": 0.0027885540763384112, - "loss": 1.2271, - "step": 2224 - }, - { - "epoch": 0.17114068148603953, - "learning_rate": 0.0027883684881435653, - "loss": 1.0206, - "step": 2225 - }, - { - "epoch": 0.171217598646258, - "learning_rate": 0.002788182824719541, - "loss": 1.3821, - "step": 2226 - }, - { - "epoch": 0.17129451580647642, - "learning_rate": 0.0027879970860771783, - "loss": 1.13, - "step": 2227 - }, - { - "epoch": 0.17137143296669488, - "learning_rate": 0.002787811272227324, - "loss": 1.5313, - "step": 2228 - }, - { - "epoch": 0.1714483501269133, - "learning_rate": 0.002787625383180827, - "loss": 1.2021, - "step": 2229 - }, - { - "epoch": 0.17152526728713177, - "learning_rate": 0.002787439418948542, - "loss": 1.143, - "step": 2230 - }, - { - "epoch": 0.1716021844473502, - "learning_rate": 0.002787253379541328, - "loss": 0.9585, - "step": 2231 - }, - { - "epoch": 0.17167910160756863, - "learning_rate": 0.0027870672649700475, - "loss": 0.9756, - "step": 2232 - }, - { - "epoch": 0.1717560187677871, - "learning_rate": 0.0027868810752455678, - "loss": 1.1284, - "step": 2233 - }, - { - "epoch": 0.17183293592800553, - "learning_rate": 0.0027866948103787608, - "loss": 1.5215, - "step": 2234 - }, - { - "epoch": 0.17190985308822399, - "learning_rate": 0.0027865084703805033, - "loss": 0.7911, - "step": 2235 - }, - { - "epoch": 0.17198677024844242, - "learning_rate": 0.0027863220552616754, - "loss": 1.0903, - "step": 2236 - }, - { - "epoch": 0.17206368740866088, - "learning_rate": 0.0027861355650331617, - "loss": 1.073, - "step": 2237 - }, - { - "epoch": 0.1721406045688793, - "learning_rate": 0.0027859489997058523, - "loss": 1.1637, - "step": 2238 - }, - { - "epoch": 0.17221752172909777, - "learning_rate": 0.0027857623592906404, - "loss": 1.2024, - "step": 2239 - }, - { - "epoch": 0.1722944388893162, - "learning_rate": 0.002785575643798425, - "loss": 1.2597, - "step": 2240 - }, - { - "epoch": 0.17237135604953466, - "learning_rate": 0.0027853888532401072, - "loss": 1.2187, - "step": 2241 - }, - { - "epoch": 0.1724482732097531, - "learning_rate": 0.0027852019876265953, - "loss": 1.2744, - "step": 2242 - }, - { - "epoch": 0.17252519036997155, - "learning_rate": 0.0027850150469687996, - "loss": 1.0699, - "step": 2243 - }, - { - "epoch": 0.17260210753018998, - "learning_rate": 0.002784828031277636, - "loss": 1.2331, - "step": 2244 - }, - { - "epoch": 0.17267902469040844, - "learning_rate": 0.002784640940564025, - "loss": 1.2178, - "step": 2245 - }, - { - "epoch": 0.17275594185062687, - "learning_rate": 0.0027844537748388898, - "loss": 1.3137, - "step": 2246 - }, - { - "epoch": 0.17283285901084533, - "learning_rate": 0.002784266534113161, - "loss": 1.3004, - "step": 2247 - }, - { - "epoch": 0.17290977617106376, - "learning_rate": 0.00278407921839777, - "loss": 1.21, - "step": 2248 - }, - { - "epoch": 0.17298669333128222, - "learning_rate": 0.0027838918277036558, - "loss": 1.0841, - "step": 2249 - }, - { - "epoch": 0.17306361049150065, - 
"learning_rate": 0.0027837043620417595, - "loss": 1.35, - "step": 2250 - }, - { - "epoch": 0.1731405276517191, - "learning_rate": 0.002783516821423028, - "loss": 0.6665, - "step": 2251 - }, - { - "epoch": 0.17321744481193754, - "learning_rate": 0.0027833292058584117, - "loss": 1.0572, - "step": 2252 - }, - { - "epoch": 0.17329436197215597, - "learning_rate": 0.0027831415153588652, - "loss": 1.0276, - "step": 2253 - }, - { - "epoch": 0.17337127913237443, - "learning_rate": 0.0027829537499353485, - "loss": 1.426, - "step": 2254 - }, - { - "epoch": 0.17344819629259287, - "learning_rate": 0.0027827659095988255, - "loss": 1.1894, - "step": 2255 - }, - { - "epoch": 0.17352511345281132, - "learning_rate": 0.0027825779943602635, - "loss": 1.0175, - "step": 2256 - }, - { - "epoch": 0.17360203061302976, - "learning_rate": 0.0027823900042306363, - "loss": 1.2415, - "step": 2257 - }, - { - "epoch": 0.17367894777324822, - "learning_rate": 0.0027822019392209204, - "loss": 1.3457, - "step": 2258 - }, - { - "epoch": 0.17375586493346665, - "learning_rate": 0.0027820137993420967, - "loss": 1.537, - "step": 2259 - }, - { - "epoch": 0.1738327820936851, - "learning_rate": 0.002781825584605151, - "loss": 1.5206, - "step": 2260 - }, - { - "epoch": 0.17390969925390354, - "learning_rate": 0.0027816372950210744, - "loss": 0.7763, - "step": 2261 - }, - { - "epoch": 0.173986616414122, - "learning_rate": 0.002781448930600859, - "loss": 1.408, - "step": 2262 - }, - { - "epoch": 0.17406353357434043, - "learning_rate": 0.002781260491355506, - "loss": 1.8919, - "step": 2263 - }, - { - "epoch": 0.1741404507345589, - "learning_rate": 0.002781071977296018, - "loss": 1.3851, - "step": 2264 - }, - { - "epoch": 0.17421736789477732, - "learning_rate": 0.002780883388433402, - "loss": 0.706, - "step": 2265 - }, - { - "epoch": 0.17429428505499578, - "learning_rate": 0.002780694724778669, - "loss": 1.2864, - "step": 2266 - }, - { - "epoch": 0.1743712022152142, - "learning_rate": 0.0027805059863428372, - "loss": 1.0764, - "step": 2267 - }, - { - "epoch": 0.17444811937543267, - "learning_rate": 0.0027803171731369263, - "loss": 1.3292, - "step": 2268 - }, - { - "epoch": 0.1745250365356511, - "learning_rate": 0.002780128285171961, - "loss": 1.0075, - "step": 2269 - }, - { - "epoch": 0.17460195369586956, - "learning_rate": 0.0027799393224589714, - "loss": 1.3501, - "step": 2270 - }, - { - "epoch": 0.174678870856088, - "learning_rate": 0.0027797502850089905, - "loss": 0.8623, - "step": 2271 - }, - { - "epoch": 0.17475578801630645, - "learning_rate": 0.0027795611728330567, - "loss": 0.828, - "step": 2272 - }, - { - "epoch": 0.17483270517652488, - "learning_rate": 0.002779371985942213, - "loss": 1.1315, - "step": 2273 - }, - { - "epoch": 0.17490962233674331, - "learning_rate": 0.002779182724347505, - "loss": 1.4622, - "step": 2274 - }, - { - "epoch": 0.17498653949696177, - "learning_rate": 0.0027789933880599848, - "loss": 0.8753, - "step": 2275 - }, - { - "epoch": 0.1750634566571802, - "learning_rate": 0.0027788039770907085, - "loss": 1.1484, - "step": 2276 - }, - { - "epoch": 0.17514037381739866, - "learning_rate": 0.0027786144914507347, - "loss": 1.1718, - "step": 2277 - }, - { - "epoch": 0.1752172909776171, - "learning_rate": 0.0027784249311511282, - "loss": 1.1838, - "step": 2278 - }, - { - "epoch": 0.17529420813783556, - "learning_rate": 0.002778235296202957, - "loss": 1.3006, - "step": 2279 - }, - { - "epoch": 0.175371125298054, - "learning_rate": 0.0027780455866172954, - "loss": 1.0545, - "step": 2280 - }, - { - "epoch": 
0.17544804245827245, - "learning_rate": 0.00277785580240522, - "loss": 1.2178, - "step": 2281 - }, - { - "epoch": 0.17552495961849088, - "learning_rate": 0.002777665943577813, - "loss": 0.7611, - "step": 2282 - }, - { - "epoch": 0.17560187677870934, - "learning_rate": 0.002777476010146159, - "loss": 1.3487, - "step": 2283 - }, - { - "epoch": 0.17567879393892777, - "learning_rate": 0.00277728600212135, - "loss": 1.2143, - "step": 2284 - }, - { - "epoch": 0.17575571109914623, - "learning_rate": 0.00277709591951448, - "loss": 1.2719, - "step": 2285 - }, - { - "epoch": 0.17583262825936466, - "learning_rate": 0.0027769057623366483, - "loss": 1.5202, - "step": 2286 - }, - { - "epoch": 0.17590954541958312, - "learning_rate": 0.0027767155305989583, - "loss": 1.0031, - "step": 2287 - }, - { - "epoch": 0.17598646257980155, - "learning_rate": 0.0027765252243125184, - "loss": 1.3378, - "step": 2288 - }, - { - "epoch": 0.17606337974002, - "learning_rate": 0.0027763348434884398, - "loss": 1.3234, - "step": 2289 - }, - { - "epoch": 0.17614029690023844, - "learning_rate": 0.00277614438813784, - "loss": 1.1323, - "step": 2290 - }, - { - "epoch": 0.1762172140604569, - "learning_rate": 0.0027759538582718393, - "loss": 1.294, - "step": 2291 - }, - { - "epoch": 0.17629413122067533, - "learning_rate": 0.0027757632539015626, - "loss": 1.2474, - "step": 2292 - }, - { - "epoch": 0.1763710483808938, - "learning_rate": 0.00277557257503814, - "loss": 0.7762, - "step": 2293 - }, - { - "epoch": 0.17644796554111222, - "learning_rate": 0.0027753818216927056, - "loss": 1.5522, - "step": 2294 - }, - { - "epoch": 0.17652488270133065, - "learning_rate": 0.0027751909938763973, - "loss": 1.964, - "step": 2295 - }, - { - "epoch": 0.1766017998615491, - "learning_rate": 0.002775000091600358, - "loss": 1.2278, - "step": 2296 - }, - { - "epoch": 0.17667871702176755, - "learning_rate": 0.002774809114875735, - "loss": 1.1071, - "step": 2297 - }, - { - "epoch": 0.176755634181986, - "learning_rate": 0.0027746180637136784, - "loss": 1.3481, - "step": 2298 - }, - { - "epoch": 0.17683255134220444, - "learning_rate": 0.002774426938125345, - "loss": 0.9673, - "step": 2299 - }, - { - "epoch": 0.1769094685024229, - "learning_rate": 0.0027742357381218947, - "loss": 0.9776, - "step": 2300 - }, - { - "epoch": 0.17698638566264133, - "learning_rate": 0.0027740444637144915, - "loss": 0.8895, - "step": 2301 - }, - { - "epoch": 0.17706330282285979, - "learning_rate": 0.0027738531149143044, - "loss": 1.5286, - "step": 2302 - }, - { - "epoch": 0.17714021998307822, - "learning_rate": 0.0027736616917325065, - "loss": 1.1316, - "step": 2303 - }, - { - "epoch": 0.17721713714329668, - "learning_rate": 0.002773470194180275, - "loss": 1.0178, - "step": 2304 - }, - { - "epoch": 0.1772940543035151, - "learning_rate": 0.0027732786222687913, - "loss": 1.1816, - "step": 2305 - }, - { - "epoch": 0.17737097146373357, - "learning_rate": 0.002773086976009242, - "loss": 1.172, - "step": 2306 - }, - { - "epoch": 0.177447888623952, - "learning_rate": 0.0027728952554128172, - "loss": 1.0449, - "step": 2307 - }, - { - "epoch": 0.17752480578417046, - "learning_rate": 0.0027727034604907125, - "loss": 0.8095, - "step": 2308 - }, - { - "epoch": 0.1776017229443889, - "learning_rate": 0.0027725115912541256, - "loss": 0.899, - "step": 2309 - }, - { - "epoch": 0.17767864010460735, - "learning_rate": 0.0027723196477142612, - "loss": 1.2357, - "step": 2310 - }, - { - "epoch": 0.17775555726482578, - "learning_rate": 0.0027721276298823265, - "loss": 1.2257, - "step": 2311 - }, - { - 
"epoch": 0.17783247442504424, - "learning_rate": 0.002771935537769534, - "loss": 1.3586, - "step": 2312 - }, - { - "epoch": 0.17790939158526267, - "learning_rate": 0.0027717433713870996, - "loss": 1.4614, - "step": 2313 - }, - { - "epoch": 0.1779863087454811, - "learning_rate": 0.0027715511307462443, - "loss": 1.0453, - "step": 2314 - }, - { - "epoch": 0.17806322590569956, - "learning_rate": 0.002771358815858193, - "loss": 1.1424, - "step": 2315 - }, - { - "epoch": 0.178140143065918, - "learning_rate": 0.002771166426734176, - "loss": 0.7497, - "step": 2316 - }, - { - "epoch": 0.17821706022613645, - "learning_rate": 0.0027709739633854265, - "loss": 1.0717, - "step": 2317 - }, - { - "epoch": 0.17829397738635488, - "learning_rate": 0.002770781425823182, - "loss": 1.2349, - "step": 2318 - }, - { - "epoch": 0.17837089454657334, - "learning_rate": 0.002770588814058687, - "loss": 1.1313, - "step": 2319 - }, - { - "epoch": 0.17844781170679178, - "learning_rate": 0.0027703961281031857, - "loss": 1.3433, - "step": 2320 - }, - { - "epoch": 0.17852472886701024, - "learning_rate": 0.0027702033679679315, - "loss": 1.1871, - "step": 2321 - }, - { - "epoch": 0.17860164602722867, - "learning_rate": 0.0027700105336641782, - "loss": 1.0887, - "step": 2322 - }, - { - "epoch": 0.17867856318744713, - "learning_rate": 0.0027698176252031865, - "loss": 1.3306, - "step": 2323 - }, - { - "epoch": 0.17875548034766556, - "learning_rate": 0.00276962464259622, - "loss": 1.1755, - "step": 2324 - }, - { - "epoch": 0.17883239750788402, - "learning_rate": 0.0027694315858545477, - "loss": 1.2721, - "step": 2325 - }, - { - "epoch": 0.17890931466810245, - "learning_rate": 0.002769238454989442, - "loss": 1.1679, - "step": 2326 - }, - { - "epoch": 0.1789862318283209, - "learning_rate": 0.0027690452500121806, - "loss": 1.0343, - "step": 2327 - }, - { - "epoch": 0.17906314898853934, - "learning_rate": 0.002768851970934044, - "loss": 0.8905, - "step": 2328 - }, - { - "epoch": 0.1791400661487578, - "learning_rate": 0.0027686586177663184, - "loss": 1.1715, - "step": 2329 - }, - { - "epoch": 0.17921698330897623, - "learning_rate": 0.0027684651905202943, - "loss": 1.1536, - "step": 2330 - }, - { - "epoch": 0.1792939004691947, - "learning_rate": 0.0027682716892072656, - "loss": 1.1649, - "step": 2331 - }, - { - "epoch": 0.17937081762941312, - "learning_rate": 0.0027680781138385313, - "loss": 1.1529, - "step": 2332 - }, - { - "epoch": 0.17944773478963158, - "learning_rate": 0.0027678844644253944, - "loss": 0.8628, - "step": 2333 - }, - { - "epoch": 0.17952465194985, - "learning_rate": 0.002767690740979162, - "loss": 0.9465, - "step": 2334 - }, - { - "epoch": 0.17960156911006844, - "learning_rate": 0.0027674969435111463, - "loss": 1.1379, - "step": 2335 - }, - { - "epoch": 0.1796784862702869, - "learning_rate": 0.002767303072032663, - "loss": 1.0461, - "step": 2336 - }, - { - "epoch": 0.17975540343050533, - "learning_rate": 0.0027671091265550323, - "loss": 1.126, - "step": 2337 - }, - { - "epoch": 0.1798323205907238, - "learning_rate": 0.00276691510708958, - "loss": 1.2887, - "step": 2338 - }, - { - "epoch": 0.17990923775094222, - "learning_rate": 0.0027667210136476336, - "loss": 0.9879, - "step": 2339 - }, - { - "epoch": 0.17998615491116068, - "learning_rate": 0.002766526846240527, - "loss": 1.0731, - "step": 2340 - }, - { - "epoch": 0.18006307207137912, - "learning_rate": 0.002766332604879598, - "loss": 1.0854, - "step": 2341 - }, - { - "epoch": 0.18013998923159757, - "learning_rate": 0.0027661382895761886, - "loss": 1.3078, - "step": 
2342 - }, - { - "epoch": 0.180216906391816, - "learning_rate": 0.0027659439003416453, - "loss": 1.3538, - "step": 2343 - }, - { - "epoch": 0.18029382355203447, - "learning_rate": 0.0027657494371873172, - "loss": 1.092, - "step": 2344 - }, - { - "epoch": 0.1803707407122529, - "learning_rate": 0.002765554900124561, - "loss": 1.2404, - "step": 2345 - }, - { - "epoch": 0.18044765787247136, - "learning_rate": 0.0027653602891647357, - "loss": 1.1784, - "step": 2346 - }, - { - "epoch": 0.1805245750326898, - "learning_rate": 0.0027651656043192036, - "loss": 1.2711, - "step": 2347 - }, - { - "epoch": 0.18060149219290825, - "learning_rate": 0.002764970845599334, - "loss": 0.9122, - "step": 2348 - }, - { - "epoch": 0.18067840935312668, - "learning_rate": 0.002764776013016498, - "loss": 1.0016, - "step": 2349 - }, - { - "epoch": 0.18075532651334514, - "learning_rate": 0.0027645811065820725, - "loss": 1.0989, - "step": 2350 - }, - { - "epoch": 0.18083224367356357, - "learning_rate": 0.0027643861263074387, - "loss": 1.3739, - "step": 2351 - }, - { - "epoch": 0.18090916083378203, - "learning_rate": 0.002764191072203981, - "loss": 1.2372, - "step": 2352 - }, - { - "epoch": 0.18098607799400046, - "learning_rate": 0.002763995944283089, - "loss": 0.8442, - "step": 2353 - }, - { - "epoch": 0.18106299515421892, - "learning_rate": 0.0027638007425561566, - "loss": 0.8965, - "step": 2354 - }, - { - "epoch": 0.18113991231443735, - "learning_rate": 0.0027636054670345813, - "loss": 0.8469, - "step": 2355 - }, - { - "epoch": 0.18121682947465578, - "learning_rate": 0.002763410117729767, - "loss": 1.1008, - "step": 2356 - }, - { - "epoch": 0.18129374663487424, - "learning_rate": 0.002763214694653118, - "loss": 1.0531, - "step": 2357 - }, - { - "epoch": 0.18137066379509267, - "learning_rate": 0.002763019197816047, - "loss": 0.9141, - "step": 2358 - }, - { - "epoch": 0.18144758095531113, - "learning_rate": 0.0027628236272299687, - "loss": 0.9273, - "step": 2359 - }, - { - "epoch": 0.18152449811552956, - "learning_rate": 0.002762627982906303, - "loss": 1.2542, - "step": 2360 - }, - { - "epoch": 0.18160141527574802, - "learning_rate": 0.0027624322648564727, - "loss": 1.2319, - "step": 2361 - }, - { - "epoch": 0.18167833243596646, - "learning_rate": 0.0027622364730919076, - "loss": 0.8684, - "step": 2362 - }, - { - "epoch": 0.18175524959618491, - "learning_rate": 0.002762040607624039, - "loss": 1.0326, - "step": 2363 - }, - { - "epoch": 0.18183216675640335, - "learning_rate": 0.0027618446684643046, - "loss": 1.1698, - "step": 2364 - }, - { - "epoch": 0.1819090839166218, - "learning_rate": 0.002761648655624144, - "loss": 1.4721, - "step": 2365 - }, - { - "epoch": 0.18198600107684024, - "learning_rate": 0.002761452569115004, - "loss": 1.4152, - "step": 2366 - }, - { - "epoch": 0.1820629182370587, - "learning_rate": 0.002761256408948334, - "loss": 1.2847, - "step": 2367 - }, - { - "epoch": 0.18213983539727713, - "learning_rate": 0.002761060175135588, - "loss": 1.0758, - "step": 2368 - }, - { - "epoch": 0.1822167525574956, - "learning_rate": 0.002760863867688224, - "loss": 1.4763, - "step": 2369 - }, - { - "epoch": 0.18229366971771402, - "learning_rate": 0.002760667486617704, - "loss": 0.9241, - "step": 2370 - }, - { - "epoch": 0.18237058687793248, - "learning_rate": 0.002760471031935497, - "loss": 1.1661, - "step": 2371 - }, - { - "epoch": 0.1824475040381509, - "learning_rate": 0.0027602745036530716, - "loss": 0.9507, - "step": 2372 - }, - { - "epoch": 0.18252442119836937, - "learning_rate": 0.002760077901781905, - "loss": 
1.3158, - "step": 2373 - }, - { - "epoch": 0.1826013383585878, - "learning_rate": 0.0027598812263334763, - "loss": 1.2181, - "step": 2374 - }, - { - "epoch": 0.18267825551880623, - "learning_rate": 0.0027596844773192697, - "loss": 1.3014, - "step": 2375 - }, - { - "epoch": 0.1827551726790247, - "learning_rate": 0.0027594876547507737, - "loss": 1.0981, - "step": 2376 - }, - { - "epoch": 0.18283208983924312, - "learning_rate": 0.0027592907586394813, - "loss": 1.2466, - "step": 2377 - }, - { - "epoch": 0.18290900699946158, - "learning_rate": 0.002759093788996889, - "loss": 1.2532, - "step": 2378 - }, - { - "epoch": 0.18298592415968, - "learning_rate": 0.0027588967458344977, - "loss": 1.1043, - "step": 2379 - }, - { - "epoch": 0.18306284131989847, - "learning_rate": 0.002758699629163814, - "loss": 1.0542, - "step": 2380 - }, - { - "epoch": 0.1831397584801169, - "learning_rate": 0.002758502438996347, - "loss": 1.0792, - "step": 2381 - }, - { - "epoch": 0.18321667564033536, - "learning_rate": 0.0027583051753436107, - "loss": 1.3526, - "step": 2382 - }, - { - "epoch": 0.1832935928005538, - "learning_rate": 0.0027581078382171243, - "loss": 0.8125, - "step": 2383 - }, - { - "epoch": 0.18337050996077225, - "learning_rate": 0.0027579104276284094, - "loss": 1.2082, - "step": 2384 - }, - { - "epoch": 0.18344742712099069, - "learning_rate": 0.002757712943588994, - "loss": 1.0168, - "step": 2385 - }, - { - "epoch": 0.18352434428120915, - "learning_rate": 0.002757515386110409, - "loss": 1.36, - "step": 2386 - }, - { - "epoch": 0.18360126144142758, - "learning_rate": 0.002757317755204191, - "loss": 1.3223, - "step": 2387 - }, - { - "epoch": 0.18367817860164604, - "learning_rate": 0.0027571200508818778, - "loss": 1.367, - "step": 2388 - }, - { - "epoch": 0.18375509576186447, - "learning_rate": 0.0027569222731550153, - "loss": 1.4544, - "step": 2389 - }, - { - "epoch": 0.18383201292208293, - "learning_rate": 0.0027567244220351506, - "loss": 1.2253, - "step": 2390 - }, - { - "epoch": 0.18390893008230136, - "learning_rate": 0.002756526497533838, - "loss": 1.4035, - "step": 2391 - }, - { - "epoch": 0.18398584724251982, - "learning_rate": 0.002756328499662633, - "loss": 0.9734, - "step": 2392 - }, - { - "epoch": 0.18406276440273825, - "learning_rate": 0.0027561304284330983, - "loss": 1.2042, - "step": 2393 - }, - { - "epoch": 0.1841396815629567, - "learning_rate": 0.0027559322838567983, - "loss": 1.3424, - "step": 2394 - }, - { - "epoch": 0.18421659872317514, - "learning_rate": 0.002755734065945303, - "loss": 0.8253, - "step": 2395 - }, - { - "epoch": 0.18429351588339357, - "learning_rate": 0.0027555357747101874, - "loss": 1.2893, - "step": 2396 - }, - { - "epoch": 0.18437043304361203, - "learning_rate": 0.00275533741016303, - "loss": 1.082, - "step": 2397 - }, - { - "epoch": 0.18444735020383046, - "learning_rate": 0.002755138972315412, - "loss": 1.2415, - "step": 2398 - }, - { - "epoch": 0.18452426736404892, - "learning_rate": 0.002754940461178922, - "loss": 1.0947, - "step": 2399 - }, - { - "epoch": 0.18460118452426735, - "learning_rate": 0.00275474187676515, - "loss": 1.0091, - "step": 2400 - }, - { - "epoch": 0.1846781016844858, - "learning_rate": 0.0027545432190856922, - "loss": 1.1793, - "step": 2401 - }, - { - "epoch": 0.18475501884470424, - "learning_rate": 0.0027543444881521486, - "loss": 1.3007, - "step": 2402 - }, - { - "epoch": 0.1848319360049227, - "learning_rate": 0.002754145683976123, - "loss": 1.3381, - "step": 2403 - }, - { - "epoch": 0.18490885316514113, - "learning_rate": 0.002753946806569224, 
- "loss": 1.0557, - "step": 2404 - }, - { - "epoch": 0.1849857703253596, - "learning_rate": 0.002753747855943064, - "loss": 1.3429, - "step": 2405 - }, - { - "epoch": 0.18506268748557803, - "learning_rate": 0.00275354883210926, - "loss": 0.9561, - "step": 2406 - }, - { - "epoch": 0.18513960464579649, - "learning_rate": 0.0027533497350794335, - "loss": 1.0618, - "step": 2407 - }, - { - "epoch": 0.18521652180601492, - "learning_rate": 0.0027531505648652095, - "loss": 1.359, - "step": 2408 - }, - { - "epoch": 0.18529343896623338, - "learning_rate": 0.0027529513214782186, - "loss": 1.0228, - "step": 2409 - }, - { - "epoch": 0.1853703561264518, - "learning_rate": 0.0027527520049300935, - "loss": 1.1886, - "step": 2410 - }, - { - "epoch": 0.18544727328667027, - "learning_rate": 0.0027525526152324736, - "loss": 1.165, - "step": 2411 - }, - { - "epoch": 0.1855241904468887, - "learning_rate": 0.002752353152397001, - "loss": 0.9727, - "step": 2412 - }, - { - "epoch": 0.18560110760710716, - "learning_rate": 0.0027521536164353227, - "loss": 0.8979, - "step": 2413 - }, - { - "epoch": 0.1856780247673256, - "learning_rate": 0.00275195400735909, - "loss": 0.971, - "step": 2414 - }, - { - "epoch": 0.18575494192754405, - "learning_rate": 0.002751754325179958, - "loss": 1.3337, - "step": 2415 - }, - { - "epoch": 0.18583185908776248, - "learning_rate": 0.0027515545699095863, - "loss": 0.9449, - "step": 2416 - }, - { - "epoch": 0.1859087762479809, - "learning_rate": 0.002751354741559639, - "loss": 1.4593, - "step": 2417 - }, - { - "epoch": 0.18598569340819937, - "learning_rate": 0.002751154840141784, - "loss": 1.0102, - "step": 2418 - }, - { - "epoch": 0.1860626105684178, - "learning_rate": 0.002750954865667694, - "loss": 1.1943, - "step": 2419 - }, - { - "epoch": 0.18613952772863626, - "learning_rate": 0.0027507548181490457, - "loss": 1.2264, - "step": 2420 - }, - { - "epoch": 0.1862164448888547, - "learning_rate": 0.0027505546975975202, - "loss": 1.3027, - "step": 2421 - }, - { - "epoch": 0.18629336204907315, - "learning_rate": 0.0027503545040248024, - "loss": 1.3922, - "step": 2422 - }, - { - "epoch": 0.18637027920929158, - "learning_rate": 0.002750154237442582, - "loss": 1.252, - "step": 2423 - }, - { - "epoch": 0.18644719636951004, - "learning_rate": 0.0027499538978625533, - "loss": 1.1408, - "step": 2424 - }, - { - "epoch": 0.18652411352972847, - "learning_rate": 0.0027497534852964135, - "loss": 1.3712, - "step": 2425 - }, - { - "epoch": 0.18660103068994693, - "learning_rate": 0.002749552999755865, - "loss": 1.067, - "step": 2426 - }, - { - "epoch": 0.18667794785016537, - "learning_rate": 0.002749352441252615, - "loss": 1.0006, - "step": 2427 - }, - { - "epoch": 0.18675486501038382, - "learning_rate": 0.0027491518097983733, - "loss": 1.1334, - "step": 2428 - }, - { - "epoch": 0.18683178217060226, - "learning_rate": 0.002748951105404856, - "loss": 0.9525, - "step": 2429 - }, - { - "epoch": 0.18690869933082072, - "learning_rate": 0.0027487503280837823, - "loss": 0.7266, - "step": 2430 - }, - { - "epoch": 0.18698561649103915, - "learning_rate": 0.0027485494778468752, - "loss": 1.1159, - "step": 2431 - }, - { - "epoch": 0.1870625336512576, - "learning_rate": 0.002748348554705863, - "loss": 1.356, - "step": 2432 - }, - { - "epoch": 0.18713945081147604, - "learning_rate": 0.0027481475586724775, - "loss": 1.0046, - "step": 2433 - }, - { - "epoch": 0.1872163679716945, - "learning_rate": 0.0027479464897584554, - "loss": 1.0578, - "step": 2434 - }, - { - "epoch": 0.18729328513191293, - "learning_rate": 
0.002747745347975537, - "loss": 0.8953, - "step": 2435 - }, - { - "epoch": 0.1873702022921314, - "learning_rate": 0.002747544133335468, - "loss": 1.1441, - "step": 2436 - }, - { - "epoch": 0.18744711945234982, - "learning_rate": 0.0027473428458499967, - "loss": 1.2491, - "step": 2437 - }, - { - "epoch": 0.18752403661256825, - "learning_rate": 0.002747141485530876, - "loss": 1.2872, - "step": 2438 - }, - { - "epoch": 0.1876009537727867, - "learning_rate": 0.002746940052389865, - "loss": 1.0918, - "step": 2439 - }, - { - "epoch": 0.18767787093300514, - "learning_rate": 0.0027467385464387244, - "loss": 1.0533, - "step": 2440 - }, - { - "epoch": 0.1877547880932236, - "learning_rate": 0.0027465369676892204, - "loss": 1.1003, - "step": 2441 - }, - { - "epoch": 0.18783170525344203, - "learning_rate": 0.0027463353161531244, - "loss": 1.1003, - "step": 2442 - }, - { - "epoch": 0.1879086224136605, - "learning_rate": 0.00274613359184221, - "loss": 1.1075, - "step": 2443 - }, - { - "epoch": 0.18798553957387892, - "learning_rate": 0.002745931794768257, - "loss": 0.9396, - "step": 2444 - }, - { - "epoch": 0.18806245673409738, - "learning_rate": 0.0027457299249430476, - "loss": 1.3618, - "step": 2445 - }, - { - "epoch": 0.18813937389431581, - "learning_rate": 0.0027455279823783694, - "loss": 1.0066, - "step": 2446 - }, - { - "epoch": 0.18821629105453427, - "learning_rate": 0.0027453259670860144, - "loss": 1.2253, - "step": 2447 - }, - { - "epoch": 0.1882932082147527, - "learning_rate": 0.002745123879077779, - "loss": 1.2503, - "step": 2448 - }, - { - "epoch": 0.18837012537497116, - "learning_rate": 0.0027449217183654617, - "loss": 0.852, - "step": 2449 - }, - { - "epoch": 0.1884470425351896, - "learning_rate": 0.0027447194849608684, - "loss": 1.2111, - "step": 2450 - }, - { - "epoch": 0.18852395969540806, - "learning_rate": 0.0027445171788758067, - "loss": 0.8599, - "step": 2451 - }, - { - "epoch": 0.1886008768556265, - "learning_rate": 0.0027443148001220904, - "loss": 1.1196, - "step": 2452 - }, - { - "epoch": 0.18867779401584495, - "learning_rate": 0.002744112348711536, - "loss": 0.9139, - "step": 2453 - }, - { - "epoch": 0.18875471117606338, - "learning_rate": 0.002743909824655965, - "loss": 1.205, - "step": 2454 - }, - { - "epoch": 0.18883162833628184, - "learning_rate": 0.002743707227967203, - "loss": 1.1818, - "step": 2455 - }, - { - "epoch": 0.18890854549650027, - "learning_rate": 0.0027435045586570793, - "loss": 1.8483, - "step": 2456 - }, - { - "epoch": 0.1889854626567187, - "learning_rate": 0.002743301816737429, - "loss": 0.9127, - "step": 2457 - }, - { - "epoch": 0.18906237981693716, - "learning_rate": 0.0027430990022200897, - "loss": 1.0292, - "step": 2458 - }, - { - "epoch": 0.1891392969771556, - "learning_rate": 0.0027428961151169046, - "loss": 0.9004, - "step": 2459 - }, - { - "epoch": 0.18921621413737405, - "learning_rate": 0.0027426931554397193, - "loss": 1.0631, - "step": 2460 - }, - { - "epoch": 0.18929313129759248, - "learning_rate": 0.002742490123200386, - "loss": 1.1435, - "step": 2461 - }, - { - "epoch": 0.18937004845781094, - "learning_rate": 0.0027422870184107594, - "loss": 1.2402, - "step": 2462 - }, - { - "epoch": 0.18944696561802937, - "learning_rate": 0.002742083841082699, - "loss": 1.1791, - "step": 2463 - }, - { - "epoch": 0.18952388277824783, - "learning_rate": 0.002741880591228069, - "loss": 1.0982, - "step": 2464 - }, - { - "epoch": 0.18960079993846626, - "learning_rate": 0.0027416772688587366, - "loss": 1.0543, - "step": 2465 - }, - { - "epoch": 0.18967771709868472, - 
"learning_rate": 0.0027414738739865744, - "loss": 1.6758, - "step": 2466 - }, - { - "epoch": 0.18975463425890315, - "learning_rate": 0.0027412704066234594, - "loss": 1.0502, - "step": 2467 - }, - { - "epoch": 0.1898315514191216, - "learning_rate": 0.0027410668667812712, - "loss": 1.0975, - "step": 2468 - }, - { - "epoch": 0.18990846857934005, - "learning_rate": 0.0027408632544718954, - "loss": 1.3923, - "step": 2469 - }, - { - "epoch": 0.1899853857395585, - "learning_rate": 0.002740659569707221, - "loss": 1.1496, - "step": 2470 - }, - { - "epoch": 0.19006230289977694, - "learning_rate": 0.0027404558124991415, - "loss": 1.1202, - "step": 2471 - }, - { - "epoch": 0.1901392200599954, - "learning_rate": 0.0027402519828595537, - "loss": 1.1255, - "step": 2472 - }, - { - "epoch": 0.19021613722021383, - "learning_rate": 0.002740048080800361, - "loss": 0.8023, - "step": 2473 - }, - { - "epoch": 0.19029305438043229, - "learning_rate": 0.002739844106333468, - "loss": 1.0702, - "step": 2474 - }, - { - "epoch": 0.19036997154065072, - "learning_rate": 0.0027396400594707853, - "loss": 1.4331, - "step": 2475 - }, - { - "epoch": 0.19044688870086918, - "learning_rate": 0.002739435940224228, - "loss": 0.8715, - "step": 2476 - }, - { - "epoch": 0.1905238058610876, - "learning_rate": 0.0027392317486057147, - "loss": 1.0336, - "step": 2477 - }, - { - "epoch": 0.19060072302130604, - "learning_rate": 0.0027390274846271676, - "loss": 1.3031, - "step": 2478 - }, - { - "epoch": 0.1906776401815245, - "learning_rate": 0.0027388231483005143, - "loss": 1.8521, - "step": 2479 - }, - { - "epoch": 0.19075455734174293, - "learning_rate": 0.0027386187396376866, - "loss": 1.0224, - "step": 2480 - }, - { - "epoch": 0.1908314745019614, - "learning_rate": 0.00273841425865062, - "loss": 1.1065, - "step": 2481 - }, - { - "epoch": 0.19090839166217982, - "learning_rate": 0.0027382097053512537, - "loss": 1.4644, - "step": 2482 - }, - { - "epoch": 0.19098530882239828, - "learning_rate": 0.002738005079751532, - "loss": 1.2938, - "step": 2483 - }, - { - "epoch": 0.1910622259826167, - "learning_rate": 0.0027378003818634045, - "loss": 1.2478, - "step": 2484 - }, - { - "epoch": 0.19113914314283517, - "learning_rate": 0.002737595611698822, - "loss": 1.1379, - "step": 2485 - }, - { - "epoch": 0.1912160603030536, - "learning_rate": 0.0027373907692697422, - "loss": 1.4759, - "step": 2486 - }, - { - "epoch": 0.19129297746327206, - "learning_rate": 0.0027371858545881256, - "loss": 0.9392, - "step": 2487 - }, - { - "epoch": 0.1913698946234905, - "learning_rate": 0.0027369808676659378, - "loss": 1.0521, - "step": 2488 - }, - { - "epoch": 0.19144681178370895, - "learning_rate": 0.002736775808515148, - "loss": 1.143, - "step": 2489 - }, - { - "epoch": 0.19152372894392738, - "learning_rate": 0.0027365706771477296, - "loss": 1.0759, - "step": 2490 - }, - { - "epoch": 0.19160064610414584, - "learning_rate": 0.00273636547357566, - "loss": 1.4833, - "step": 2491 - }, - { - "epoch": 0.19167756326436428, - "learning_rate": 0.002736160197810923, - "loss": 1.121, - "step": 2492 - }, - { - "epoch": 0.19175448042458274, - "learning_rate": 0.0027359548498655036, - "loss": 0.9887, - "step": 2493 - }, - { - "epoch": 0.19183139758480117, - "learning_rate": 0.002735749429751392, - "loss": 1.022, - "step": 2494 - }, - { - "epoch": 0.19190831474501963, - "learning_rate": 0.0027355439374805836, - "loss": 1.4389, - "step": 2495 - }, - { - "epoch": 0.19198523190523806, - "learning_rate": 0.002735338373065077, - "loss": 0.9638, - "step": 2496 - }, - { - "epoch": 
0.19206214906545652, - "learning_rate": 0.002735132736516875, - "loss": 1.2374, - "step": 2497 - }, - { - "epoch": 0.19213906622567495, - "learning_rate": 0.002734927027847986, - "loss": 1.0329, - "step": 2498 - }, - { - "epoch": 0.19221598338589338, - "learning_rate": 0.0027347212470704204, - "loss": 0.9132, - "step": 2499 - }, - { - "epoch": 0.19229290054611184, - "learning_rate": 0.0027345153941961946, - "loss": 1.1657, - "step": 2500 - }, - { - "epoch": 0.19236981770633027, - "learning_rate": 0.002734309469237328, - "loss": 1.459, - "step": 2501 - }, - { - "epoch": 0.19244673486654873, - "learning_rate": 0.0027341034722058455, - "loss": 1.1164, - "step": 2502 - }, - { - "epoch": 0.19252365202676716, - "learning_rate": 0.0027338974031137745, - "loss": 1.3067, - "step": 2503 - }, - { - "epoch": 0.19260056918698562, - "learning_rate": 0.002733691261973149, - "loss": 1.2746, - "step": 2504 - }, - { - "epoch": 0.19267748634720405, - "learning_rate": 0.0027334850487960045, - "loss": 0.8684, - "step": 2505 - }, - { - "epoch": 0.1927544035074225, - "learning_rate": 0.0027332787635943826, - "loss": 1.1647, - "step": 2506 - }, - { - "epoch": 0.19283132066764094, - "learning_rate": 0.0027330724063803287, - "loss": 1.6763, - "step": 2507 - }, - { - "epoch": 0.1929082378278594, - "learning_rate": 0.0027328659771658913, - "loss": 0.8732, - "step": 2508 - }, - { - "epoch": 0.19298515498807783, - "learning_rate": 0.0027326594759631254, - "loss": 1.2196, - "step": 2509 - }, - { - "epoch": 0.1930620721482963, - "learning_rate": 0.0027324529027840873, - "loss": 1.0564, - "step": 2510 - }, - { - "epoch": 0.19313898930851472, - "learning_rate": 0.00273224625764084, - "loss": 0.8968, - "step": 2511 - }, - { - "epoch": 0.19321590646873318, - "learning_rate": 0.0027320395405454495, - "loss": 1.2223, - "step": 2512 - }, - { - "epoch": 0.19329282362895162, - "learning_rate": 0.002731832751509986, - "loss": 0.7521, - "step": 2513 - }, - { - "epoch": 0.19336974078917007, - "learning_rate": 0.002731625890546525, - "loss": 1.1269, - "step": 2514 - }, - { - "epoch": 0.1934466579493885, - "learning_rate": 0.0027314189576671446, - "loss": 1.0745, - "step": 2515 - }, - { - "epoch": 0.19352357510960697, - "learning_rate": 0.0027312119528839275, - "loss": 1.1512, - "step": 2516 - }, - { - "epoch": 0.1936004922698254, - "learning_rate": 0.0027310048762089617, - "loss": 1.1137, - "step": 2517 - }, - { - "epoch": 0.19367740943004386, - "learning_rate": 0.002730797727654338, - "loss": 1.2492, - "step": 2518 - }, - { - "epoch": 0.1937543265902623, - "learning_rate": 0.002730590507232152, - "loss": 1.2935, - "step": 2519 - }, - { - "epoch": 0.19383124375048072, - "learning_rate": 0.002730383214954504, - "loss": 1.3263, - "step": 2520 - }, - { - "epoch": 0.19390816091069918, - "learning_rate": 0.002730175850833498, - "loss": 1.1364, - "step": 2521 - }, - { - "epoch": 0.1939850780709176, - "learning_rate": 0.002729968414881242, - "loss": 1.0328, - "step": 2522 - }, - { - "epoch": 0.19406199523113607, - "learning_rate": 0.0027297609071098485, - "loss": 1.3088, - "step": 2523 - }, - { - "epoch": 0.1941389123913545, - "learning_rate": 0.002729553327531434, - "loss": 1.1823, - "step": 2524 - }, - { - "epoch": 0.19421582955157296, - "learning_rate": 0.002729345676158119, - "loss": 1.1516, - "step": 2525 - }, - { - "epoch": 0.1942927467117914, - "learning_rate": 0.0027291379530020293, - "loss": 1.0633, - "step": 2526 - }, - { - "epoch": 0.19436966387200985, - "learning_rate": 0.0027289301580752925, - "loss": 1.0713, - "step": 2527 - 
}, - { - "epoch": 0.19444658103222828, - "learning_rate": 0.002728722291390044, - "loss": 1.4266, - "step": 2528 - }, - { - "epoch": 0.19452349819244674, - "learning_rate": 0.00272851435295842, - "loss": 0.9635, - "step": 2529 - }, - { - "epoch": 0.19460041535266517, - "learning_rate": 0.0027283063427925624, - "loss": 1.653, - "step": 2530 - }, - { - "epoch": 0.19467733251288363, - "learning_rate": 0.0027280982609046178, - "loss": 0.9572, - "step": 2531 - }, - { - "epoch": 0.19475424967310206, - "learning_rate": 0.0027278901073067353, - "loss": 1.4323, - "step": 2532 - }, - { - "epoch": 0.19483116683332052, - "learning_rate": 0.00272768188201107, - "loss": 1.4142, - "step": 2533 - }, - { - "epoch": 0.19490808399353896, - "learning_rate": 0.00272747358502978, - "loss": 1.173, - "step": 2534 - }, - { - "epoch": 0.19498500115375741, - "learning_rate": 0.002727265216375029, - "loss": 1.0115, - "step": 2535 - }, - { - "epoch": 0.19506191831397585, - "learning_rate": 0.0027270567760589825, - "loss": 1.2721, - "step": 2536 - }, - { - "epoch": 0.1951388354741943, - "learning_rate": 0.0027268482640938115, - "loss": 1.1139, - "step": 2537 - }, - { - "epoch": 0.19521575263441274, - "learning_rate": 0.0027266396804916924, - "loss": 1.286, - "step": 2538 - }, - { - "epoch": 0.19529266979463117, - "learning_rate": 0.0027264310252648034, - "loss": 0.7293, - "step": 2539 - }, - { - "epoch": 0.19536958695484963, - "learning_rate": 0.002726222298425329, - "loss": 1.8487, - "step": 2540 - }, - { - "epoch": 0.19544650411506806, - "learning_rate": 0.002726013499985457, - "loss": 1.2382, - "step": 2541 - }, - { - "epoch": 0.19552342127528652, - "learning_rate": 0.0027258046299573784, - "loss": 1.0999, - "step": 2542 - }, - { - "epoch": 0.19560033843550495, - "learning_rate": 0.0027255956883532905, - "loss": 1.1137, - "step": 2543 - }, - { - "epoch": 0.1956772555957234, - "learning_rate": 0.0027253866751853926, - "loss": 0.9086, - "step": 2544 - }, - { - "epoch": 0.19575417275594184, - "learning_rate": 0.00272517759046589, - "loss": 1.0784, - "step": 2545 - }, - { - "epoch": 0.1958310899161603, - "learning_rate": 0.002724968434206991, - "loss": 1.2894, - "step": 2546 - }, - { - "epoch": 0.19590800707637873, - "learning_rate": 0.0027247592064209084, - "loss": 1.22, - "step": 2547 - }, - { - "epoch": 0.1959849242365972, - "learning_rate": 0.002724549907119859, - "loss": 0.5908, - "step": 2548 - }, - { - "epoch": 0.19606184139681562, - "learning_rate": 0.002724340536316065, - "loss": 0.9553, - "step": 2549 - }, - { - "epoch": 0.19613875855703408, - "learning_rate": 0.0027241310940217507, - "loss": 1.5002, - "step": 2550 - }, - { - "epoch": 0.1962156757172525, - "learning_rate": 0.0027239215802491463, - "loss": 1.4841, - "step": 2551 - }, - { - "epoch": 0.19629259287747097, - "learning_rate": 0.0027237119950104852, - "loss": 1.0263, - "step": 2552 - }, - { - "epoch": 0.1963695100376894, - "learning_rate": 0.002723502338318005, - "loss": 0.772, - "step": 2553 - }, - { - "epoch": 0.19644642719790786, - "learning_rate": 0.002723292610183949, - "loss": 1.1455, - "step": 2554 - }, - { - "epoch": 0.1965233443581263, - "learning_rate": 0.002723082810620562, - "loss": 1.0296, - "step": 2555 - }, - { - "epoch": 0.19660026151834475, - "learning_rate": 0.002722872939640095, - "loss": 1.2724, - "step": 2556 - }, - { - "epoch": 0.19667717867856319, - "learning_rate": 0.002722662997254803, - "loss": 1.2582, - "step": 2557 - }, - { - "epoch": 0.19675409583878165, - "learning_rate": 0.0027224529834769438, - "loss": 1.3994, - 
"step": 2558 - }, - { - "epoch": 0.19683101299900008, - "learning_rate": 0.002722242898318781, - "loss": 1.1654, - "step": 2559 - }, - { - "epoch": 0.1969079301592185, - "learning_rate": 0.002722032741792582, - "loss": 1.2423, - "step": 2560 - }, - { - "epoch": 0.19698484731943697, - "learning_rate": 0.0027218225139106172, - "loss": 0.9579, - "step": 2561 - }, - { - "epoch": 0.1970617644796554, - "learning_rate": 0.002721612214685163, - "loss": 1.0188, - "step": 2562 - }, - { - "epoch": 0.19713868163987386, - "learning_rate": 0.0027214018441284976, - "loss": 1.2168, - "step": 2563 - }, - { - "epoch": 0.1972155988000923, - "learning_rate": 0.0027211914022529063, - "loss": 0.9237, - "step": 2564 - }, - { - "epoch": 0.19729251596031075, - "learning_rate": 0.002720980889070676, - "loss": 1.4665, - "step": 2565 - }, - { - "epoch": 0.19736943312052918, - "learning_rate": 0.002720770304594099, - "loss": 1.1105, - "step": 2566 - }, - { - "epoch": 0.19744635028074764, - "learning_rate": 0.0027205596488354723, - "loss": 1.3942, - "step": 2567 - }, - { - "epoch": 0.19752326744096607, - "learning_rate": 0.002720348921807095, - "loss": 1.0266, - "step": 2568 - }, - { - "epoch": 0.19760018460118453, - "learning_rate": 0.0027201381235212726, - "loss": 1.2189, - "step": 2569 - }, - { - "epoch": 0.19767710176140296, - "learning_rate": 0.0027199272539903136, - "loss": 1.0854, - "step": 2570 - }, - { - "epoch": 0.19775401892162142, - "learning_rate": 0.0027197163132265307, - "loss": 1.4957, - "step": 2571 - }, - { - "epoch": 0.19783093608183985, - "learning_rate": 0.0027195053012422413, - "loss": 1.0693, - "step": 2572 - }, - { - "epoch": 0.1979078532420583, - "learning_rate": 0.0027192942180497663, - "loss": 0.7972, - "step": 2573 - }, - { - "epoch": 0.19798477040227674, - "learning_rate": 0.0027190830636614313, - "loss": 1.1248, - "step": 2574 - }, - { - "epoch": 0.1980616875624952, - "learning_rate": 0.002718871838089566, - "loss": 1.4307, - "step": 2575 - }, - { - "epoch": 0.19813860472271363, - "learning_rate": 0.002718660541346503, - "loss": 1.168, - "step": 2576 - }, - { - "epoch": 0.1982155218829321, - "learning_rate": 0.0027184491734445816, - "loss": 1.1479, - "step": 2577 - }, - { - "epoch": 0.19829243904315053, - "learning_rate": 0.002718237734396143, - "loss": 1.3318, - "step": 2578 - }, - { - "epoch": 0.19836935620336899, - "learning_rate": 0.0027180262242135333, - "loss": 0.8992, - "step": 2579 - }, - { - "epoch": 0.19844627336358742, - "learning_rate": 0.002717814642909103, - "loss": 1.1816, - "step": 2580 - }, - { - "epoch": 0.19852319052380585, - "learning_rate": 0.0027176029904952066, - "loss": 1.1761, - "step": 2581 - }, - { - "epoch": 0.1986001076840243, - "learning_rate": 0.0027173912669842025, - "loss": 1.6291, - "step": 2582 - }, - { - "epoch": 0.19867702484424274, - "learning_rate": 0.002717179472388454, - "loss": 1.3271, - "step": 2583 - }, - { - "epoch": 0.1987539420044612, - "learning_rate": 0.0027169676067203274, - "loss": 1.136, - "step": 2584 - }, - { - "epoch": 0.19883085916467963, - "learning_rate": 0.0027167556699921936, - "loss": 1.3296, - "step": 2585 - }, - { - "epoch": 0.1989077763248981, - "learning_rate": 0.0027165436622164285, - "loss": 1.1308, - "step": 2586 - }, - { - "epoch": 0.19898469348511652, - "learning_rate": 0.002716331583405411, - "loss": 1.2838, - "step": 2587 - }, - { - "epoch": 0.19906161064533498, - "learning_rate": 0.0027161194335715245, - "loss": 1.2179, - "step": 2588 - }, - { - "epoch": 0.1991385278055534, - "learning_rate": 0.0027159072127271575, - 
"loss": 1.3697, - "step": 2589 - }, - { - "epoch": 0.19921544496577187, - "learning_rate": 0.0027156949208847004, - "loss": 1.3041, - "step": 2590 - }, - { - "epoch": 0.1992923621259903, - "learning_rate": 0.0027154825580565506, - "loss": 0.8961, - "step": 2591 - }, - { - "epoch": 0.19936927928620876, - "learning_rate": 0.0027152701242551067, - "loss": 1.1366, - "step": 2592 - }, - { - "epoch": 0.1994461964464272, - "learning_rate": 0.0027150576194927747, - "loss": 1.0879, - "step": 2593 - }, - { - "epoch": 0.19952311360664565, - "learning_rate": 0.002714845043781961, - "loss": 1.441, - "step": 2594 - }, - { - "epoch": 0.19960003076686408, - "learning_rate": 0.00271463239713508, - "loss": 1.2458, - "step": 2595 - }, - { - "epoch": 0.19967694792708254, - "learning_rate": 0.002714419679564547, - "loss": 0.899, - "step": 2596 - }, - { - "epoch": 0.19975386508730097, - "learning_rate": 0.0027142068910827833, - "loss": 1.0908, - "step": 2597 - }, - { - "epoch": 0.19983078224751943, - "learning_rate": 0.0027139940317022137, - "loss": 1.3489, - "step": 2598 - }, - { - "epoch": 0.19990769940773787, - "learning_rate": 0.002713781101435268, - "loss": 1.4459, - "step": 2599 - }, - { - "epoch": 0.19998461656795632, - "learning_rate": 0.002713568100294378, - "loss": 1.1666, - "step": 2600 - }, - { - "epoch": 0.20006153372817476, - "learning_rate": 0.0027133550282919822, - "loss": 1.4039, - "step": 2601 - }, - { - "epoch": 0.2001384508883932, - "learning_rate": 0.0027131418854405218, - "loss": 1.2346, - "step": 2602 - }, - { - "epoch": 0.20021536804861165, - "learning_rate": 0.002712928671752442, - "loss": 0.8976, - "step": 2603 - }, - { - "epoch": 0.20029228520883008, - "learning_rate": 0.0027127153872401936, - "loss": 1.128, - "step": 2604 - }, - { - "epoch": 0.20036920236904854, - "learning_rate": 0.0027125020319162295, - "loss": 0.9785, - "step": 2605 - }, - { - "epoch": 0.20044611952926697, - "learning_rate": 0.0027122886057930084, - "loss": 1.1928, - "step": 2606 - }, - { - "epoch": 0.20052303668948543, - "learning_rate": 0.0027120751088829925, - "loss": 0.9113, - "step": 2607 - }, - { - "epoch": 0.20059995384970386, - "learning_rate": 0.002711861541198647, - "loss": 0.9073, - "step": 2608 - }, - { - "epoch": 0.20067687100992232, - "learning_rate": 0.0027116479027524437, - "loss": 1.063, - "step": 2609 - }, - { - "epoch": 0.20075378817014075, - "learning_rate": 0.0027114341935568563, - "loss": 1.3355, - "step": 2610 - }, - { - "epoch": 0.2008307053303592, - "learning_rate": 0.002711220413624364, - "loss": 1.2035, - "step": 2611 - }, - { - "epoch": 0.20090762249057764, - "learning_rate": 0.002711006562967449, - "loss": 1.4746, - "step": 2612 - }, - { - "epoch": 0.2009845396507961, - "learning_rate": 0.002710792641598599, - "loss": 1.3634, - "step": 2613 - }, - { - "epoch": 0.20106145681101453, - "learning_rate": 0.002710578649530305, - "loss": 1.2459, - "step": 2614 - }, - { - "epoch": 0.201138373971233, - "learning_rate": 0.002710364586775062, - "loss": 1.4056, - "step": 2615 - }, - { - "epoch": 0.20121529113145142, - "learning_rate": 0.002710150453345369, - "loss": 1.4892, - "step": 2616 - }, - { - "epoch": 0.20129220829166988, - "learning_rate": 0.00270993624925373, - "loss": 1.1893, - "step": 2617 - }, - { - "epoch": 0.20136912545188831, - "learning_rate": 0.002709721974512653, - "loss": 1.1919, - "step": 2618 - }, - { - "epoch": 0.20144604261210677, - "learning_rate": 0.0027095076291346483, - "loss": 1.3314, - "step": 2619 - }, - { - "epoch": 0.2015229597723252, - "learning_rate": 
0.0027092932131322334, - "loss": 1.2867, - "step": 2620 - }, - { - "epoch": 0.20159987693254364, - "learning_rate": 0.0027090787265179267, - "loss": 1.4829, - "step": 2621 - }, - { - "epoch": 0.2016767940927621, - "learning_rate": 0.002708864169304254, - "loss": 0.7613, - "step": 2622 - }, - { - "epoch": 0.20175371125298053, - "learning_rate": 0.0027086495415037416, - "loss": 1.1193, - "step": 2623 - }, - { - "epoch": 0.201830628413199, - "learning_rate": 0.0027084348431289235, - "loss": 1.1944, - "step": 2624 - }, - { - "epoch": 0.20190754557341742, - "learning_rate": 0.0027082200741923353, - "loss": 1.3288, - "step": 2625 - }, - { - "epoch": 0.20198446273363588, - "learning_rate": 0.002708005234706518, - "loss": 1.0715, - "step": 2626 - }, - { - "epoch": 0.2020613798938543, - "learning_rate": 0.002707790324684016, - "loss": 1.121, - "step": 2627 - }, - { - "epoch": 0.20213829705407277, - "learning_rate": 0.002707575344137378, - "loss": 0.808, - "step": 2628 - }, - { - "epoch": 0.2022152142142912, - "learning_rate": 0.0027073602930791575, - "loss": 1.3753, - "step": 2629 - }, - { - "epoch": 0.20229213137450966, - "learning_rate": 0.0027071451715219106, - "loss": 0.9612, - "step": 2630 - }, - { - "epoch": 0.2023690485347281, - "learning_rate": 0.0027069299794781998, - "loss": 1.4398, - "step": 2631 - }, - { - "epoch": 0.20244596569494655, - "learning_rate": 0.0027067147169605892, - "loss": 1.1476, - "step": 2632 - }, - { - "epoch": 0.20252288285516498, - "learning_rate": 0.002706499383981649, - "loss": 1.1654, - "step": 2633 - }, - { - "epoch": 0.20259980001538344, - "learning_rate": 0.002706283980553952, - "loss": 0.8518, - "step": 2634 - }, - { - "epoch": 0.20267671717560187, - "learning_rate": 0.0027060685066900765, - "loss": 1.1888, - "step": 2635 - }, - { - "epoch": 0.20275363433582033, - "learning_rate": 0.0027058529624026037, - "loss": 1.0883, - "step": 2636 - }, - { - "epoch": 0.20283055149603876, - "learning_rate": 0.0027056373477041197, - "loss": 0.9727, - "step": 2637 - }, - { - "epoch": 0.20290746865625722, - "learning_rate": 0.0027054216626072147, - "loss": 1.0949, - "step": 2638 - }, - { - "epoch": 0.20298438581647565, - "learning_rate": 0.0027052059071244828, - "loss": 1.2156, - "step": 2639 - }, - { - "epoch": 0.2030613029766941, - "learning_rate": 0.002704990081268522, - "loss": 1.4779, - "step": 2640 - }, - { - "epoch": 0.20313822013691255, - "learning_rate": 0.002704774185051934, - "loss": 1.258, - "step": 2641 - }, - { - "epoch": 0.20321513729713098, - "learning_rate": 0.0027045582184873256, - "loss": 0.8716, - "step": 2642 - }, - { - "epoch": 0.20329205445734944, - "learning_rate": 0.002704342181587308, - "loss": 1.1194, - "step": 2643 - }, - { - "epoch": 0.20336897161756787, - "learning_rate": 0.0027041260743644953, - "loss": 1.3263, - "step": 2644 - }, - { - "epoch": 0.20344588877778633, - "learning_rate": 0.0027039098968315058, - "loss": 1.1777, - "step": 2645 - }, - { - "epoch": 0.20352280593800476, - "learning_rate": 0.0027036936490009633, - "loss": 1.062, - "step": 2646 - }, - { - "epoch": 0.20359972309822322, - "learning_rate": 0.002703477330885494, - "loss": 1.0762, - "step": 2647 - }, - { - "epoch": 0.20367664025844165, - "learning_rate": 0.0027032609424977286, - "loss": 0.9619, - "step": 2648 - }, - { - "epoch": 0.2037535574186601, - "learning_rate": 0.002703044483850303, - "loss": 0.9424, - "step": 2649 - }, - { - "epoch": 0.20383047457887854, - "learning_rate": 0.0027028279549558562, - "loss": 1.333, - "step": 2650 - }, - { - "epoch": 0.203907391739097, - 
"learning_rate": 0.002702611355827032, - "loss": 1.2799, - "step": 2651 - }, - { - "epoch": 0.20398430889931543, - "learning_rate": 0.002702394686476477, - "loss": 1.186, - "step": 2652 - }, - { - "epoch": 0.2040612260595339, - "learning_rate": 0.002702177946916843, - "loss": 1.1085, - "step": 2653 - }, - { - "epoch": 0.20413814321975232, - "learning_rate": 0.0027019611371607863, - "loss": 1.0352, - "step": 2654 - }, - { - "epoch": 0.20421506037997078, - "learning_rate": 0.0027017442572209657, - "loss": 1.4193, - "step": 2655 - }, - { - "epoch": 0.2042919775401892, - "learning_rate": 0.0027015273071100457, - "loss": 1.2035, - "step": 2656 - }, - { - "epoch": 0.20436889470040767, - "learning_rate": 0.002701310286840694, - "loss": 1.5892, - "step": 2657 - }, - { - "epoch": 0.2044458118606261, - "learning_rate": 0.0027010931964255827, - "loss": 0.9946, - "step": 2658 - }, - { - "epoch": 0.20452272902084456, - "learning_rate": 0.002700876035877388, - "loss": 1.1391, - "step": 2659 - }, - { - "epoch": 0.204599646181063, - "learning_rate": 0.00270065880520879, - "loss": 0.9078, - "step": 2660 - }, - { - "epoch": 0.20467656334128145, - "learning_rate": 0.002700441504432473, - "loss": 1.0461, - "step": 2661 - }, - { - "epoch": 0.20475348050149988, - "learning_rate": 0.0027002241335611255, - "loss": 0.9501, - "step": 2662 - }, - { - "epoch": 0.20483039766171832, - "learning_rate": 0.00270000669260744, - "loss": 1.4062, - "step": 2663 - }, - { - "epoch": 0.20490731482193678, - "learning_rate": 0.002699789181584113, - "loss": 1.2921, - "step": 2664 - }, - { - "epoch": 0.2049842319821552, - "learning_rate": 0.0026995716005038454, - "loss": 1.4167, - "step": 2665 - }, - { - "epoch": 0.20506114914237367, - "learning_rate": 0.002699353949379342, - "loss": 1.1758, - "step": 2666 - }, - { - "epoch": 0.2051380663025921, - "learning_rate": 0.0026991362282233113, - "loss": 1.2451, - "step": 2667 - }, - { - "epoch": 0.20521498346281056, - "learning_rate": 0.0026989184370484667, - "loss": 1.105, - "step": 2668 - }, - { - "epoch": 0.205291900623029, - "learning_rate": 0.0026987005758675247, - "loss": 0.9573, - "step": 2669 - }, - { - "epoch": 0.20536881778324745, - "learning_rate": 0.002698482644693207, - "loss": 1.0918, - "step": 2670 - }, - { - "epoch": 0.20544573494346588, - "learning_rate": 0.002698264643538239, - "loss": 1.137, - "step": 2671 - }, - { - "epoch": 0.20552265210368434, - "learning_rate": 0.0026980465724153494, - "loss": 1.3692, - "step": 2672 - }, - { - "epoch": 0.20559956926390277, - "learning_rate": 0.002697828431337272, - "loss": 1.614, - "step": 2673 - }, - { - "epoch": 0.20567648642412123, - "learning_rate": 0.0026976102203167445, - "loss": 1.3403, - "step": 2674 - }, - { - "epoch": 0.20575340358433966, - "learning_rate": 0.0026973919393665073, - "loss": 1.0917, - "step": 2675 - }, - { - "epoch": 0.20583032074455812, - "learning_rate": 0.002697173588499307, - "loss": 1.7301, - "step": 2676 - }, - { - "epoch": 0.20590723790477655, - "learning_rate": 0.0026969551677278937, - "loss": 1.4602, - "step": 2677 - }, - { - "epoch": 0.205984155064995, - "learning_rate": 0.0026967366770650207, - "loss": 1.1644, - "step": 2678 - }, - { - "epoch": 0.20606107222521344, - "learning_rate": 0.0026965181165234456, - "loss": 1.2144, - "step": 2679 - }, - { - "epoch": 0.2061379893854319, - "learning_rate": 0.002696299486115931, - "loss": 1.0635, - "step": 2680 - }, - { - "epoch": 0.20621490654565033, - "learning_rate": 0.0026960807858552424, - "loss": 1.1161, - "step": 2681 - }, - { - "epoch": 
0.2062918237058688, - "learning_rate": 0.00269586201575415, - "loss": 1.135, - "step": 2682 - }, - { - "epoch": 0.20636874086608722, - "learning_rate": 0.0026956431758254286, - "loss": 1.1538, - "step": 2683 - }, - { - "epoch": 0.20644565802630566, - "learning_rate": 0.002695424266081856, - "loss": 1.3421, - "step": 2684 - }, - { - "epoch": 0.20652257518652412, - "learning_rate": 0.0026952052865362145, - "loss": 0.6206, - "step": 2685 - }, - { - "epoch": 0.20659949234674255, - "learning_rate": 0.0026949862372012906, - "loss": 1.226, - "step": 2686 - }, - { - "epoch": 0.206676409506961, - "learning_rate": 0.002694767118089875, - "loss": 1.194, - "step": 2687 - }, - { - "epoch": 0.20675332666717944, - "learning_rate": 0.0026945479292147623, - "loss": 1.1061, - "step": 2688 - }, - { - "epoch": 0.2068302438273979, - "learning_rate": 0.0026943286705887514, - "loss": 1.2629, - "step": 2689 - }, - { - "epoch": 0.20690716098761633, - "learning_rate": 0.002694109342224644, - "loss": 0.9402, - "step": 2690 - }, - { - "epoch": 0.2069840781478348, - "learning_rate": 0.0026938899441352484, - "loss": 1.1339, - "step": 2691 - }, - { - "epoch": 0.20706099530805322, - "learning_rate": 0.0026936704763333742, - "loss": 1.0795, - "step": 2692 - }, - { - "epoch": 0.20713791246827168, - "learning_rate": 0.002693450938831837, - "loss": 1.2599, - "step": 2693 - }, - { - "epoch": 0.2072148296284901, - "learning_rate": 0.002693231331643456, - "loss": 1.2991, - "step": 2694 - }, - { - "epoch": 0.20729174678870857, - "learning_rate": 0.002693011654781054, - "loss": 0.9029, - "step": 2695 - }, - { - "epoch": 0.207368663948927, - "learning_rate": 0.002692791908257458, - "loss": 1.1686, - "step": 2696 - }, - { - "epoch": 0.20744558110914546, - "learning_rate": 0.0026925720920854992, - "loss": 1.1356, - "step": 2697 - }, - { - "epoch": 0.2075224982693639, - "learning_rate": 0.0026923522062780128, - "loss": 1.0119, - "step": 2698 - }, - { - "epoch": 0.20759941542958235, - "learning_rate": 0.002692132250847839, - "loss": 1.5194, - "step": 2699 - }, - { - "epoch": 0.20767633258980078, - "learning_rate": 0.0026919122258078207, - "loss": 0.8539, - "step": 2700 - }, - { - "epoch": 0.20775324975001924, - "learning_rate": 0.002691692131170805, - "loss": 1.1621, - "step": 2701 - }, - { - "epoch": 0.20783016691023767, - "learning_rate": 0.0026914719669496436, - "loss": 1.0978, - "step": 2702 - }, - { - "epoch": 0.2079070840704561, - "learning_rate": 0.002691251733157193, - "loss": 1.0201, - "step": 2703 - }, - { - "epoch": 0.20798400123067456, - "learning_rate": 0.0026910314298063116, - "loss": 1.3523, - "step": 2704 - }, - { - "epoch": 0.208060918390893, - "learning_rate": 0.002690811056909864, - "loss": 1.374, - "step": 2705 - }, - { - "epoch": 0.20813783555111146, - "learning_rate": 0.0026905906144807177, - "loss": 1.2682, - "step": 2706 - }, - { - "epoch": 0.2082147527113299, - "learning_rate": 0.002690370102531745, - "loss": 1.0675, - "step": 2707 - }, - { - "epoch": 0.20829166987154835, - "learning_rate": 0.0026901495210758207, - "loss": 1.0536, - "step": 2708 - }, - { - "epoch": 0.20836858703176678, - "learning_rate": 0.0026899288701258263, - "loss": 1.2805, - "step": 2709 - }, - { - "epoch": 0.20844550419198524, - "learning_rate": 0.0026897081496946446, - "loss": 0.9618, - "step": 2710 - }, - { - "epoch": 0.20852242135220367, - "learning_rate": 0.002689487359795164, - "loss": 0.9159, - "step": 2711 - }, - { - "epoch": 0.20859933851242213, - "learning_rate": 0.0026892665004402774, - "loss": 0.8268, - "step": 2712 - }, - { - 
"epoch": 0.20867625567264056, - "learning_rate": 0.00268904557164288, - "loss": 1.5303, - "step": 2713 - }, - { - "epoch": 0.20875317283285902, - "learning_rate": 0.0026888245734158723, - "loss": 0.9274, - "step": 2714 - }, - { - "epoch": 0.20883008999307745, - "learning_rate": 0.0026886035057721593, - "loss": 1.3888, - "step": 2715 - }, - { - "epoch": 0.2089070071532959, - "learning_rate": 0.0026883823687246485, - "loss": 0.9998, - "step": 2716 - }, - { - "epoch": 0.20898392431351434, - "learning_rate": 0.0026881611622862527, - "loss": 1.1488, - "step": 2717 - }, - { - "epoch": 0.2090608414737328, - "learning_rate": 0.0026879398864698887, - "loss": 1.058, - "step": 2718 - }, - { - "epoch": 0.20913775863395123, - "learning_rate": 0.0026877185412884762, - "loss": 1.1346, - "step": 2719 - }, - { - "epoch": 0.2092146757941697, - "learning_rate": 0.002687497126754941, - "loss": 0.9705, - "step": 2720 - }, - { - "epoch": 0.20929159295438812, - "learning_rate": 0.00268727564288221, - "loss": 1.2282, - "step": 2721 - }, - { - "epoch": 0.20936851011460658, - "learning_rate": 0.002687054089683218, - "loss": 1.4496, - "step": 2722 - }, - { - "epoch": 0.209445427274825, - "learning_rate": 0.0026868324671708997, - "loss": 1.2114, - "step": 2723 - }, - { - "epoch": 0.20952234443504344, - "learning_rate": 0.002686610775358197, - "loss": 1.2715, - "step": 2724 - }, - { - "epoch": 0.2095992615952619, - "learning_rate": 0.0026863890142580543, - "loss": 1.1555, - "step": 2725 - }, - { - "epoch": 0.20967617875548034, - "learning_rate": 0.0026861671838834212, - "loss": 1.3641, - "step": 2726 - }, - { - "epoch": 0.2097530959156988, - "learning_rate": 0.0026859452842472493, - "loss": 0.9379, - "step": 2727 - }, - { - "epoch": 0.20983001307591723, - "learning_rate": 0.0026857233153624966, - "loss": 1.3668, - "step": 2728 - }, - { - "epoch": 0.20990693023613569, - "learning_rate": 0.002685501277242124, - "loss": 1.1526, - "step": 2729 - }, - { - "epoch": 0.20998384739635412, - "learning_rate": 0.002685279169899096, - "loss": 1.4391, - "step": 2730 - }, - { - "epoch": 0.21006076455657258, - "learning_rate": 0.002685056993346382, - "loss": 1.3306, - "step": 2731 - }, - { - "epoch": 0.210137681716791, - "learning_rate": 0.0026848347475969554, - "loss": 1.1268, - "step": 2732 - }, - { - "epoch": 0.21021459887700947, - "learning_rate": 0.002684612432663793, - "loss": 1.0123, - "step": 2733 - }, - { - "epoch": 0.2102915160372279, - "learning_rate": 0.002684390048559876, - "loss": 0.7296, - "step": 2734 - }, - { - "epoch": 0.21036843319744636, - "learning_rate": 0.0026841675952981895, - "loss": 1.0813, - "step": 2735 - }, - { - "epoch": 0.2104453503576648, - "learning_rate": 0.002683945072891724, - "loss": 1.1279, - "step": 2736 - }, - { - "epoch": 0.21052226751788325, - "learning_rate": 0.0026837224813534705, - "loss": 1.2752, - "step": 2737 - }, - { - "epoch": 0.21059918467810168, - "learning_rate": 0.002683499820696428, - "loss": 1.0637, - "step": 2738 - }, - { - "epoch": 0.21067610183832014, - "learning_rate": 0.0026832770909335983, - "loss": 1.0091, - "step": 2739 - }, - { - "epoch": 0.21075301899853857, - "learning_rate": 0.0026830542920779858, - "loss": 1.1888, - "step": 2740 - }, - { - "epoch": 0.21082993615875703, - "learning_rate": 0.0026828314241425996, - "loss": 0.8736, - "step": 2741 - }, - { - "epoch": 0.21090685331897546, - "learning_rate": 0.0026826084871404542, - "loss": 1.1327, - "step": 2742 - }, - { - "epoch": 0.21098377047919392, - "learning_rate": 0.002682385481084567, - "loss": 1.0802, - "step": 
2743 - }, - { - "epoch": 0.21106068763941235, - "learning_rate": 0.002682162405987959, - "loss": 1.1862, - "step": 2744 - }, - { - "epoch": 0.21113760479963078, - "learning_rate": 0.002681939261863656, - "loss": 1.0259, - "step": 2745 - }, - { - "epoch": 0.21121452195984924, - "learning_rate": 0.002681716048724688, - "loss": 1.4125, - "step": 2746 - }, - { - "epoch": 0.21129143912006768, - "learning_rate": 0.002681492766584088, - "loss": 1.3, - "step": 2747 - }, - { - "epoch": 0.21136835628028613, - "learning_rate": 0.0026812694154548945, - "loss": 1.0045, - "step": 2748 - }, - { - "epoch": 0.21144527344050457, - "learning_rate": 0.0026810459953501486, - "loss": 1.0391, - "step": 2749 - }, - { - "epoch": 0.21152219060072303, - "learning_rate": 0.002680822506282896, - "loss": 1.1679, - "step": 2750 - }, - { - "epoch": 0.21159910776094146, - "learning_rate": 0.002680598948266187, - "loss": 0.96, - "step": 2751 - }, - { - "epoch": 0.21167602492115992, - "learning_rate": 0.0026803753213130747, - "loss": 1.0611, - "step": 2752 - }, - { - "epoch": 0.21175294208137835, - "learning_rate": 0.0026801516254366174, - "loss": 1.3594, - "step": 2753 - }, - { - "epoch": 0.2118298592415968, - "learning_rate": 0.0026799278606498764, - "loss": 0.9369, - "step": 2754 - }, - { - "epoch": 0.21190677640181524, - "learning_rate": 0.0026797040269659184, - "loss": 1.237, - "step": 2755 - }, - { - "epoch": 0.2119836935620337, - "learning_rate": 0.0026794801243978125, - "loss": 0.8864, - "step": 2756 - }, - { - "epoch": 0.21206061072225213, - "learning_rate": 0.002679256152958633, - "loss": 1.373, - "step": 2757 - }, - { - "epoch": 0.2121375278824706, - "learning_rate": 0.002679032112661458, - "loss": 1.2083, - "step": 2758 - }, - { - "epoch": 0.21221444504268902, - "learning_rate": 0.002678808003519369, - "loss": 1.1763, - "step": 2759 - }, - { - "epoch": 0.21229136220290748, - "learning_rate": 0.002678583825545452, - "loss": 1.2056, - "step": 2760 - }, - { - "epoch": 0.2123682793631259, - "learning_rate": 0.0026783595787527974, - "loss": 1.6132, - "step": 2761 - }, - { - "epoch": 0.21244519652334437, - "learning_rate": 0.0026781352631544985, - "loss": 1.074, - "step": 2762 - }, - { - "epoch": 0.2125221136835628, - "learning_rate": 0.0026779108787636542, - "loss": 1.371, - "step": 2763 - }, - { - "epoch": 0.21259903084378123, - "learning_rate": 0.002677686425593366, - "loss": 1.1205, - "step": 2764 - }, - { - "epoch": 0.2126759480039997, - "learning_rate": 0.0026774619036567396, - "loss": 1.0216, - "step": 2765 - }, - { - "epoch": 0.21275286516421812, - "learning_rate": 0.0026772373129668858, - "loss": 1.3456, - "step": 2766 - }, - { - "epoch": 0.21282978232443658, - "learning_rate": 0.0026770126535369185, - "loss": 1.093, - "step": 2767 - }, - { - "epoch": 0.21290669948465502, - "learning_rate": 0.0026767879253799557, - "loss": 1.3328, - "step": 2768 - }, - { - "epoch": 0.21298361664487347, - "learning_rate": 0.00267656312850912, - "loss": 1.1032, - "step": 2769 - }, - { - "epoch": 0.2130605338050919, - "learning_rate": 0.0026763382629375367, - "loss": 1.1695, - "step": 2770 - }, - { - "epoch": 0.21313745096531037, - "learning_rate": 0.002676113328678336, - "loss": 1.0566, - "step": 2771 - }, - { - "epoch": 0.2132143681255288, - "learning_rate": 0.0026758883257446526, - "loss": 1.3098, - "step": 2772 - }, - { - "epoch": 0.21329128528574726, - "learning_rate": 0.002675663254149624, - "loss": 1.3136, - "step": 2773 - }, - { - "epoch": 0.2133682024459657, - "learning_rate": 0.0026754381139063926, - "loss": 1.2384, - 
"step": 2774 - }, - { - "epoch": 0.21344511960618415, - "learning_rate": 0.002675212905028105, - "loss": 0.8283, - "step": 2775 - }, - { - "epoch": 0.21352203676640258, - "learning_rate": 0.0026749876275279118, - "loss": 1.0777, - "step": 2776 - }, - { - "epoch": 0.21359895392662104, - "learning_rate": 0.002674762281418966, - "loss": 1.1138, - "step": 2777 - }, - { - "epoch": 0.21367587108683947, - "learning_rate": 0.002674536866714426, - "loss": 0.8933, - "step": 2778 - }, - { - "epoch": 0.21375278824705793, - "learning_rate": 0.0026743113834274547, - "loss": 1.2152, - "step": 2779 - }, - { - "epoch": 0.21382970540727636, - "learning_rate": 0.0026740858315712174, - "loss": 1.2671, - "step": 2780 - }, - { - "epoch": 0.21390662256749482, - "learning_rate": 0.002673860211158885, - "loss": 1.1557, - "step": 2781 - }, - { - "epoch": 0.21398353972771325, - "learning_rate": 0.0026736345222036317, - "loss": 0.9709, - "step": 2782 - }, - { - "epoch": 0.2140604568879317, - "learning_rate": 0.002673408764718635, - "loss": 1.2213, - "step": 2783 - }, - { - "epoch": 0.21413737404815014, - "learning_rate": 0.002673182938717078, - "loss": 0.9639, - "step": 2784 - }, - { - "epoch": 0.21421429120836857, - "learning_rate": 0.0026729570442121466, - "loss": 1.2288, - "step": 2785 - }, - { - "epoch": 0.21429120836858703, - "learning_rate": 0.0026727310812170307, - "loss": 1.1893, - "step": 2786 - }, - { - "epoch": 0.21436812552880546, - "learning_rate": 0.0026725050497449256, - "loss": 1.0382, - "step": 2787 - }, - { - "epoch": 0.21444504268902392, - "learning_rate": 0.002672278949809028, - "loss": 1.1415, - "step": 2788 - }, - { - "epoch": 0.21452195984924236, - "learning_rate": 0.0026720527814225407, - "loss": 1.0127, - "step": 2789 - }, - { - "epoch": 0.21459887700946081, - "learning_rate": 0.00267182654459867, - "loss": 1.2729, - "step": 2790 - }, - { - "epoch": 0.21467579416967925, - "learning_rate": 0.0026716002393506262, - "loss": 1.2256, - "step": 2791 - }, - { - "epoch": 0.2147527113298977, - "learning_rate": 0.0026713738656916237, - "loss": 1.4644, - "step": 2792 - }, - { - "epoch": 0.21482962849011614, - "learning_rate": 0.0026711474236348802, - "loss": 0.797, - "step": 2793 - }, - { - "epoch": 0.2149065456503346, - "learning_rate": 0.002670920913193618, - "loss": 1.2909, - "step": 2794 - }, - { - "epoch": 0.21498346281055303, - "learning_rate": 0.0026706943343810636, - "loss": 1.3884, - "step": 2795 - }, - { - "epoch": 0.2150603799707715, - "learning_rate": 0.0026704676872104467, - "loss": 1.1233, - "step": 2796 - }, - { - "epoch": 0.21513729713098992, - "learning_rate": 0.0026702409716950017, - "loss": 1.3233, - "step": 2797 - }, - { - "epoch": 0.21521421429120838, - "learning_rate": 0.002670014187847967, - "loss": 1.0681, - "step": 2798 - }, - { - "epoch": 0.2152911314514268, - "learning_rate": 0.0026697873356825844, - "loss": 1.3902, - "step": 2799 - }, - { - "epoch": 0.21536804861164527, - "learning_rate": 0.0026695604152121005, - "loss": 0.9033, - "step": 2800 - }, - { - "epoch": 0.2154449657718637, - "learning_rate": 0.0026693334264497647, - "loss": 1.2176, - "step": 2801 - }, - { - "epoch": 0.21552188293208216, - "learning_rate": 0.0026691063694088313, - "loss": 1.4218, - "step": 2802 - }, - { - "epoch": 0.2155988000923006, - "learning_rate": 0.0026688792441025588, - "loss": 1.2279, - "step": 2803 - }, - { - "epoch": 0.21567571725251905, - "learning_rate": 0.002668652050544209, - "loss": 1.4316, - "step": 2804 - }, - { - "epoch": 0.21575263441273748, - "learning_rate": 0.002668424788747048, 
- "loss": 0.9718, - "step": 2805 - }, - { - "epoch": 0.2158295515729559, - "learning_rate": 0.002668197458724346, - "loss": 1.4095, - "step": 2806 - }, - { - "epoch": 0.21590646873317437, - "learning_rate": 0.0026679700604893765, - "loss": 1.143, - "step": 2807 - }, - { - "epoch": 0.2159833858933928, - "learning_rate": 0.002667742594055419, - "loss": 1.2975, - "step": 2808 - }, - { - "epoch": 0.21606030305361126, - "learning_rate": 0.0026675150594357533, - "loss": 0.6969, - "step": 2809 - }, - { - "epoch": 0.2161372202138297, - "learning_rate": 0.0026672874566436673, - "loss": 1.015, - "step": 2810 - }, - { - "epoch": 0.21621413737404815, - "learning_rate": 0.0026670597856924493, - "loss": 1.2185, - "step": 2811 - }, - { - "epoch": 0.21629105453426659, - "learning_rate": 0.0026668320465953947, - "loss": 1.2983, - "step": 2812 - }, - { - "epoch": 0.21636797169448505, - "learning_rate": 0.0026666042393658014, - "loss": 1.0997, - "step": 2813 - }, - { - "epoch": 0.21644488885470348, - "learning_rate": 0.00266637636401697, - "loss": 1.1781, - "step": 2814 - }, - { - "epoch": 0.21652180601492194, - "learning_rate": 0.002666148420562207, - "loss": 1.0728, - "step": 2815 - }, - { - "epoch": 0.21659872317514037, - "learning_rate": 0.002665920409014823, - "loss": 1.3593, - "step": 2816 - }, - { - "epoch": 0.21667564033535883, - "learning_rate": 0.0026656923293881312, - "loss": 0.9013, - "step": 2817 - }, - { - "epoch": 0.21675255749557726, - "learning_rate": 0.002665464181695449, - "loss": 2.0623, - "step": 2818 - }, - { - "epoch": 0.21682947465579572, - "learning_rate": 0.0026652359659500987, - "loss": 0.9032, - "step": 2819 - }, - { - "epoch": 0.21690639181601415, - "learning_rate": 0.0026650076821654062, - "loss": 1.2291, - "step": 2820 - }, - { - "epoch": 0.2169833089762326, - "learning_rate": 0.002664779330354701, - "loss": 1.515, - "step": 2821 - }, - { - "epoch": 0.21706022613645104, - "learning_rate": 0.0026645509105313165, - "loss": 1.4406, - "step": 2822 - }, - { - "epoch": 0.2171371432966695, - "learning_rate": 0.002664322422708591, - "loss": 1.3025, - "step": 2823 - }, - { - "epoch": 0.21721406045688793, - "learning_rate": 0.0026640938668998655, - "loss": 1.0984, - "step": 2824 - }, - { - "epoch": 0.2172909776171064, - "learning_rate": 0.0026638652431184863, - "loss": 1.2292, - "step": 2825 - }, - { - "epoch": 0.21736789477732482, - "learning_rate": 0.0026636365513778024, - "loss": 1.1934, - "step": 2826 - }, - { - "epoch": 0.21744481193754325, - "learning_rate": 0.0026634077916911675, - "loss": 1.1607, - "step": 2827 - }, - { - "epoch": 0.2175217290977617, - "learning_rate": 0.002663178964071939, - "loss": 1.6558, - "step": 2828 - }, - { - "epoch": 0.21759864625798014, - "learning_rate": 0.0026629500685334782, - "loss": 1.4741, - "step": 2829 - }, - { - "epoch": 0.2176755634181986, - "learning_rate": 0.002662721105089152, - "loss": 1.2368, - "step": 2830 - }, - { - "epoch": 0.21775248057841703, - "learning_rate": 0.002662492073752328, - "loss": 0.9789, - "step": 2831 - }, - { - "epoch": 0.2178293977386355, - "learning_rate": 0.0026622629745363803, - "loss": 1.1643, - "step": 2832 - }, - { - "epoch": 0.21790631489885393, - "learning_rate": 0.0026620338074546863, - "loss": 1.3747, - "step": 2833 - }, - { - "epoch": 0.21798323205907238, - "learning_rate": 0.0026618045725206275, - "loss": 0.9163, - "step": 2834 - }, - { - "epoch": 0.21806014921929082, - "learning_rate": 0.0026615752697475886, - "loss": 1.019, - "step": 2835 - }, - { - "epoch": 0.21813706637950928, - "learning_rate": 
0.0026613458991489596, - "loss": 0.892, - "step": 2836 - }, - { - "epoch": 0.2182139835397277, - "learning_rate": 0.002661116460738133, - "loss": 1.0551, - "step": 2837 - }, - { - "epoch": 0.21829090069994617, - "learning_rate": 0.0026608869545285056, - "loss": 1.1778, - "step": 2838 - }, - { - "epoch": 0.2183678178601646, - "learning_rate": 0.00266065738053348, - "loss": 1.3514, - "step": 2839 - }, - { - "epoch": 0.21844473502038306, - "learning_rate": 0.0026604277387664594, - "loss": 0.8138, - "step": 2840 - }, - { - "epoch": 0.2185216521806015, - "learning_rate": 0.0026601980292408544, - "loss": 1.3814, - "step": 2841 - }, - { - "epoch": 0.21859856934081995, - "learning_rate": 0.002659968251970077, - "loss": 1.3129, - "step": 2842 - }, - { - "epoch": 0.21867548650103838, - "learning_rate": 0.002659738406967545, - "loss": 1.1769, - "step": 2843 - }, - { - "epoch": 0.21875240366125684, - "learning_rate": 0.0026595084942466784, - "loss": 1.0289, - "step": 2844 - }, - { - "epoch": 0.21882932082147527, - "learning_rate": 0.0026592785138209022, - "loss": 1.1511, - "step": 2845 - }, - { - "epoch": 0.2189062379816937, - "learning_rate": 0.002659048465703646, - "loss": 1.2311, - "step": 2846 - }, - { - "epoch": 0.21898315514191216, - "learning_rate": 0.0026588183499083413, - "loss": 1.0108, - "step": 2847 - }, - { - "epoch": 0.2190600723021306, - "learning_rate": 0.002658588166448426, - "loss": 1.1901, - "step": 2848 - }, - { - "epoch": 0.21913698946234905, - "learning_rate": 0.00265835791533734, - "loss": 0.9988, - "step": 2849 - }, - { - "epoch": 0.21921390662256748, - "learning_rate": 0.0026581275965885283, - "loss": 1.0638, - "step": 2850 - }, - { - "epoch": 0.21929082378278594, - "learning_rate": 0.0026578972102154386, - "loss": 1.2478, - "step": 2851 - }, - { - "epoch": 0.21936774094300437, - "learning_rate": 0.002657666756231525, - "loss": 1.3969, - "step": 2852 - }, - { - "epoch": 0.21944465810322283, - "learning_rate": 0.002657436234650242, - "loss": 1.2669, - "step": 2853 - }, - { - "epoch": 0.21952157526344127, - "learning_rate": 0.002657205645485052, - "loss": 1.2133, - "step": 2854 - }, - { - "epoch": 0.21959849242365972, - "learning_rate": 0.0026569749887494177, - "loss": 0.8658, - "step": 2855 - }, - { - "epoch": 0.21967540958387816, - "learning_rate": 0.002656744264456808, - "loss": 1.2947, - "step": 2856 - }, - { - "epoch": 0.21975232674409662, - "learning_rate": 0.0026565134726206957, - "loss": 1.3169, - "step": 2857 - }, - { - "epoch": 0.21982924390431505, - "learning_rate": 0.002656282613254556, - "loss": 1.1669, - "step": 2858 - }, - { - "epoch": 0.2199061610645335, - "learning_rate": 0.00265605168637187, - "loss": 1.3458, - "step": 2859 - }, - { - "epoch": 0.21998307822475194, - "learning_rate": 0.0026558206919861205, - "loss": 1.2217, - "step": 2860 - }, - { - "epoch": 0.2200599953849704, - "learning_rate": 0.0026555896301107967, - "loss": 1.4341, - "step": 2861 - }, - { - "epoch": 0.22013691254518883, - "learning_rate": 0.0026553585007593893, - "loss": 0.9718, - "step": 2862 - }, - { - "epoch": 0.2202138297054073, - "learning_rate": 0.002655127303945396, - "loss": 1.3726, - "step": 2863 - }, - { - "epoch": 0.22029074686562572, - "learning_rate": 0.002654896039682315, - "loss": 1.0828, - "step": 2864 - }, - { - "epoch": 0.22036766402584418, - "learning_rate": 0.0026546647079836506, - "loss": 1.177, - "step": 2865 - }, - { - "epoch": 0.2204445811860626, - "learning_rate": 0.002654433308862911, - "loss": 1.1054, - "step": 2866 - }, - { - "epoch": 0.22052149834628104, - 
"learning_rate": 0.002654201842333607, - "loss": 1.4729, - "step": 2867 - }, - { - "epoch": 0.2205984155064995, - "learning_rate": 0.0026539703084092546, - "loss": 1.4224, - "step": 2868 - }, - { - "epoch": 0.22067533266671793, - "learning_rate": 0.0026537387071033734, - "loss": 0.9926, - "step": 2869 - }, - { - "epoch": 0.2207522498269364, - "learning_rate": 0.002653507038429487, - "loss": 1.4223, - "step": 2870 - }, - { - "epoch": 0.22082916698715482, - "learning_rate": 0.0026532753024011216, - "loss": 1.1191, - "step": 2871 - }, - { - "epoch": 0.22090608414737328, - "learning_rate": 0.0026530434990318097, - "loss": 1.4377, - "step": 2872 - }, - { - "epoch": 0.22098300130759171, - "learning_rate": 0.0026528116283350864, - "loss": 1.1086, - "step": 2873 - }, - { - "epoch": 0.22105991846781017, - "learning_rate": 0.002652579690324491, - "loss": 1.0963, - "step": 2874 - }, - { - "epoch": 0.2211368356280286, - "learning_rate": 0.0026523476850135656, - "loss": 1.1156, - "step": 2875 - }, - { - "epoch": 0.22121375278824706, - "learning_rate": 0.0026521156124158585, - "loss": 0.9587, - "step": 2876 - }, - { - "epoch": 0.2212906699484655, - "learning_rate": 0.0026518834725449197, - "loss": 1.466, - "step": 2877 - }, - { - "epoch": 0.22136758710868396, - "learning_rate": 0.0026516512654143048, - "loss": 1.2132, - "step": 2878 - }, - { - "epoch": 0.2214445042689024, - "learning_rate": 0.0026514189910375723, - "loss": 1.0236, - "step": 2879 - }, - { - "epoch": 0.22152142142912085, - "learning_rate": 0.0026511866494282844, - "loss": 1.3139, - "step": 2880 - }, - { - "epoch": 0.22159833858933928, - "learning_rate": 0.0026509542406000087, - "loss": 1.0677, - "step": 2881 - }, - { - "epoch": 0.22167525574955774, - "learning_rate": 0.0026507217645663156, - "loss": 1.5382, - "step": 2882 - }, - { - "epoch": 0.22175217290977617, - "learning_rate": 0.002650489221340779, - "loss": 1.2255, - "step": 2883 - }, - { - "epoch": 0.22182909006999463, - "learning_rate": 0.002650256610936978, - "loss": 1.1327, - "step": 2884 - }, - { - "epoch": 0.22190600723021306, - "learning_rate": 0.0026500239333684945, - "loss": 0.9811, - "step": 2885 - }, - { - "epoch": 0.22198292439043152, - "learning_rate": 0.0026497911886489154, - "loss": 1.2869, - "step": 2886 - }, - { - "epoch": 0.22205984155064995, - "learning_rate": 0.0026495583767918297, - "loss": 1.4157, - "step": 2887 - }, - { - "epoch": 0.22213675871086838, - "learning_rate": 0.0026493254978108335, - "loss": 1.3822, - "step": 2888 - }, - { - "epoch": 0.22221367587108684, - "learning_rate": 0.002649092551719523, - "loss": 1.158, - "step": 2889 - }, - { - "epoch": 0.22229059303130527, - "learning_rate": 0.0026488595385315006, - "loss": 1.387, - "step": 2890 - }, - { - "epoch": 0.22236751019152373, - "learning_rate": 0.0026486264582603726, - "loss": 1.2149, - "step": 2891 - }, - { - "epoch": 0.22244442735174216, - "learning_rate": 0.002648393310919749, - "loss": 1.0277, - "step": 2892 - }, - { - "epoch": 0.22252134451196062, - "learning_rate": 0.002648160096523243, - "loss": 1.1177, - "step": 2893 - }, - { - "epoch": 0.22259826167217905, - "learning_rate": 0.0026479268150844722, - "loss": 1.1437, - "step": 2894 - }, - { - "epoch": 0.2226751788323975, - "learning_rate": 0.0026476934666170584, - "loss": 1.0757, - "step": 2895 - }, - { - "epoch": 0.22275209599261594, - "learning_rate": 0.0026474600511346273, - "loss": 1.2584, - "step": 2896 - }, - { - "epoch": 0.2228290131528344, - "learning_rate": 0.002647226568650808, - "loss": 1.3191, - "step": 2897 - }, - { - "epoch": 
0.22290593031305284, - "learning_rate": 0.0026469930191792327, - "loss": 1.0578, - "step": 2898 - }, - { - "epoch": 0.2229828474732713, - "learning_rate": 0.0026467594027335404, - "loss": 1.3145, - "step": 2899 - }, - { - "epoch": 0.22305976463348973, - "learning_rate": 0.0026465257193273714, - "loss": 1.1279, - "step": 2900 - }, - { - "epoch": 0.22313668179370819, - "learning_rate": 0.002646291968974371, - "loss": 1.2705, - "step": 2901 - }, - { - "epoch": 0.22321359895392662, - "learning_rate": 0.002646058151688188, - "loss": 1.0089, - "step": 2902 - }, - { - "epoch": 0.22329051611414508, - "learning_rate": 0.0026458242674824747, - "loss": 1.2941, - "step": 2903 - }, - { - "epoch": 0.2233674332743635, - "learning_rate": 0.0026455903163708884, - "loss": 0.9568, - "step": 2904 - }, - { - "epoch": 0.22344435043458197, - "learning_rate": 0.0026453562983670895, - "loss": 1.423, - "step": 2905 - }, - { - "epoch": 0.2235212675948004, - "learning_rate": 0.0026451222134847427, - "loss": 0.7131, - "step": 2906 - }, - { - "epoch": 0.22359818475501886, - "learning_rate": 0.0026448880617375168, - "loss": 0.8101, - "step": 2907 - }, - { - "epoch": 0.2236751019152373, - "learning_rate": 0.002644653843139083, - "loss": 0.948, - "step": 2908 - }, - { - "epoch": 0.22375201907545572, - "learning_rate": 0.002644419557703119, - "loss": 1.1125, - "step": 2909 - }, - { - "epoch": 0.22382893623567418, - "learning_rate": 0.0026441852054433046, - "loss": 1.1147, - "step": 2910 - }, - { - "epoch": 0.2239058533958926, - "learning_rate": 0.0026439507863733236, - "loss": 1.4192, - "step": 2911 - }, - { - "epoch": 0.22398277055611107, - "learning_rate": 0.002643716300506863, - "loss": 1.5509, - "step": 2912 - }, - { - "epoch": 0.2240596877163295, - "learning_rate": 0.0026434817478576166, - "loss": 0.9808, - "step": 2913 - }, - { - "epoch": 0.22413660487654796, - "learning_rate": 0.002643247128439279, - "loss": 1.0333, - "step": 2914 - }, - { - "epoch": 0.2242135220367664, - "learning_rate": 0.00264301244226555, - "loss": 1.192, - "step": 2915 - }, - { - "epoch": 0.22429043919698485, - "learning_rate": 0.002642777689350133, - "loss": 1.7771, - "step": 2916 - }, - { - "epoch": 0.22436735635720328, - "learning_rate": 0.002642542869706736, - "loss": 1.197, - "step": 2917 - }, - { - "epoch": 0.22444427351742174, - "learning_rate": 0.00264230798334907, - "loss": 1.1411, - "step": 2918 - }, - { - "epoch": 0.22452119067764018, - "learning_rate": 0.002642073030290851, - "loss": 1.2446, - "step": 2919 - }, - { - "epoch": 0.22459810783785863, - "learning_rate": 0.002641838010545797, - "loss": 1.2408, - "step": 2920 - }, - { - "epoch": 0.22467502499807707, - "learning_rate": 0.0026416029241276315, - "loss": 1.1546, - "step": 2921 - }, - { - "epoch": 0.22475194215829553, - "learning_rate": 0.002641367771050082, - "loss": 1.1168, - "step": 2922 - }, - { - "epoch": 0.22482885931851396, - "learning_rate": 0.0026411325513268783, - "loss": 1.2393, - "step": 2923 - }, - { - "epoch": 0.22490577647873242, - "learning_rate": 0.0026408972649717556, - "loss": 0.9176, - "step": 2924 - }, - { - "epoch": 0.22498269363895085, - "learning_rate": 0.0026406619119984528, - "loss": 1.2219, - "step": 2925 - }, - { - "epoch": 0.2250596107991693, - "learning_rate": 0.0026404264924207127, - "loss": 1.2197, - "step": 2926 - }, - { - "epoch": 0.22513652795938774, - "learning_rate": 0.0026401910062522804, - "loss": 0.7539, - "step": 2927 - }, - { - "epoch": 0.22521344511960617, - "learning_rate": 0.0026399554535069076, - "loss": 1.005, - "step": 2928 - }, - 
{ - "epoch": 0.22529036227982463, - "learning_rate": 0.002639719834198347, - "loss": 0.9408, - "step": 2929 - }, - { - "epoch": 0.22536727944004306, - "learning_rate": 0.0026394841483403583, - "loss": 1.1059, - "step": 2930 - }, - { - "epoch": 0.22544419660026152, - "learning_rate": 0.0026392483959467024, - "loss": 1.0097, - "step": 2931 - }, - { - "epoch": 0.22552111376047995, - "learning_rate": 0.002639012577031145, - "loss": 1.1187, - "step": 2932 - }, - { - "epoch": 0.2255980309206984, - "learning_rate": 0.0026387766916074564, - "loss": 0.6283, - "step": 2933 - }, - { - "epoch": 0.22567494808091684, - "learning_rate": 0.0026385407396894104, - "loss": 1.0521, - "step": 2934 - }, - { - "epoch": 0.2257518652411353, - "learning_rate": 0.0026383047212907838, - "loss": 0.8396, - "step": 2935 - }, - { - "epoch": 0.22582878240135373, - "learning_rate": 0.002638068636425358, - "loss": 1.1625, - "step": 2936 - }, - { - "epoch": 0.2259056995615722, - "learning_rate": 0.0026378324851069183, - "loss": 1.3229, - "step": 2937 - }, - { - "epoch": 0.22598261672179062, - "learning_rate": 0.0026375962673492546, - "loss": 1.1008, - "step": 2938 - }, - { - "epoch": 0.22605953388200908, - "learning_rate": 0.002637359983166159, - "loss": 0.9938, - "step": 2939 - }, - { - "epoch": 0.22613645104222752, - "learning_rate": 0.0026371236325714283, - "loss": 0.9123, - "step": 2940 - }, - { - "epoch": 0.22621336820244597, - "learning_rate": 0.002636887215578864, - "loss": 1.1797, - "step": 2941 - }, - { - "epoch": 0.2262902853626644, - "learning_rate": 0.00263665073220227, - "loss": 1.1467, - "step": 2942 - }, - { - "epoch": 0.22636720252288287, - "learning_rate": 0.0026364141824554554, - "loss": 1.1814, - "step": 2943 - }, - { - "epoch": 0.2264441196831013, - "learning_rate": 0.002636177566352233, - "loss": 1.1443, - "step": 2944 - }, - { - "epoch": 0.22652103684331976, - "learning_rate": 0.002635940883906417, - "loss": 1.1127, - "step": 2945 - }, - { - "epoch": 0.2265979540035382, - "learning_rate": 0.0026357041351318298, - "loss": 1.0529, - "step": 2946 - }, - { - "epoch": 0.22667487116375665, - "learning_rate": 0.002635467320042294, - "loss": 1.28, - "step": 2947 - }, - { - "epoch": 0.22675178832397508, - "learning_rate": 0.002635230438651639, - "loss": 1.2115, - "step": 2948 - }, - { - "epoch": 0.2268287054841935, - "learning_rate": 0.002634993490973695, - "loss": 1.1188, - "step": 2949 - }, - { - "epoch": 0.22690562264441197, - "learning_rate": 0.002634756477022298, - "loss": 1.1421, - "step": 2950 - }, - { - "epoch": 0.2269825398046304, - "learning_rate": 0.0026345193968112884, - "loss": 1.4197, - "step": 2951 - }, - { - "epoch": 0.22705945696484886, - "learning_rate": 0.0026342822503545083, - "loss": 0.92, - "step": 2952 - }, - { - "epoch": 0.2271363741250673, - "learning_rate": 0.002634045037665805, - "loss": 0.9803, - "step": 2953 - }, - { - "epoch": 0.22721329128528575, - "learning_rate": 0.002633807758759031, - "loss": 1.1468, - "step": 2954 - }, - { - "epoch": 0.22729020844550418, - "learning_rate": 0.00263357041364804, - "loss": 1.2968, - "step": 2955 - }, - { - "epoch": 0.22736712560572264, - "learning_rate": 0.0026333330023466913, - "loss": 1.3053, - "step": 2956 - }, - { - "epoch": 0.22744404276594107, - "learning_rate": 0.0026330955248688474, - "loss": 1.1437, - "step": 2957 - }, - { - "epoch": 0.22752095992615953, - "learning_rate": 0.0026328579812283743, - "loss": 0.997, - "step": 2958 - }, - { - "epoch": 0.22759787708637796, - "learning_rate": 0.0026326203714391438, - "loss": 0.9116, - "step": 
2959 - }, - { - "epoch": 0.22767479424659642, - "learning_rate": 0.0026323826955150288, - "loss": 0.6847, - "step": 2960 - }, - { - "epoch": 0.22775171140681486, - "learning_rate": 0.002632144953469909, - "loss": 1.314, - "step": 2961 - }, - { - "epoch": 0.22782862856703331, - "learning_rate": 0.0026319071453176648, - "loss": 0.8372, - "step": 2962 - }, - { - "epoch": 0.22790554572725175, - "learning_rate": 0.002631669271072183, - "loss": 1.0382, - "step": 2963 - }, - { - "epoch": 0.2279824628874702, - "learning_rate": 0.0026314313307473527, - "loss": 1.3457, - "step": 2964 - }, - { - "epoch": 0.22805938004768864, - "learning_rate": 0.002631193324357068, - "loss": 0.8498, - "step": 2965 - }, - { - "epoch": 0.2281362972079071, - "learning_rate": 0.0026309552519152265, - "loss": 1.146, - "step": 2966 - }, - { - "epoch": 0.22821321436812553, - "learning_rate": 0.002630717113435729, - "loss": 1.3942, - "step": 2967 - }, - { - "epoch": 0.228290131528344, - "learning_rate": 0.002630478908932481, - "loss": 1.0987, - "step": 2968 - }, - { - "epoch": 0.22836704868856242, - "learning_rate": 0.002630240638419391, - "loss": 1.2825, - "step": 2969 - }, - { - "epoch": 0.22844396584878085, - "learning_rate": 0.0026300023019103723, - "loss": 1.5577, - "step": 2970 - }, - { - "epoch": 0.2285208830089993, - "learning_rate": 0.0026297638994193413, - "loss": 1.0042, - "step": 2971 - }, - { - "epoch": 0.22859780016921774, - "learning_rate": 0.002629525430960219, - "loss": 0.9451, - "step": 2972 - }, - { - "epoch": 0.2286747173294362, - "learning_rate": 0.00262928689654693, - "loss": 1.2184, - "step": 2973 - }, - { - "epoch": 0.22875163448965463, - "learning_rate": 0.002629048296193402, - "loss": 1.1151, - "step": 2974 - }, - { - "epoch": 0.2288285516498731, - "learning_rate": 0.002628809629913567, - "loss": 0.9534, - "step": 2975 - }, - { - "epoch": 0.22890546881009152, - "learning_rate": 0.002628570897721362, - "loss": 1.1324, - "step": 2976 - }, - { - "epoch": 0.22898238597030998, - "learning_rate": 0.0026283320996307253, - "loss": 0.9374, - "step": 2977 - }, - { - "epoch": 0.2290593031305284, - "learning_rate": 0.0026280932356556016, - "loss": 1.3866, - "step": 2978 - }, - { - "epoch": 0.22913622029074687, - "learning_rate": 0.0026278543058099384, - "loss": 0.8686, - "step": 2979 - }, - { - "epoch": 0.2292131374509653, - "learning_rate": 0.0026276153101076867, - "loss": 1.2933, - "step": 2980 - }, - { - "epoch": 0.22929005461118376, - "learning_rate": 0.002627376248562802, - "loss": 1.4172, - "step": 2981 - }, - { - "epoch": 0.2293669717714022, - "learning_rate": 0.002627137121189243, - "loss": 1.0657, - "step": 2982 - }, - { - "epoch": 0.22944388893162065, - "learning_rate": 0.0026268979280009734, - "loss": 1.0249, - "step": 2983 - }, - { - "epoch": 0.22952080609183909, - "learning_rate": 0.0026266586690119587, - "loss": 1.1686, - "step": 2984 - }, - { - "epoch": 0.22959772325205755, - "learning_rate": 0.0026264193442361704, - "loss": 0.9695, - "step": 2985 - }, - { - "epoch": 0.22967464041227598, - "learning_rate": 0.0026261799536875827, - "loss": 1.4658, - "step": 2986 - }, - { - "epoch": 0.22975155757249444, - "learning_rate": 0.0026259404973801737, - "loss": 0.9816, - "step": 2987 - }, - { - "epoch": 0.22982847473271287, - "learning_rate": 0.0026257009753279255, - "loss": 1.3082, - "step": 2988 - }, - { - "epoch": 0.22990539189293133, - "learning_rate": 0.0026254613875448247, - "loss": 1.3418, - "step": 2989 - }, - { - "epoch": 0.22998230905314976, - "learning_rate": 0.0026252217340448606, - "loss": 
0.9574, - "step": 2990 - }, - { - "epoch": 0.2300592262133682, - "learning_rate": 0.002624982014842026, - "loss": 1.0907, - "step": 2991 - }, - { - "epoch": 0.23013614337358665, - "learning_rate": 0.0026247422299503194, - "loss": 1.2523, - "step": 2992 - }, - { - "epoch": 0.23021306053380508, - "learning_rate": 0.002624502379383742, - "loss": 1.2739, - "step": 2993 - }, - { - "epoch": 0.23028997769402354, - "learning_rate": 0.002624262463156299, - "loss": 1.037, - "step": 2994 - }, - { - "epoch": 0.23036689485424197, - "learning_rate": 0.002624022481281999, - "loss": 1.1786, - "step": 2995 - }, - { - "epoch": 0.23044381201446043, - "learning_rate": 0.0026237824337748544, - "loss": 0.9719, - "step": 2996 - }, - { - "epoch": 0.23052072917467886, - "learning_rate": 0.0026235423206488824, - "loss": 1.4303, - "step": 2997 - }, - { - "epoch": 0.23059764633489732, - "learning_rate": 0.0026233021419181037, - "loss": 0.7923, - "step": 2998 - }, - { - "epoch": 0.23067456349511575, - "learning_rate": 0.002623061897596542, - "loss": 1.0055, - "step": 2999 - }, - { - "epoch": 0.2307514806553342, - "learning_rate": 0.002622821587698226, - "loss": 1.4625, - "step": 3000 - }, - { - "epoch": 0.23082839781555264, - "learning_rate": 0.002622581212237187, - "loss": 1.0458, - "step": 3001 - }, - { - "epoch": 0.2309053149757711, - "learning_rate": 0.002622340771227461, - "loss": 1.2414, - "step": 3002 - }, - { - "epoch": 0.23098223213598953, - "learning_rate": 0.002622100264683088, - "loss": 1.251, - "step": 3003 - }, - { - "epoch": 0.231059149296208, - "learning_rate": 0.0026218596926181107, - "loss": 0.9071, - "step": 3004 - }, - { - "epoch": 0.23113606645642643, - "learning_rate": 0.002621619055046578, - "loss": 0.8977, - "step": 3005 - }, - { - "epoch": 0.23121298361664488, - "learning_rate": 0.0026213783519825383, - "loss": 1.1652, - "step": 3006 - }, - { - "epoch": 0.23128990077686332, - "learning_rate": 0.0026211375834400484, - "loss": 1.7237, - "step": 3007 - }, - { - "epoch": 0.23136681793708178, - "learning_rate": 0.0026208967494331667, - "loss": 0.8388, - "step": 3008 - }, - { - "epoch": 0.2314437350973002, - "learning_rate": 0.002620655849975956, - "loss": 1.2687, - "step": 3009 - }, - { - "epoch": 0.23152065225751864, - "learning_rate": 0.002620414885082482, - "loss": 1.6059, - "step": 3010 - }, - { - "epoch": 0.2315975694177371, - "learning_rate": 0.002620173854766815, - "loss": 1.1278, - "step": 3011 - }, - { - "epoch": 0.23167448657795553, - "learning_rate": 0.002619932759043029, - "loss": 1.1644, - "step": 3012 - }, - { - "epoch": 0.231751403738174, - "learning_rate": 0.002619691597925203, - "loss": 1.0764, - "step": 3013 - }, - { - "epoch": 0.23182832089839242, - "learning_rate": 0.0026194503714274163, - "loss": 1.094, - "step": 3014 - }, - { - "epoch": 0.23190523805861088, - "learning_rate": 0.0026192090795637566, - "loss": 1.5712, - "step": 3015 - }, - { - "epoch": 0.2319821552188293, - "learning_rate": 0.002618967722348312, - "loss": 1.2202, - "step": 3016 - }, - { - "epoch": 0.23205907237904777, - "learning_rate": 0.0026187262997951766, - "loss": 0.9639, - "step": 3017 - }, - { - "epoch": 0.2321359895392662, - "learning_rate": 0.002618484811918446, - "loss": 1.2411, - "step": 3018 - }, - { - "epoch": 0.23221290669948466, - "learning_rate": 0.002618243258732222, - "loss": 1.0966, - "step": 3019 - }, - { - "epoch": 0.2322898238597031, - "learning_rate": 0.002618001640250608, - "loss": 0.8387, - "step": 3020 - }, - { - "epoch": 0.23236674101992155, - "learning_rate": 0.0026177599564877136, - 
"loss": 1.5191, - "step": 3021 - }, - { - "epoch": 0.23244365818013998, - "learning_rate": 0.0026175182074576506, - "loss": 1.2134, - "step": 3022 - }, - { - "epoch": 0.23252057534035844, - "learning_rate": 0.0026172763931745346, - "loss": 0.8338, - "step": 3023 - }, - { - "epoch": 0.23259749250057687, - "learning_rate": 0.0026170345136524854, - "loss": 1.2159, - "step": 3024 - }, - { - "epoch": 0.23267440966079533, - "learning_rate": 0.002616792568905627, - "loss": 1.2989, - "step": 3025 - }, - { - "epoch": 0.23275132682101377, - "learning_rate": 0.0026165505589480866, - "loss": 0.9664, - "step": 3026 - }, - { - "epoch": 0.23282824398123222, - "learning_rate": 0.0026163084837939957, - "loss": 1.0549, - "step": 3027 - }, - { - "epoch": 0.23290516114145066, - "learning_rate": 0.002616066343457489, - "loss": 1.5709, - "step": 3028 - }, - { - "epoch": 0.23298207830166912, - "learning_rate": 0.0026158241379527045, - "loss": 1.1979, - "step": 3029 - }, - { - "epoch": 0.23305899546188755, - "learning_rate": 0.0026155818672937868, - "loss": 1.2932, - "step": 3030 - }, - { - "epoch": 0.23313591262210598, - "learning_rate": 0.0026153395314948806, - "loss": 0.9649, - "step": 3031 - }, - { - "epoch": 0.23321282978232444, - "learning_rate": 0.002615097130570137, - "loss": 0.8093, - "step": 3032 - }, - { - "epoch": 0.23328974694254287, - "learning_rate": 0.00261485466453371, - "loss": 1.4998, - "step": 3033 - }, - { - "epoch": 0.23336666410276133, - "learning_rate": 0.0026146121333997566, - "loss": 1.4707, - "step": 3034 - }, - { - "epoch": 0.23344358126297976, - "learning_rate": 0.0026143695371824397, - "loss": 1.1959, - "step": 3035 - }, - { - "epoch": 0.23352049842319822, - "learning_rate": 0.002614126875895924, - "loss": 1.1982, - "step": 3036 - }, - { - "epoch": 0.23359741558341665, - "learning_rate": 0.002613884149554379, - "loss": 1.1764, - "step": 3037 - }, - { - "epoch": 0.2336743327436351, - "learning_rate": 0.0026136413581719776, - "loss": 1.2159, - "step": 3038 - }, - { - "epoch": 0.23375124990385354, - "learning_rate": 0.0026133985017628967, - "loss": 1.1605, - "step": 3039 - }, - { - "epoch": 0.233828167064072, - "learning_rate": 0.002613155580341317, - "loss": 1.3456, - "step": 3040 - }, - { - "epoch": 0.23390508422429043, - "learning_rate": 0.0026129125939214222, - "loss": 1.174, - "step": 3041 - }, - { - "epoch": 0.2339820013845089, - "learning_rate": 0.0026126695425174023, - "loss": 0.9774, - "step": 3042 - }, - { - "epoch": 0.23405891854472732, - "learning_rate": 0.0026124264261434475, - "loss": 1.4991, - "step": 3043 - }, - { - "epoch": 0.23413583570494578, - "learning_rate": 0.0026121832448137545, - "loss": 1.1045, - "step": 3044 - }, - { - "epoch": 0.23421275286516421, - "learning_rate": 0.002611939998542523, - "loss": 0.762, - "step": 3045 - }, - { - "epoch": 0.23428967002538267, - "learning_rate": 0.0026116966873439553, - "loss": 1.2953, - "step": 3046 - }, - { - "epoch": 0.2343665871856011, - "learning_rate": 0.00261145331123226, - "loss": 1.1405, - "step": 3047 - }, - { - "epoch": 0.23444350434581956, - "learning_rate": 0.0026112098702216475, - "loss": 1.0199, - "step": 3048 - }, - { - "epoch": 0.234520421506038, - "learning_rate": 0.002610966364326333, - "loss": 0.9345, - "step": 3049 - }, - { - "epoch": 0.23459733866625646, - "learning_rate": 0.002610722793560534, - "loss": 1.4446, - "step": 3050 - }, - { - "epoch": 0.2346742558264749, - "learning_rate": 0.002610479157938474, - "loss": 0.9241, - "step": 3051 - }, - { - "epoch": 0.23475117298669332, - "learning_rate": 
0.0026102354574743785, - "loss": 1.2471, - "step": 3052 - }, - { - "epoch": 0.23482809014691178, - "learning_rate": 0.002609991692182478, - "loss": 1.2094, - "step": 3053 - }, - { - "epoch": 0.2349050073071302, - "learning_rate": 0.002609747862077005, - "loss": 1.0292, - "step": 3054 - }, - { - "epoch": 0.23498192446734867, - "learning_rate": 0.0026095039671721982, - "loss": 1.069, - "step": 3055 - }, - { - "epoch": 0.2350588416275671, - "learning_rate": 0.002609260007482298, - "loss": 0.7545, - "step": 3056 - }, - { - "epoch": 0.23513575878778556, - "learning_rate": 0.0026090159830215506, - "loss": 1.3558, - "step": 3057 - }, - { - "epoch": 0.235212675948004, - "learning_rate": 0.0026087718938042036, - "loss": 1.3914, - "step": 3058 - }, - { - "epoch": 0.23528959310822245, - "learning_rate": 0.0026085277398445105, - "loss": 1.0881, - "step": 3059 - }, - { - "epoch": 0.23536651026844088, - "learning_rate": 0.0026082835211567275, - "loss": 1.2548, - "step": 3060 - }, - { - "epoch": 0.23544342742865934, - "learning_rate": 0.0026080392377551146, - "loss": 1.3836, - "step": 3061 - }, - { - "epoch": 0.23552034458887777, - "learning_rate": 0.0026077948896539357, - "loss": 1.0861, - "step": 3062 - }, - { - "epoch": 0.23559726174909623, - "learning_rate": 0.002607550476867459, - "loss": 1.1892, - "step": 3063 - }, - { - "epoch": 0.23567417890931466, - "learning_rate": 0.0026073059994099557, - "loss": 1.0239, - "step": 3064 - }, - { - "epoch": 0.23575109606953312, - "learning_rate": 0.0026070614572957004, - "loss": 0.9831, - "step": 3065 - }, - { - "epoch": 0.23582801322975155, - "learning_rate": 0.002606816850538973, - "loss": 1.5357, - "step": 3066 - }, - { - "epoch": 0.23590493038997, - "learning_rate": 0.0026065721791540567, - "loss": 1.1955, - "step": 3067 - }, - { - "epoch": 0.23598184755018844, - "learning_rate": 0.002606327443155237, - "loss": 1.1478, - "step": 3068 - }, - { - "epoch": 0.2360587647104069, - "learning_rate": 0.0026060826425568055, - "loss": 1.3183, - "step": 3069 - }, - { - "epoch": 0.23613568187062534, - "learning_rate": 0.002605837777373056, - "loss": 1.1357, - "step": 3070 - }, - { - "epoch": 0.2362125990308438, - "learning_rate": 0.0026055928476182853, - "loss": 0.9016, - "step": 3071 - }, - { - "epoch": 0.23628951619106223, - "learning_rate": 0.002605347853306797, - "loss": 1.3511, - "step": 3072 - }, - { - "epoch": 0.23636643335128066, - "learning_rate": 0.0026051027944528947, - "loss": 1.056, - "step": 3073 - }, - { - "epoch": 0.23644335051149912, - "learning_rate": 0.0026048576710708893, - "loss": 1.1846, - "step": 3074 - }, - { - "epoch": 0.23652026767171755, - "learning_rate": 0.0026046124831750925, - "loss": 1.0967, - "step": 3075 - }, - { - "epoch": 0.236597184831936, - "learning_rate": 0.0026043672307798217, - "loss": 1.1679, - "step": 3076 - }, - { - "epoch": 0.23667410199215444, - "learning_rate": 0.0026041219138993976, - "loss": 1.1025, - "step": 3077 - }, - { - "epoch": 0.2367510191523729, - "learning_rate": 0.002603876532548144, - "loss": 1.0724, - "step": 3078 - }, - { - "epoch": 0.23682793631259133, - "learning_rate": 0.002603631086740389, - "loss": 1.1619, - "step": 3079 - }, - { - "epoch": 0.2369048534728098, - "learning_rate": 0.002603385576490465, - "loss": 1.3809, - "step": 3080 - }, - { - "epoch": 0.23698177063302822, - "learning_rate": 0.002603140001812707, - "loss": 1.0079, - "step": 3081 - }, - { - "epoch": 0.23705868779324668, - "learning_rate": 0.002602894362721455, - "loss": 1.2764, - "step": 3082 - }, - { - "epoch": 0.2371356049534651, - 
"learning_rate": 0.0026026486592310516, - "loss": 1.2921, - "step": 3083 - }, - { - "epoch": 0.23721252211368357, - "learning_rate": 0.0026024028913558435, - "loss": 1.4317, - "step": 3084 - }, - { - "epoch": 0.237289439273902, - "learning_rate": 0.002602157059110182, - "loss": 1.2007, - "step": 3085 - }, - { - "epoch": 0.23736635643412046, - "learning_rate": 0.0026019111625084214, - "loss": 0.9553, - "step": 3086 - }, - { - "epoch": 0.2374432735943389, - "learning_rate": 0.0026016652015649192, - "loss": 1.2116, - "step": 3087 - }, - { - "epoch": 0.23752019075455735, - "learning_rate": 0.002601419176294038, - "loss": 1.376, - "step": 3088 - }, - { - "epoch": 0.23759710791477578, - "learning_rate": 0.0026011730867101434, - "loss": 1.2945, - "step": 3089 - }, - { - "epoch": 0.23767402507499424, - "learning_rate": 0.002600926932827604, - "loss": 0.9543, - "step": 3090 - }, - { - "epoch": 0.23775094223521268, - "learning_rate": 0.0026006807146607943, - "loss": 1.1269, - "step": 3091 - }, - { - "epoch": 0.2378278593954311, - "learning_rate": 0.0026004344322240903, - "loss": 1.3008, - "step": 3092 - }, - { - "epoch": 0.23790477655564957, - "learning_rate": 0.002600188085531873, - "loss": 0.9291, - "step": 3093 - }, - { - "epoch": 0.237981693715868, - "learning_rate": 0.002599941674598527, - "loss": 0.887, - "step": 3094 - }, - { - "epoch": 0.23805861087608646, - "learning_rate": 0.00259969519943844, - "loss": 0.9833, - "step": 3095 - }, - { - "epoch": 0.2381355280363049, - "learning_rate": 0.0025994486600660043, - "loss": 1.3557, - "step": 3096 - }, - { - "epoch": 0.23821244519652335, - "learning_rate": 0.002599202056495615, - "loss": 1.0028, - "step": 3097 - }, - { - "epoch": 0.23828936235674178, - "learning_rate": 0.002598955388741673, - "loss": 1.2178, - "step": 3098 - }, - { - "epoch": 0.23836627951696024, - "learning_rate": 0.0025987086568185803, - "loss": 0.8754, - "step": 3099 - }, - { - "epoch": 0.23844319667717867, - "learning_rate": 0.0025984618607407437, - "loss": 1.0655, - "step": 3100 - }, - { - "epoch": 0.23852011383739713, - "learning_rate": 0.0025982150005225742, - "loss": 1.1286, - "step": 3101 - }, - { - "epoch": 0.23859703099761556, - "learning_rate": 0.002597968076178487, - "loss": 1.0951, - "step": 3102 - }, - { - "epoch": 0.23867394815783402, - "learning_rate": 0.002597721087722899, - "loss": 1.1224, - "step": 3103 - }, - { - "epoch": 0.23875086531805245, - "learning_rate": 0.002597474035170233, - "loss": 1.2548, - "step": 3104 - }, - { - "epoch": 0.2388277824782709, - "learning_rate": 0.002597226918534914, - "loss": 1.2096, - "step": 3105 - }, - { - "epoch": 0.23890469963848934, - "learning_rate": 0.002596979737831372, - "loss": 1.1102, - "step": 3106 - }, - { - "epoch": 0.2389816167987078, - "learning_rate": 0.0025967324930740395, - "loss": 1.0185, - "step": 3107 - }, - { - "epoch": 0.23905853395892623, - "learning_rate": 0.002596485184277354, - "loss": 1.2696, - "step": 3108 - }, - { - "epoch": 0.2391354511191447, - "learning_rate": 0.002596237811455756, - "loss": 1.011, - "step": 3109 - }, - { - "epoch": 0.23921236827936312, - "learning_rate": 0.002595990374623689, - "loss": 1.3151, - "step": 3110 - }, - { - "epoch": 0.23928928543958158, - "learning_rate": 0.0025957428737956026, - "loss": 1.1528, - "step": 3111 - }, - { - "epoch": 0.23936620259980002, - "learning_rate": 0.0025954953089859477, - "loss": 0.8947, - "step": 3112 - }, - { - "epoch": 0.23944311976001845, - "learning_rate": 0.00259524768020918, - "loss": 1.2746, - "step": 3113 - }, - { - "epoch": 
0.2395200369202369, - "learning_rate": 0.0025949999874797587, - "loss": 1.4013, - "step": 3114 - }, - { - "epoch": 0.23959695408045534, - "learning_rate": 0.002594752230812147, - "loss": 1.1671, - "step": 3115 - }, - { - "epoch": 0.2396738712406738, - "learning_rate": 0.0025945044102208114, - "loss": 0.9244, - "step": 3116 - }, - { - "epoch": 0.23975078840089223, - "learning_rate": 0.002594256525720223, - "loss": 1.1692, - "step": 3117 - }, - { - "epoch": 0.2398277055611107, - "learning_rate": 0.002594008577324855, - "loss": 1.4609, - "step": 3118 - }, - { - "epoch": 0.23990462272132912, - "learning_rate": 0.0025937605650491868, - "loss": 1.0407, - "step": 3119 - }, - { - "epoch": 0.23998153988154758, - "learning_rate": 0.0025935124889076987, - "loss": 0.9881, - "step": 3120 - }, - { - "epoch": 0.240058457041766, - "learning_rate": 0.0025932643489148775, - "loss": 1.289, - "step": 3121 - }, - { - "epoch": 0.24013537420198447, - "learning_rate": 0.002593016145085211, - "loss": 0.8986, - "step": 3122 - }, - { - "epoch": 0.2402122913622029, - "learning_rate": 0.0025927678774331927, - "loss": 1.3161, - "step": 3123 - }, - { - "epoch": 0.24028920852242136, - "learning_rate": 0.0025925195459733197, - "loss": 1.1897, - "step": 3124 - }, - { - "epoch": 0.2403661256826398, - "learning_rate": 0.0025922711507200915, - "loss": 1.3164, - "step": 3125 - }, - { - "epoch": 0.24044304284285825, - "learning_rate": 0.002592022691688012, - "loss": 1.0286, - "step": 3126 - }, - { - "epoch": 0.24051996000307668, - "learning_rate": 0.0025917741688915898, - "loss": 0.7483, - "step": 3127 - }, - { - "epoch": 0.24059687716329514, - "learning_rate": 0.002591525582345336, - "loss": 1.1146, - "step": 3128 - }, - { - "epoch": 0.24067379432351357, - "learning_rate": 0.002591276932063766, - "loss": 1.4364, - "step": 3129 - }, - { - "epoch": 0.24075071148373203, - "learning_rate": 0.002591028218061399, - "loss": 0.9923, - "step": 3130 - }, - { - "epoch": 0.24082762864395046, - "learning_rate": 0.0025907794403527565, - "loss": 0.8849, - "step": 3131 - }, - { - "epoch": 0.24090454580416892, - "learning_rate": 0.0025905305989523666, - "loss": 1.4039, - "step": 3132 - }, - { - "epoch": 0.24098146296438736, - "learning_rate": 0.0025902816938747574, - "loss": 1.1842, - "step": 3133 - }, - { - "epoch": 0.2410583801246058, - "learning_rate": 0.002590032725134465, - "loss": 1.0936, - "step": 3134 - }, - { - "epoch": 0.24113529728482425, - "learning_rate": 0.0025897836927460246, - "loss": 1.1739, - "step": 3135 - }, - { - "epoch": 0.24121221444504268, - "learning_rate": 0.00258953459672398, - "loss": 1.2149, - "step": 3136 - }, - { - "epoch": 0.24128913160526114, - "learning_rate": 0.002589285437082874, - "loss": 1.1202, - "step": 3137 - }, - { - "epoch": 0.24136604876547957, - "learning_rate": 0.0025890362138372555, - "loss": 1.5392, - "step": 3138 - }, - { - "epoch": 0.24144296592569803, - "learning_rate": 0.002588786927001678, - "loss": 1.0236, - "step": 3139 - }, - { - "epoch": 0.24151988308591646, - "learning_rate": 0.0025885375765906976, - "loss": 1.0635, - "step": 3140 - }, - { - "epoch": 0.24159680024613492, - "learning_rate": 0.0025882881626188734, - "loss": 0.8874, - "step": 3141 - }, - { - "epoch": 0.24167371740635335, - "learning_rate": 0.002588038685100769, - "loss": 0.9573, - "step": 3142 - }, - { - "epoch": 0.2417506345665718, - "learning_rate": 0.0025877891440509527, - "loss": 1.0899, - "step": 3143 - }, - { - "epoch": 0.24182755172679024, - "learning_rate": 0.0025875395394839933, - "loss": 1.8011, - "step": 3144 - 
}, - { - "epoch": 0.2419044688870087, - "learning_rate": 0.002587289871414467, - "loss": 1.4425, - "step": 3145 - }, - { - "epoch": 0.24198138604722713, - "learning_rate": 0.002587040139856953, - "loss": 1.1479, - "step": 3146 - }, - { - "epoch": 0.2420583032074456, - "learning_rate": 0.0025867903448260317, - "loss": 1.3082, - "step": 3147 - }, - { - "epoch": 0.24213522036766402, - "learning_rate": 0.0025865404863362894, - "loss": 1.0346, - "step": 3148 - }, - { - "epoch": 0.24221213752788248, - "learning_rate": 0.002586290564402316, - "loss": 1.2068, - "step": 3149 - }, - { - "epoch": 0.2422890546881009, - "learning_rate": 0.002586040579038704, - "loss": 1.2679, - "step": 3150 - }, - { - "epoch": 0.24236597184831937, - "learning_rate": 0.0025857905302600517, - "loss": 1.2732, - "step": 3151 - }, - { - "epoch": 0.2424428890085378, - "learning_rate": 0.0025855404180809578, - "loss": 1.1605, - "step": 3152 - }, - { - "epoch": 0.24251980616875624, - "learning_rate": 0.002585290242516028, - "loss": 1.1359, - "step": 3153 - }, - { - "epoch": 0.2425967233289747, - "learning_rate": 0.0025850400035798697, - "loss": 1.3522, - "step": 3154 - }, - { - "epoch": 0.24267364048919313, - "learning_rate": 0.0025847897012870945, - "loss": 1.2395, - "step": 3155 - }, - { - "epoch": 0.24275055764941159, - "learning_rate": 0.0025845393356523187, - "loss": 1.394, - "step": 3156 - }, - { - "epoch": 0.24282747480963002, - "learning_rate": 0.0025842889066901606, - "loss": 1.3979, - "step": 3157 - }, - { - "epoch": 0.24290439196984848, - "learning_rate": 0.0025840384144152433, - "loss": 1.0034, - "step": 3158 - }, - { - "epoch": 0.2429813091300669, - "learning_rate": 0.002583787858842193, - "loss": 1.0498, - "step": 3159 - }, - { - "epoch": 0.24305822629028537, - "learning_rate": 0.00258353723998564, - "loss": 1.3022, - "step": 3160 - }, - { - "epoch": 0.2431351434505038, - "learning_rate": 0.0025832865578602187, - "loss": 1.4339, - "step": 3161 - }, - { - "epoch": 0.24321206061072226, - "learning_rate": 0.0025830358124805664, - "loss": 1.281, - "step": 3162 - }, - { - "epoch": 0.2432889777709407, - "learning_rate": 0.002582785003861324, - "loss": 1.1118, - "step": 3163 - }, - { - "epoch": 0.24336589493115915, - "learning_rate": 0.002582534132017137, - "loss": 0.7518, - "step": 3164 - }, - { - "epoch": 0.24344281209137758, - "learning_rate": 0.0025822831969626535, - "loss": 1.0684, - "step": 3165 - }, - { - "epoch": 0.24351972925159604, - "learning_rate": 0.002582032198712526, - "loss": 1.2551, - "step": 3166 - }, - { - "epoch": 0.24359664641181447, - "learning_rate": 0.0025817811372814115, - "loss": 0.9243, - "step": 3167 - }, - { - "epoch": 0.24367356357203293, - "learning_rate": 0.0025815300126839683, - "loss": 1.2287, - "step": 3168 - }, - { - "epoch": 0.24375048073225136, - "learning_rate": 0.002581278824934861, - "loss": 1.2283, - "step": 3169 - }, - { - "epoch": 0.24382739789246982, - "learning_rate": 0.002581027574048756, - "loss": 1.0643, - "step": 3170 - }, - { - "epoch": 0.24390431505268825, - "learning_rate": 0.0025807762600403247, - "loss": 1.001, - "step": 3171 - }, - { - "epoch": 0.2439812322129067, - "learning_rate": 0.0025805248829242405, - "loss": 1.1246, - "step": 3172 - }, - { - "epoch": 0.24405814937312514, - "learning_rate": 0.002580273442715183, - "loss": 1.231, - "step": 3173 - }, - { - "epoch": 0.24413506653334358, - "learning_rate": 0.0025800219394278333, - "loss": 1.3881, - "step": 3174 - }, - { - "epoch": 0.24421198369356203, - "learning_rate": 0.0025797703730768766, - "loss": 1.21, - 
"step": 3175 - }, - { - "epoch": 0.24428890085378047, - "learning_rate": 0.0025795187436770026, - "loss": 1.2494, - "step": 3176 - }, - { - "epoch": 0.24436581801399893, - "learning_rate": 0.002579267051242904, - "loss": 1.3317, - "step": 3177 - }, - { - "epoch": 0.24444273517421736, - "learning_rate": 0.0025790152957892777, - "loss": 1.2651, - "step": 3178 - }, - { - "epoch": 0.24451965233443582, - "learning_rate": 0.0025787634773308237, - "loss": 1.108, - "step": 3179 - }, - { - "epoch": 0.24459656949465425, - "learning_rate": 0.0025785115958822462, - "loss": 0.9391, - "step": 3180 - }, - { - "epoch": 0.2446734866548727, - "learning_rate": 0.002578259651458252, - "loss": 1.1423, - "step": 3181 - }, - { - "epoch": 0.24475040381509114, - "learning_rate": 0.002578007644073554, - "loss": 1.1358, - "step": 3182 - }, - { - "epoch": 0.2448273209753096, - "learning_rate": 0.0025777555737428654, - "loss": 1.2116, - "step": 3183 - }, - { - "epoch": 0.24490423813552803, - "learning_rate": 0.0025775034404809057, - "loss": 1.3328, - "step": 3184 - }, - { - "epoch": 0.2449811552957465, - "learning_rate": 0.0025772512443023973, - "loss": 0.993, - "step": 3185 - }, - { - "epoch": 0.24505807245596492, - "learning_rate": 0.0025769989852220663, - "loss": 0.756, - "step": 3186 - }, - { - "epoch": 0.24513498961618338, - "learning_rate": 0.0025767466632546417, - "loss": 1.2895, - "step": 3187 - }, - { - "epoch": 0.2452119067764018, - "learning_rate": 0.002576494278414858, - "loss": 1.0305, - "step": 3188 - }, - { - "epoch": 0.24528882393662027, - "learning_rate": 0.0025762418307174505, - "loss": 1.4926, - "step": 3189 - }, - { - "epoch": 0.2453657410968387, - "learning_rate": 0.0025759893201771616, - "loss": 1.1996, - "step": 3190 - }, - { - "epoch": 0.24544265825705716, - "learning_rate": 0.0025757367468087346, - "loss": 1.2316, - "step": 3191 - }, - { - "epoch": 0.2455195754172756, - "learning_rate": 0.002575484110626918, - "loss": 0.8187, - "step": 3192 - }, - { - "epoch": 0.24559649257749405, - "learning_rate": 0.0025752314116464633, - "loss": 1.4764, - "step": 3193 - }, - { - "epoch": 0.24567340973771248, - "learning_rate": 0.002574978649882126, - "loss": 1.0287, - "step": 3194 - }, - { - "epoch": 0.24575032689793092, - "learning_rate": 0.0025747258253486643, - "loss": 1.2878, - "step": 3195 - }, - { - "epoch": 0.24582724405814937, - "learning_rate": 0.0025744729380608425, - "loss": 1.1815, - "step": 3196 - }, - { - "epoch": 0.2459041612183678, - "learning_rate": 0.002574219988033426, - "loss": 0.8401, - "step": 3197 - }, - { - "epoch": 0.24598107837858627, - "learning_rate": 0.0025739669752811844, - "loss": 1.1133, - "step": 3198 - }, - { - "epoch": 0.2460579955388047, - "learning_rate": 0.0025737138998188917, - "loss": 1.265, - "step": 3199 - }, - { - "epoch": 0.24613491269902316, - "learning_rate": 0.002573460761661326, - "loss": 1.5385, - "step": 3200 - }, - { - "epoch": 0.2462118298592416, - "learning_rate": 0.0025732075608232677, - "loss": 0.9086, - "step": 3201 - }, - { - "epoch": 0.24628874701946005, - "learning_rate": 0.002572954297319501, - "loss": 0.8488, - "step": 3202 - }, - { - "epoch": 0.24636566417967848, - "learning_rate": 0.002572700971164815, - "loss": 0.7496, - "step": 3203 - }, - { - "epoch": 0.24644258133989694, - "learning_rate": 0.0025724475823740013, - "loss": 1.1925, - "step": 3204 - }, - { - "epoch": 0.24651949850011537, - "learning_rate": 0.0025721941309618554, - "loss": 1.2115, - "step": 3205 - }, - { - "epoch": 0.24659641566033383, - "learning_rate": 0.0025719406169431767, - 
"loss": 1.2288, - "step": 3206 - }, - { - "epoch": 0.24667333282055226, - "learning_rate": 0.0025716870403327684, - "loss": 1.0751, - "step": 3207 - }, - { - "epoch": 0.24675024998077072, - "learning_rate": 0.0025714334011454373, - "loss": 1.1344, - "step": 3208 - }, - { - "epoch": 0.24682716714098915, - "learning_rate": 0.0025711796993959928, - "loss": 1.2588, - "step": 3209 - }, - { - "epoch": 0.2469040843012076, - "learning_rate": 0.0025709259350992497, - "loss": 1.3606, - "step": 3210 - }, - { - "epoch": 0.24698100146142604, - "learning_rate": 0.0025706721082700244, - "loss": 1.2679, - "step": 3211 - }, - { - "epoch": 0.2470579186216445, - "learning_rate": 0.0025704182189231397, - "loss": 1.1779, - "step": 3212 - }, - { - "epoch": 0.24713483578186293, - "learning_rate": 0.002570164267073419, - "loss": 1.2455, - "step": 3213 - }, - { - "epoch": 0.2472117529420814, - "learning_rate": 0.002569910252735692, - "loss": 1.1148, - "step": 3214 - }, - { - "epoch": 0.24728867010229982, - "learning_rate": 0.00256965617592479, - "loss": 1.2124, - "step": 3215 - }, - { - "epoch": 0.24736558726251826, - "learning_rate": 0.002569402036655549, - "loss": 1.5176, - "step": 3216 - }, - { - "epoch": 0.24744250442273671, - "learning_rate": 0.002569147834942809, - "loss": 1.4819, - "step": 3217 - }, - { - "epoch": 0.24751942158295515, - "learning_rate": 0.0025688935708014123, - "loss": 1.2158, - "step": 3218 - }, - { - "epoch": 0.2475963387431736, - "learning_rate": 0.0025686392442462063, - "loss": 1.4652, - "step": 3219 - }, - { - "epoch": 0.24767325590339204, - "learning_rate": 0.0025683848552920407, - "loss": 1.0987, - "step": 3220 - }, - { - "epoch": 0.2477501730636105, - "learning_rate": 0.0025681304039537702, - "loss": 1.4371, - "step": 3221 - }, - { - "epoch": 0.24782709022382893, - "learning_rate": 0.0025678758902462524, - "loss": 1.0194, - "step": 3222 - }, - { - "epoch": 0.2479040073840474, - "learning_rate": 0.002567621314184348, - "loss": 1.3744, - "step": 3223 - }, - { - "epoch": 0.24798092454426582, - "learning_rate": 0.0025673666757829233, - "loss": 0.9836, - "step": 3224 - }, - { - "epoch": 0.24805784170448428, - "learning_rate": 0.002567111975056845, - "loss": 1.145, - "step": 3225 - }, - { - "epoch": 0.2481347588647027, - "learning_rate": 0.0025668572120209872, - "loss": 1.5748, - "step": 3226 - }, - { - "epoch": 0.24821167602492117, - "learning_rate": 0.0025666023866902244, - "loss": 1.2518, - "step": 3227 - }, - { - "epoch": 0.2482885931851396, - "learning_rate": 0.0025663474990794367, - "loss": 1.1501, - "step": 3228 - }, - { - "epoch": 0.24836551034535806, - "learning_rate": 0.0025660925492035076, - "loss": 1.4456, - "step": 3229 - }, - { - "epoch": 0.2484424275055765, - "learning_rate": 0.0025658375370773232, - "loss": 1.1321, - "step": 3230 - }, - { - "epoch": 0.24851934466579495, - "learning_rate": 0.002565582462715774, - "loss": 1.1396, - "step": 3231 - }, - { - "epoch": 0.24859626182601338, - "learning_rate": 0.0025653273261337546, - "loss": 1.3533, - "step": 3232 - }, - { - "epoch": 0.24867317898623184, - "learning_rate": 0.002565072127346162, - "loss": 0.9253, - "step": 3233 - }, - { - "epoch": 0.24875009614645027, - "learning_rate": 0.002564816866367898, - "loss": 1.325, - "step": 3234 - }, - { - "epoch": 0.2488270133066687, - "learning_rate": 0.002564561543213868, - "loss": 0.815, - "step": 3235 - }, - { - "epoch": 0.24890393046688716, - "learning_rate": 0.0025643061578989796, - "loss": 1.3108, - "step": 3236 - }, - { - "epoch": 0.2489808476271056, - "learning_rate": 
0.0025640507104381454, - "loss": 1.0179, - "step": 3237 - }, - { - "epoch": 0.24905776478732405, - "learning_rate": 0.0025637952008462817, - "loss": 1.049, - "step": 3238 - }, - { - "epoch": 0.24913468194754249, - "learning_rate": 0.002563539629138307, - "loss": 1.2534, - "step": 3239 - }, - { - "epoch": 0.24921159910776094, - "learning_rate": 0.0025632839953291455, - "loss": 0.7798, - "step": 3240 - }, - { - "epoch": 0.24928851626797938, - "learning_rate": 0.002563028299433723, - "loss": 1.1917, - "step": 3241 - }, - { - "epoch": 0.24936543342819784, - "learning_rate": 0.0025627725414669703, - "loss": 1.0835, - "step": 3242 - }, - { - "epoch": 0.24944235058841627, - "learning_rate": 0.0025625167214438213, - "loss": 1.2254, - "step": 3243 - }, - { - "epoch": 0.24951926774863473, - "learning_rate": 0.0025622608393792135, - "loss": 1.1073, - "step": 3244 - }, - { - "epoch": 0.24959618490885316, - "learning_rate": 0.0025620048952880886, - "loss": 1.2777, - "step": 3245 - }, - { - "epoch": 0.24967310206907162, - "learning_rate": 0.0025617488891853902, - "loss": 1.0222, - "step": 3246 - }, - { - "epoch": 0.24975001922929005, - "learning_rate": 0.0025614928210860688, - "loss": 1.0946, - "step": 3247 - }, - { - "epoch": 0.2498269363895085, - "learning_rate": 0.0025612366910050745, - "loss": 1.0655, - "step": 3248 - }, - { - "epoch": 0.24990385354972694, - "learning_rate": 0.0025609804989573646, - "loss": 1.0671, - "step": 3249 - }, - { - "epoch": 0.2499807707099454, - "learning_rate": 0.0025607242449578967, - "loss": 1.127, - "step": 3250 - }, - { - "epoch": 0.25005768787016386, - "learning_rate": 0.002560467929021635, - "loss": 1.0283, - "step": 3251 - }, - { - "epoch": 0.25013460503038226, - "learning_rate": 0.0025602115511635464, - "loss": 1.198, - "step": 3252 - }, - { - "epoch": 0.2502115221906007, - "learning_rate": 0.0025599551113985996, - "loss": 1.2752, - "step": 3253 - }, - { - "epoch": 0.2502884393508192, - "learning_rate": 0.0025596986097417692, - "loss": 0.867, - "step": 3254 - }, - { - "epoch": 0.25036535651103764, - "learning_rate": 0.0025594420462080334, - "loss": 1.1996, - "step": 3255 - }, - { - "epoch": 0.25044227367125604, - "learning_rate": 0.0025591854208123713, - "loss": 1.1055, - "step": 3256 - }, - { - "epoch": 0.2505191908314745, - "learning_rate": 0.0025589287335697694, - "loss": 1.3893, - "step": 3257 - }, - { - "epoch": 0.25059610799169296, - "learning_rate": 0.0025586719844952147, - "loss": 1.2048, - "step": 3258 - }, - { - "epoch": 0.25067302515191137, - "learning_rate": 0.0025584151736037, - "loss": 1.031, - "step": 3259 - }, - { - "epoch": 0.2507499423121298, - "learning_rate": 0.00255815830091022, - "loss": 1.6608, - "step": 3260 - }, - { - "epoch": 0.2508268594723483, - "learning_rate": 0.0025579013664297737, - "loss": 1.3659, - "step": 3261 - }, - { - "epoch": 0.25090377663256674, - "learning_rate": 0.0025576443701773644, - "loss": 1.1875, - "step": 3262 - }, - { - "epoch": 0.25098069379278515, - "learning_rate": 0.002557387312167998, - "loss": 1.1799, - "step": 3263 - }, - { - "epoch": 0.2510576109530036, - "learning_rate": 0.0025571301924166845, - "loss": 1.3114, - "step": 3264 - }, - { - "epoch": 0.25113452811322207, - "learning_rate": 0.002556873010938437, - "loss": 1.3133, - "step": 3265 - }, - { - "epoch": 0.2512114452734405, - "learning_rate": 0.002556615767748274, - "loss": 1.0744, - "step": 3266 - }, - { - "epoch": 0.25128836243365893, - "learning_rate": 0.0025563584628612145, - "loss": 1.1149, - "step": 3267 - }, - { - "epoch": 0.2513652795938774, - 
"learning_rate": 0.0025561010962922835, - "loss": 0.9961, - "step": 3268 - }, - { - "epoch": 0.25144219675409585, - "learning_rate": 0.0025558436680565085, - "loss": 1.5148, - "step": 3269 - }, - { - "epoch": 0.2515191139143143, - "learning_rate": 0.0025555861781689213, - "loss": 1.1389, - "step": 3270 - }, - { - "epoch": 0.2515960310745327, - "learning_rate": 0.002555328626644558, - "loss": 0.9799, - "step": 3271 - }, - { - "epoch": 0.25167294823475117, - "learning_rate": 0.0025550710134984553, - "loss": 1.1286, - "step": 3272 - }, - { - "epoch": 0.25174986539496963, - "learning_rate": 0.002554813338745657, - "loss": 1.282, - "step": 3273 - }, - { - "epoch": 0.2518267825551881, - "learning_rate": 0.0025545556024012084, - "loss": 1.2751, - "step": 3274 - }, - { - "epoch": 0.2519036997154065, - "learning_rate": 0.002554297804480159, - "loss": 1.2496, - "step": 3275 - }, - { - "epoch": 0.25198061687562495, - "learning_rate": 0.0025540399449975625, - "loss": 0.9874, - "step": 3276 - }, - { - "epoch": 0.2520575340358434, - "learning_rate": 0.002553782023968475, - "loss": 1.2749, - "step": 3277 - }, - { - "epoch": 0.2521344511960618, - "learning_rate": 0.002553524041407957, - "loss": 1.1992, - "step": 3278 - }, - { - "epoch": 0.2522113683562803, - "learning_rate": 0.002553265997331072, - "loss": 1.1654, - "step": 3279 - }, - { - "epoch": 0.25228828551649873, - "learning_rate": 0.002553007891752888, - "loss": 1.2483, - "step": 3280 - }, - { - "epoch": 0.2523652026767172, - "learning_rate": 0.002552749724688475, - "loss": 1.2375, - "step": 3281 - }, - { - "epoch": 0.2524421198369356, - "learning_rate": 0.0025524914961529096, - "loss": 1.2751, - "step": 3282 - }, - { - "epoch": 0.25251903699715406, - "learning_rate": 0.0025522332061612683, - "loss": 1.1489, - "step": 3283 - }, - { - "epoch": 0.2525959541573725, - "learning_rate": 0.002551974854728633, - "loss": 1.3221, - "step": 3284 - }, - { - "epoch": 0.252672871317591, - "learning_rate": 0.0025517164418700903, - "loss": 1.2766, - "step": 3285 - }, - { - "epoch": 0.2527497884778094, - "learning_rate": 0.002551457967600728, - "loss": 1.1042, - "step": 3286 - }, - { - "epoch": 0.25282670563802784, - "learning_rate": 0.0025511994319356393, - "loss": 1.1367, - "step": 3287 - }, - { - "epoch": 0.2529036227982463, - "learning_rate": 0.0025509408348899205, - "loss": 1.4032, - "step": 3288 - }, - { - "epoch": 0.25298053995846476, - "learning_rate": 0.002550682176478671, - "loss": 1.0402, - "step": 3289 - }, - { - "epoch": 0.25305745711868316, - "learning_rate": 0.0025504234567169937, - "loss": 1.1797, - "step": 3290 - }, - { - "epoch": 0.2531343742789016, - "learning_rate": 0.0025501646756199967, - "loss": 1.221, - "step": 3291 - }, - { - "epoch": 0.2532112914391201, - "learning_rate": 0.0025499058332027896, - "loss": 1.2086, - "step": 3292 - }, - { - "epoch": 0.25328820859933854, - "learning_rate": 0.0025496469294804865, - "loss": 1.2087, - "step": 3293 - }, - { - "epoch": 0.25336512575955694, - "learning_rate": 0.002549387964468206, - "loss": 0.7126, - "step": 3294 - }, - { - "epoch": 0.2534420429197754, - "learning_rate": 0.002549128938181068, - "loss": 1.2015, - "step": 3295 - }, - { - "epoch": 0.25351896007999386, - "learning_rate": 0.0025488698506341977, - "loss": 1.4902, - "step": 3296 - }, - { - "epoch": 0.25359587724021226, - "learning_rate": 0.0025486107018427243, - "loss": 1.5627, - "step": 3297 - }, - { - "epoch": 0.2536727944004307, - "learning_rate": 0.0025483514918217785, - "loss": 1.1413, - "step": 3298 - }, - { - "epoch": 
0.2537497115606492, - "learning_rate": 0.002548092220586497, - "loss": 1.0084, - "step": 3299 - }, - { - "epoch": 0.25382662872086764, - "learning_rate": 0.0025478328881520184, - "loss": 1.2909, - "step": 3300 - }, - { - "epoch": 0.25390354588108605, - "learning_rate": 0.002547573494533486, - "loss": 1.3393, - "step": 3301 - }, - { - "epoch": 0.2539804630413045, - "learning_rate": 0.0025473140397460443, - "loss": 1.1288, - "step": 3302 - }, - { - "epoch": 0.25405738020152296, - "learning_rate": 0.002547054523804845, - "loss": 1.08, - "step": 3303 - }, - { - "epoch": 0.2541342973617414, - "learning_rate": 0.002546794946725041, - "loss": 0.8534, - "step": 3304 - }, - { - "epoch": 0.2542112145219598, - "learning_rate": 0.0025465353085217892, - "loss": 1.3742, - "step": 3305 - }, - { - "epoch": 0.2542881316821783, - "learning_rate": 0.0025462756092102493, - "loss": 0.9702, - "step": 3306 - }, - { - "epoch": 0.25436504884239675, - "learning_rate": 0.0025460158488055866, - "loss": 1.2243, - "step": 3307 - }, - { - "epoch": 0.2544419660026152, - "learning_rate": 0.0025457560273229687, - "loss": 1.3708, - "step": 3308 - }, - { - "epoch": 0.2545188831628336, - "learning_rate": 0.002545496144777566, - "loss": 1.0532, - "step": 3309 - }, - { - "epoch": 0.25459580032305207, - "learning_rate": 0.002545236201184554, - "loss": 1.21, - "step": 3310 - }, - { - "epoch": 0.2546727174832705, - "learning_rate": 0.002544976196559111, - "loss": 1.0845, - "step": 3311 - }, - { - "epoch": 0.254749634643489, - "learning_rate": 0.002544716130916419, - "loss": 0.9006, - "step": 3312 - }, - { - "epoch": 0.2548265518037074, - "learning_rate": 0.002544456004271663, - "loss": 1.0491, - "step": 3313 - }, - { - "epoch": 0.25490346896392585, - "learning_rate": 0.0025441958166400325, - "loss": 1.2703, - "step": 3314 - }, - { - "epoch": 0.2549803861241443, - "learning_rate": 0.00254393556803672, - "loss": 1.1311, - "step": 3315 - }, - { - "epoch": 0.25505730328436277, - "learning_rate": 0.0025436752584769213, - "loss": 1.0844, - "step": 3316 - }, - { - "epoch": 0.2551342204445812, - "learning_rate": 0.0025434148879758373, - "loss": 1.5026, - "step": 3317 - }, - { - "epoch": 0.25521113760479963, - "learning_rate": 0.00254315445654867, - "loss": 1.1845, - "step": 3318 - }, - { - "epoch": 0.2552880547650181, - "learning_rate": 0.0025428939642106276, - "loss": 1.1035, - "step": 3319 - }, - { - "epoch": 0.2553649719252365, - "learning_rate": 0.002542633410976919, - "loss": 1.0475, - "step": 3320 - }, - { - "epoch": 0.25544188908545495, - "learning_rate": 0.002542372796862759, - "loss": 0.9373, - "step": 3321 - }, - { - "epoch": 0.2555188062456734, - "learning_rate": 0.0025421121218833654, - "loss": 0.7934, - "step": 3322 - }, - { - "epoch": 0.2555957234058919, - "learning_rate": 0.0025418513860539587, - "loss": 1.4146, - "step": 3323 - }, - { - "epoch": 0.2556726405661103, - "learning_rate": 0.0025415905893897642, - "loss": 1.0758, - "step": 3324 - }, - { - "epoch": 0.25574955772632874, - "learning_rate": 0.002541329731906009, - "loss": 1.0413, - "step": 3325 - }, - { - "epoch": 0.2558264748865472, - "learning_rate": 0.0025410688136179257, - "loss": 1.2682, - "step": 3326 - }, - { - "epoch": 0.25590339204676565, - "learning_rate": 0.00254080783454075, - "loss": 1.2823, - "step": 3327 - }, - { - "epoch": 0.25598030920698406, - "learning_rate": 0.0025405467946897197, - "loss": 1.1681, - "step": 3328 - }, - { - "epoch": 0.2560572263672025, - "learning_rate": 0.0025402856940800775, - "loss": 1.0219, - "step": 3329 - }, - { - "epoch": 
0.256134143527421, - "learning_rate": 0.0025400245327270696, - "loss": 1.0044, - "step": 3330 - }, - { - "epoch": 0.25621106068763944, - "learning_rate": 0.002539763310645945, - "loss": 0.9554, - "step": 3331 - }, - { - "epoch": 0.25628797784785784, - "learning_rate": 0.0025395020278519576, - "loss": 1.429, - "step": 3332 - }, - { - "epoch": 0.2563648950080763, - "learning_rate": 0.0025392406843603634, - "loss": 0.9826, - "step": 3333 - }, - { - "epoch": 0.25644181216829476, - "learning_rate": 0.0025389792801864225, - "loss": 1.1936, - "step": 3334 - }, - { - "epoch": 0.2565187293285132, - "learning_rate": 0.0025387178153453986, - "loss": 0.9941, - "step": 3335 - }, - { - "epoch": 0.2565956464887316, - "learning_rate": 0.0025384562898525588, - "loss": 1.112, - "step": 3336 - }, - { - "epoch": 0.2566725636489501, - "learning_rate": 0.002538194703723174, - "loss": 1.2618, - "step": 3337 - }, - { - "epoch": 0.25674948080916854, - "learning_rate": 0.0025379330569725183, - "loss": 0.7113, - "step": 3338 - }, - { - "epoch": 0.25682639796938694, - "learning_rate": 0.0025376713496158697, - "loss": 1.2442, - "step": 3339 - }, - { - "epoch": 0.2569033151296054, - "learning_rate": 0.0025374095816685103, - "loss": 1.1023, - "step": 3340 - }, - { - "epoch": 0.25698023228982386, - "learning_rate": 0.0025371477531457233, - "loss": 1.3482, - "step": 3341 - }, - { - "epoch": 0.2570571494500423, - "learning_rate": 0.0025368858640627987, - "loss": 1.6154, - "step": 3342 - }, - { - "epoch": 0.2571340666102607, - "learning_rate": 0.002536623914435028, - "loss": 0.895, - "step": 3343 - }, - { - "epoch": 0.2572109837704792, - "learning_rate": 0.0025363619042777065, - "loss": 1.1876, - "step": 3344 - }, - { - "epoch": 0.25728790093069764, - "learning_rate": 0.002536099833606133, - "loss": 1.3241, - "step": 3345 - }, - { - "epoch": 0.2573648180909161, - "learning_rate": 0.0025358377024356105, - "loss": 0.7273, - "step": 3346 - }, - { - "epoch": 0.2574417352511345, - "learning_rate": 0.0025355755107814453, - "loss": 1.1544, - "step": 3347 - }, - { - "epoch": 0.25751865241135297, - "learning_rate": 0.0025353132586589462, - "loss": 1.1422, - "step": 3348 - }, - { - "epoch": 0.2575955695715714, - "learning_rate": 0.0025350509460834274, - "loss": 1.3658, - "step": 3349 - }, - { - "epoch": 0.2576724867317899, - "learning_rate": 0.002534788573070205, - "loss": 1.0071, - "step": 3350 - }, - { - "epoch": 0.2577494038920083, - "learning_rate": 0.0025345261396345995, - "loss": 1.0649, - "step": 3351 - }, - { - "epoch": 0.25782632105222675, - "learning_rate": 0.002534263645791935, - "loss": 1.415, - "step": 3352 - }, - { - "epoch": 0.2579032382124452, - "learning_rate": 0.0025340010915575373, - "loss": 1.492, - "step": 3353 - }, - { - "epoch": 0.25798015537266367, - "learning_rate": 0.002533738476946739, - "loss": 1.0133, - "step": 3354 - }, - { - "epoch": 0.25805707253288207, - "learning_rate": 0.002533475801974874, - "loss": 1.13, - "step": 3355 - }, - { - "epoch": 0.25813398969310053, - "learning_rate": 0.002533213066657279, - "loss": 1.1198, - "step": 3356 - }, - { - "epoch": 0.258210906853319, - "learning_rate": 0.0025329502710092966, - "loss": 0.7984, - "step": 3357 - }, - { - "epoch": 0.2582878240135374, - "learning_rate": 0.0025326874150462713, - "loss": 1.0155, - "step": 3358 - }, - { - "epoch": 0.25836474117375585, - "learning_rate": 0.002532424498783552, - "loss": 1.0591, - "step": 3359 - }, - { - "epoch": 0.2584416583339743, - "learning_rate": 0.0025321615222364895, - "loss": 0.9495, - "step": 3360 - }, - { - 
"epoch": 0.25851857549419277, - "learning_rate": 0.0025318984854204407, - "loss": 1.349, - "step": 3361 - }, - { - "epoch": 0.2585954926544112, - "learning_rate": 0.0025316353883507634, - "loss": 1.036, - "step": 3362 - }, - { - "epoch": 0.25867240981462963, - "learning_rate": 0.002531372231042821, - "loss": 1.448, - "step": 3363 - }, - { - "epoch": 0.2587493269748481, - "learning_rate": 0.002531109013511979, - "loss": 1.267, - "step": 3364 - }, - { - "epoch": 0.25882624413506655, - "learning_rate": 0.0025308457357736073, - "loss": 1.0893, - "step": 3365 - }, - { - "epoch": 0.25890316129528496, - "learning_rate": 0.0025305823978430788, - "loss": 1.2601, - "step": 3366 - }, - { - "epoch": 0.2589800784555034, - "learning_rate": 0.00253031899973577, - "loss": 1.5132, - "step": 3367 - }, - { - "epoch": 0.2590569956157219, - "learning_rate": 0.0025300555414670607, - "loss": 1.187, - "step": 3368 - }, - { - "epoch": 0.25913391277594033, - "learning_rate": 0.0025297920230523347, - "loss": 1.0362, - "step": 3369 - }, - { - "epoch": 0.25921082993615874, - "learning_rate": 0.00252952844450698, - "loss": 1.0495, - "step": 3370 - }, - { - "epoch": 0.2592877470963772, - "learning_rate": 0.002529264805846386, - "loss": 1.2691, - "step": 3371 - }, - { - "epoch": 0.25936466425659566, - "learning_rate": 0.0025290011070859472, - "loss": 1.1531, - "step": 3372 - }, - { - "epoch": 0.2594415814168141, - "learning_rate": 0.0025287373482410615, - "loss": 1.0173, - "step": 3373 - }, - { - "epoch": 0.2595184985770325, - "learning_rate": 0.00252847352932713, - "loss": 0.9931, - "step": 3374 - }, - { - "epoch": 0.259595415737251, - "learning_rate": 0.002528209650359557, - "loss": 1.2584, - "step": 3375 - }, - { - "epoch": 0.25967233289746944, - "learning_rate": 0.0025279457113537513, - "loss": 1.049, - "step": 3376 - }, - { - "epoch": 0.2597492500576879, - "learning_rate": 0.002527681712325124, - "loss": 1.3439, - "step": 3377 - }, - { - "epoch": 0.2598261672179063, - "learning_rate": 0.0025274176532890905, - "loss": 1.3921, - "step": 3378 - }, - { - "epoch": 0.25990308437812476, - "learning_rate": 0.0025271535342610697, - "loss": 0.5957, - "step": 3379 - }, - { - "epoch": 0.2599800015383432, - "learning_rate": 0.0025268893552564836, - "loss": 0.9592, - "step": 3380 - }, - { - "epoch": 0.2600569186985616, - "learning_rate": 0.0025266251162907576, - "loss": 1.0464, - "step": 3381 - }, - { - "epoch": 0.2601338358587801, - "learning_rate": 0.0025263608173793214, - "loss": 1.1172, - "step": 3382 - }, - { - "epoch": 0.26021075301899854, - "learning_rate": 0.002526096458537607, - "loss": 1.1096, - "step": 3383 - }, - { - "epoch": 0.260287670179217, - "learning_rate": 0.0025258320397810513, - "loss": 0.8709, - "step": 3384 - }, - { - "epoch": 0.2603645873394354, - "learning_rate": 0.0025255675611250933, - "loss": 0.8465, - "step": 3385 - }, - { - "epoch": 0.26044150449965386, - "learning_rate": 0.0025253030225851772, - "loss": 1.331, - "step": 3386 - }, - { - "epoch": 0.2605184216598723, - "learning_rate": 0.0025250384241767487, - "loss": 1.1716, - "step": 3387 - }, - { - "epoch": 0.2605953388200908, - "learning_rate": 0.0025247737659152584, - "loss": 1.1109, - "step": 3388 - }, - { - "epoch": 0.2606722559803092, - "learning_rate": 0.00252450904781616, - "loss": 1.1885, - "step": 3389 - }, - { - "epoch": 0.26074917314052765, - "learning_rate": 0.0025242442698949104, - "loss": 1.655, - "step": 3390 - }, - { - "epoch": 0.2608260903007461, - "learning_rate": 0.0025239794321669706, - "loss": 1.1128, - "step": 3391 - }, - { - 
"epoch": 0.26090300746096456, - "learning_rate": 0.0025237145346478046, - "loss": 0.9386, - "step": 3392 - }, - { - "epoch": 0.26097992462118297, - "learning_rate": 0.0025234495773528804, - "loss": 1.4328, - "step": 3393 - }, - { - "epoch": 0.2610568417814014, - "learning_rate": 0.002523184560297668, - "loss": 1.1064, - "step": 3394 - }, - { - "epoch": 0.2611337589416199, - "learning_rate": 0.002522919483497643, - "loss": 1.2672, - "step": 3395 - }, - { - "epoch": 0.26121067610183835, - "learning_rate": 0.0025226543469682836, - "loss": 1.6387, - "step": 3396 - }, - { - "epoch": 0.26128759326205675, - "learning_rate": 0.0025223891507250708, - "loss": 1.2837, - "step": 3397 - }, - { - "epoch": 0.2613645104222752, - "learning_rate": 0.00252212389478349, - "loss": 1.0324, - "step": 3398 - }, - { - "epoch": 0.26144142758249367, - "learning_rate": 0.00252185857915903, - "loss": 1.277, - "step": 3399 - }, - { - "epoch": 0.26151834474271207, - "learning_rate": 0.0025215932038671826, - "loss": 1.1001, - "step": 3400 - }, - { - "epoch": 0.26159526190293053, - "learning_rate": 0.002521327768923443, - "loss": 1.6005, - "step": 3401 - }, - { - "epoch": 0.261672179063149, - "learning_rate": 0.002521062274343311, - "loss": 1.1225, - "step": 3402 - }, - { - "epoch": 0.26174909622336745, - "learning_rate": 0.0025207967201422883, - "loss": 0.8252, - "step": 3403 - }, - { - "epoch": 0.26182601338358585, - "learning_rate": 0.0025205311063358814, - "loss": 1.2752, - "step": 3404 - }, - { - "epoch": 0.2619029305438043, - "learning_rate": 0.0025202654329395994, - "loss": 1.1486, - "step": 3405 - }, - { - "epoch": 0.2619798477040228, - "learning_rate": 0.002519999699968956, - "loss": 1.2136, - "step": 3406 - }, - { - "epoch": 0.26205676486424123, - "learning_rate": 0.002519733907439467, - "loss": 1.1214, - "step": 3407 - }, - { - "epoch": 0.26213368202445964, - "learning_rate": 0.0025194680553666526, - "loss": 1.0703, - "step": 3408 - }, - { - "epoch": 0.2622105991846781, - "learning_rate": 0.002519202143766035, - "loss": 1.2793, - "step": 3409 - }, - { - "epoch": 0.26228751634489655, - "learning_rate": 0.0025189361726531425, - "loss": 1.1329, - "step": 3410 - }, - { - "epoch": 0.262364433505115, - "learning_rate": 0.002518670142043505, - "loss": 1.107, - "step": 3411 - }, - { - "epoch": 0.2624413506653334, - "learning_rate": 0.0025184040519526567, - "loss": 0.8191, - "step": 3412 - }, - { - "epoch": 0.2625182678255519, - "learning_rate": 0.002518137902396134, - "loss": 0.9387, - "step": 3413 - }, - { - "epoch": 0.26259518498577034, - "learning_rate": 0.0025178716933894783, - "loss": 0.9596, - "step": 3414 - }, - { - "epoch": 0.2626721021459888, - "learning_rate": 0.0025176054249482337, - "loss": 0.8431, - "step": 3415 - }, - { - "epoch": 0.2627490193062072, - "learning_rate": 0.002517339097087948, - "loss": 1.1453, - "step": 3416 - }, - { - "epoch": 0.26282593646642566, - "learning_rate": 0.0025170727098241716, - "loss": 1.3631, - "step": 3417 - }, - { - "epoch": 0.2629028536266441, - "learning_rate": 0.00251680626317246, - "loss": 1.4974, - "step": 3418 - }, - { - "epoch": 0.2629797707868626, - "learning_rate": 0.002516539757148371, - "loss": 0.9919, - "step": 3419 - }, - { - "epoch": 0.263056687947081, - "learning_rate": 0.0025162731917674664, - "loss": 1.4821, - "step": 3420 - }, - { - "epoch": 0.26313360510729944, - "learning_rate": 0.0025160065670453103, - "loss": 1.1854, - "step": 3421 - }, - { - "epoch": 0.2632105222675179, - "learning_rate": 0.0025157398829974724, - "loss": 1.5294, - "step": 3422 - }, - 
{ - "epoch": 0.2632874394277363, - "learning_rate": 0.002515473139639524, - "loss": 1.1279, - "step": 3423 - }, - { - "epoch": 0.26336435658795476, - "learning_rate": 0.002515206336987041, - "loss": 1.087, - "step": 3424 - }, - { - "epoch": 0.2634412737481732, - "learning_rate": 0.002514939475055602, - "loss": 1.1478, - "step": 3425 - }, - { - "epoch": 0.2635181909083917, - "learning_rate": 0.0025146725538607887, - "loss": 1.114, - "step": 3426 - }, - { - "epoch": 0.2635951080686101, - "learning_rate": 0.002514405573418188, - "loss": 1.2608, - "step": 3427 - }, - { - "epoch": 0.26367202522882854, - "learning_rate": 0.0025141385337433884, - "loss": 1.3321, - "step": 3428 - }, - { - "epoch": 0.263748942389047, - "learning_rate": 0.0025138714348519825, - "loss": 0.8488, - "step": 3429 - }, - { - "epoch": 0.26382585954926546, - "learning_rate": 0.002513604276759568, - "loss": 1.1953, - "step": 3430 - }, - { - "epoch": 0.26390277670948387, - "learning_rate": 0.002513337059481743, - "loss": 1.1408, - "step": 3431 - }, - { - "epoch": 0.2639796938697023, - "learning_rate": 0.002513069783034111, - "loss": 1.1631, - "step": 3432 - }, - { - "epoch": 0.2640566110299208, - "learning_rate": 0.002512802447432278, - "loss": 1.4827, - "step": 3433 - }, - { - "epoch": 0.26413352819013924, - "learning_rate": 0.002512535052691855, - "loss": 1.1841, - "step": 3434 - }, - { - "epoch": 0.26421044535035765, - "learning_rate": 0.0025122675988284555, - "loss": 0.8926, - "step": 3435 - }, - { - "epoch": 0.2642873625105761, - "learning_rate": 0.002512000085857695, - "loss": 1.0023, - "step": 3436 - }, - { - "epoch": 0.26436427967079457, - "learning_rate": 0.0025117325137951952, - "loss": 0.9331, - "step": 3437 - }, - { - "epoch": 0.264441196831013, - "learning_rate": 0.0025114648826565795, - "loss": 1.2008, - "step": 3438 - }, - { - "epoch": 0.26451811399123143, - "learning_rate": 0.0025111971924574755, - "loss": 1.0714, - "step": 3439 - }, - { - "epoch": 0.2645950311514499, - "learning_rate": 0.002510929443213513, - "loss": 1.1681, - "step": 3440 - }, - { - "epoch": 0.26467194831166835, - "learning_rate": 0.0025106616349403277, - "loss": 0.9974, - "step": 3441 - }, - { - "epoch": 0.26474886547188675, - "learning_rate": 0.002510393767653556, - "loss": 1.2625, - "step": 3442 - }, - { - "epoch": 0.2648257826321052, - "learning_rate": 0.0025101258413688385, - "loss": 0.9855, - "step": 3443 - }, - { - "epoch": 0.26490269979232367, - "learning_rate": 0.002509857856101821, - "loss": 1.0247, - "step": 3444 - }, - { - "epoch": 0.26497961695254213, - "learning_rate": 0.0025095898118681504, - "loss": 1.1187, - "step": 3445 - }, - { - "epoch": 0.26505653411276053, - "learning_rate": 0.0025093217086834788, - "loss": 1.1966, - "step": 3446 - }, - { - "epoch": 0.265133451272979, - "learning_rate": 0.0025090535465634605, - "loss": 1.6931, - "step": 3447 - }, - { - "epoch": 0.26521036843319745, - "learning_rate": 0.002508785325523754, - "loss": 1.1623, - "step": 3448 - }, - { - "epoch": 0.2652872855934159, - "learning_rate": 0.002508517045580021, - "loss": 1.3653, - "step": 3449 - }, - { - "epoch": 0.2653642027536343, - "learning_rate": 0.0025082487067479274, - "loss": 1.1628, - "step": 3450 - }, - { - "epoch": 0.2654411199138528, - "learning_rate": 0.0025079803090431404, - "loss": 1.2927, - "step": 3451 - }, - { - "epoch": 0.26551803707407123, - "learning_rate": 0.002507711852481332, - "loss": 0.9913, - "step": 3452 - }, - { - "epoch": 0.2655949542342897, - "learning_rate": 0.0025074433370781794, - "loss": 1.4476, - "step": 3453 - 
}, - { - "epoch": 0.2656718713945081, - "learning_rate": 0.0025071747628493605, - "loss": 0.8821, - "step": 3454 - }, - { - "epoch": 0.26574878855472656, - "learning_rate": 0.002506906129810557, - "loss": 1.3082, - "step": 3455 - }, - { - "epoch": 0.265825705714945, - "learning_rate": 0.0025066374379774554, - "loss": 1.1008, - "step": 3456 - }, - { - "epoch": 0.2659026228751635, - "learning_rate": 0.0025063686873657443, - "loss": 0.8634, - "step": 3457 - }, - { - "epoch": 0.2659795400353819, - "learning_rate": 0.0025060998779911177, - "loss": 1.2657, - "step": 3458 - }, - { - "epoch": 0.26605645719560034, - "learning_rate": 0.0025058310098692695, - "loss": 1.0036, - "step": 3459 - }, - { - "epoch": 0.2661333743558188, - "learning_rate": 0.002505562083015901, - "loss": 0.9075, - "step": 3460 - }, - { - "epoch": 0.2662102915160372, - "learning_rate": 0.0025052930974467143, - "loss": 1.2736, - "step": 3461 - }, - { - "epoch": 0.26628720867625566, - "learning_rate": 0.0025050240531774163, - "loss": 1.2193, - "step": 3462 - }, - { - "epoch": 0.2663641258364741, - "learning_rate": 0.0025047549502237158, - "loss": 1.2223, - "step": 3463 - }, - { - "epoch": 0.2664410429966926, - "learning_rate": 0.0025044857886013273, - "loss": 1.0444, - "step": 3464 - }, - { - "epoch": 0.266517960156911, - "learning_rate": 0.0025042165683259667, - "loss": 1.1946, - "step": 3465 - }, - { - "epoch": 0.26659487731712944, - "learning_rate": 0.002503947289413354, - "loss": 0.9865, - "step": 3466 - }, - { - "epoch": 0.2666717944773479, - "learning_rate": 0.0025036779518792123, - "loss": 1.3231, - "step": 3467 - }, - { - "epoch": 0.26674871163756636, - "learning_rate": 0.002503408555739269, - "loss": 1.0282, - "step": 3468 - }, - { - "epoch": 0.26682562879778476, - "learning_rate": 0.0025031391010092554, - "loss": 1.5534, - "step": 3469 - }, - { - "epoch": 0.2669025459580032, - "learning_rate": 0.0025028695877049034, - "loss": 1.2378, - "step": 3470 - }, - { - "epoch": 0.2669794631182217, - "learning_rate": 0.0025026000158419512, - "loss": 1.0397, - "step": 3471 - }, - { - "epoch": 0.26705638027844014, - "learning_rate": 0.0025023303854361393, - "loss": 1.2919, - "step": 3472 - }, - { - "epoch": 0.26713329743865855, - "learning_rate": 0.0025020606965032114, - "loss": 1.1562, - "step": 3473 - }, - { - "epoch": 0.267210214598877, - "learning_rate": 0.002501790949058915, - "loss": 1.002, - "step": 3474 - }, - { - "epoch": 0.26728713175909546, - "learning_rate": 0.002501521143119001, - "loss": 1.2503, - "step": 3475 - }, - { - "epoch": 0.2673640489193139, - "learning_rate": 0.0025012512786992246, - "loss": 0.9608, - "step": 3476 - }, - { - "epoch": 0.2674409660795323, - "learning_rate": 0.0025009813558153414, - "loss": 0.7097, - "step": 3477 - }, - { - "epoch": 0.2675178832397508, - "learning_rate": 0.0025007113744831143, - "loss": 0.8568, - "step": 3478 - }, - { - "epoch": 0.26759480039996925, - "learning_rate": 0.0025004413347183073, - "loss": 1.1077, - "step": 3479 - }, - { - "epoch": 0.2676717175601877, - "learning_rate": 0.002500171236536687, - "loss": 0.9338, - "step": 3480 - }, - { - "epoch": 0.2677486347204061, - "learning_rate": 0.0024999010799540275, - "loss": 1.3056, - "step": 3481 - }, - { - "epoch": 0.26782555188062457, - "learning_rate": 0.0024996308649861006, - "loss": 1.1272, - "step": 3482 - }, - { - "epoch": 0.267902469040843, - "learning_rate": 0.002499360591648686, - "loss": 0.8716, - "step": 3483 - }, - { - "epoch": 0.26797938620106143, - "learning_rate": 0.0024990902599575653, - "loss": 0.7946, - 
"step": 3484 - }, - { - "epoch": 0.2680563033612799, - "learning_rate": 0.002498819869928523, - "loss": 1.2492, - "step": 3485 - }, - { - "epoch": 0.26813322052149835, - "learning_rate": 0.0024985494215773473, - "loss": 1.1077, - "step": 3486 - }, - { - "epoch": 0.2682101376817168, - "learning_rate": 0.00249827891491983, - "loss": 1.0933, - "step": 3487 - }, - { - "epoch": 0.2682870548419352, - "learning_rate": 0.002498008349971767, - "loss": 1.0039, - "step": 3488 - }, - { - "epoch": 0.2683639720021537, - "learning_rate": 0.0024977377267489565, - "loss": 0.8665, - "step": 3489 - }, - { - "epoch": 0.26844088916237213, - "learning_rate": 0.0024974670452671995, - "loss": 1.0237, - "step": 3490 - }, - { - "epoch": 0.2685178063225906, - "learning_rate": 0.0024971963055423025, - "loss": 1.3132, - "step": 3491 - }, - { - "epoch": 0.268594723482809, - "learning_rate": 0.0024969255075900742, - "loss": 0.9821, - "step": 3492 - }, - { - "epoch": 0.26867164064302745, - "learning_rate": 0.0024966546514263266, - "loss": 1.2235, - "step": 3493 - }, - { - "epoch": 0.2687485578032459, - "learning_rate": 0.002496383737066875, - "loss": 0.8197, - "step": 3494 - }, - { - "epoch": 0.2688254749634644, - "learning_rate": 0.0024961127645275385, - "loss": 1.2345, - "step": 3495 - }, - { - "epoch": 0.2689023921236828, - "learning_rate": 0.0024958417338241403, - "loss": 1.2893, - "step": 3496 - }, - { - "epoch": 0.26897930928390124, - "learning_rate": 0.0024955706449725046, - "loss": 0.9128, - "step": 3497 - }, - { - "epoch": 0.2690562264441197, - "learning_rate": 0.0024952994979884615, - "loss": 1.0012, - "step": 3498 - }, - { - "epoch": 0.26913314360433815, - "learning_rate": 0.0024950282928878436, - "loss": 0.927, - "step": 3499 - }, - { - "epoch": 0.26921006076455656, - "learning_rate": 0.0024947570296864866, - "loss": 1.5774, - "step": 3500 - }, - { - "epoch": 0.269286977924775, - "learning_rate": 0.0024944857084002302, - "loss": 1.029, - "step": 3501 - }, - { - "epoch": 0.2693638950849935, - "learning_rate": 0.0024942143290449166, - "loss": 0.9192, - "step": 3502 - }, - { - "epoch": 0.2694408122452119, - "learning_rate": 0.002493942891636392, - "loss": 0.9637, - "step": 3503 - }, - { - "epoch": 0.26951772940543034, - "learning_rate": 0.002493671396190506, - "loss": 1.0466, - "step": 3504 - }, - { - "epoch": 0.2695946465656488, - "learning_rate": 0.002493399842723112, - "loss": 1.235, - "step": 3505 - }, - { - "epoch": 0.26967156372586726, - "learning_rate": 0.002493128231250066, - "loss": 1.0379, - "step": 3506 - }, - { - "epoch": 0.26974848088608566, - "learning_rate": 0.0024928565617872273, - "loss": 0.975, - "step": 3507 - }, - { - "epoch": 0.2698253980463041, - "learning_rate": 0.0024925848343504593, - "loss": 0.7817, - "step": 3508 - }, - { - "epoch": 0.2699023152065226, - "learning_rate": 0.002492313048955628, - "loss": 1.0762, - "step": 3509 - }, - { - "epoch": 0.26997923236674104, - "learning_rate": 0.002492041205618604, - "loss": 1.2626, - "step": 3510 - }, - { - "epoch": 0.27005614952695944, - "learning_rate": 0.00249176930435526, - "loss": 1.6238, - "step": 3511 - }, - { - "epoch": 0.2701330666871779, - "learning_rate": 0.002491497345181473, - "loss": 1.3942, - "step": 3512 - }, - { - "epoch": 0.27020998384739636, - "learning_rate": 0.0024912253281131227, - "loss": 0.9294, - "step": 3513 - }, - { - "epoch": 0.2702869010076148, - "learning_rate": 0.0024909532531660924, - "loss": 1.2417, - "step": 3514 - }, - { - "epoch": 0.2703638181678332, - "learning_rate": 0.0024906811203562686, - "loss": 0.9573, 
- "step": 3515 - }, - { - "epoch": 0.2704407353280517, - "learning_rate": 0.0024904089296995417, - "loss": 1.312, - "step": 3516 - }, - { - "epoch": 0.27051765248827014, - "learning_rate": 0.0024901366812118057, - "loss": 1.0174, - "step": 3517 - }, - { - "epoch": 0.2705945696484886, - "learning_rate": 0.0024898643749089564, - "loss": 1.3006, - "step": 3518 - }, - { - "epoch": 0.270671486808707, - "learning_rate": 0.0024895920108068954, - "loss": 1.2633, - "step": 3519 - }, - { - "epoch": 0.27074840396892547, - "learning_rate": 0.0024893195889215246, - "loss": 1.3204, - "step": 3520 - }, - { - "epoch": 0.2708253211291439, - "learning_rate": 0.0024890471092687523, - "loss": 0.9997, - "step": 3521 - }, - { - "epoch": 0.27090223828936233, - "learning_rate": 0.0024887745718644894, - "loss": 1.1393, - "step": 3522 - }, - { - "epoch": 0.2709791554495808, - "learning_rate": 0.002488501976724648, - "loss": 1.2127, - "step": 3523 - }, - { - "epoch": 0.27105607260979925, - "learning_rate": 0.0024882293238651454, - "loss": 1.2812, - "step": 3524 - }, - { - "epoch": 0.2711329897700177, - "learning_rate": 0.0024879566133019036, - "loss": 1.0829, - "step": 3525 - }, - { - "epoch": 0.2712099069302361, - "learning_rate": 0.002487683845050845, - "loss": 1.2449, - "step": 3526 - }, - { - "epoch": 0.27128682409045457, - "learning_rate": 0.0024874110191278978, - "loss": 1.3035, - "step": 3527 - }, - { - "epoch": 0.27136374125067303, - "learning_rate": 0.0024871381355489916, - "loss": 0.972, - "step": 3528 - }, - { - "epoch": 0.2714406584108915, - "learning_rate": 0.002486865194330061, - "loss": 0.9873, - "step": 3529 - }, - { - "epoch": 0.2715175755711099, - "learning_rate": 0.0024865921954870442, - "loss": 1.1617, - "step": 3530 - }, - { - "epoch": 0.27159449273132835, - "learning_rate": 0.0024863191390358795, - "loss": 0.7969, - "step": 3531 - }, - { - "epoch": 0.2716714098915468, - "learning_rate": 0.0024860460249925133, - "loss": 1.3353, - "step": 3532 - }, - { - "epoch": 0.27174832705176527, - "learning_rate": 0.0024857728533728917, - "loss": 0.9596, - "step": 3533 - }, - { - "epoch": 0.2718252442119837, - "learning_rate": 0.002485499624192966, - "loss": 1.1102, - "step": 3534 - }, - { - "epoch": 0.27190216137220213, - "learning_rate": 0.0024852263374686902, - "loss": 1.0854, - "step": 3535 - }, - { - "epoch": 0.2719790785324206, - "learning_rate": 0.0024849529932160215, - "loss": 1.0259, - "step": 3536 - }, - { - "epoch": 0.27205599569263905, - "learning_rate": 0.002484679591450921, - "loss": 1.3463, - "step": 3537 - }, - { - "epoch": 0.27213291285285746, - "learning_rate": 0.002484406132189353, - "loss": 1.1481, - "step": 3538 - }, - { - "epoch": 0.2722098300130759, - "learning_rate": 0.002484132615447285, - "loss": 0.9511, - "step": 3539 - }, - { - "epoch": 0.2722867471732944, - "learning_rate": 0.002483859041240688, - "loss": 1.3642, - "step": 3540 - }, - { - "epoch": 0.27236366433351283, - "learning_rate": 0.0024835854095855365, - "loss": 1.2416, - "step": 3541 - }, - { - "epoch": 0.27244058149373124, - "learning_rate": 0.002483311720497808, - "loss": 0.8573, - "step": 3542 - }, - { - "epoch": 0.2725174986539497, - "learning_rate": 0.0024830379739934827, - "loss": 1.0931, - "step": 3543 - }, - { - "epoch": 0.27259441581416816, - "learning_rate": 0.002482764170088546, - "loss": 1.2971, - "step": 3544 - }, - { - "epoch": 0.27267133297438656, - "learning_rate": 0.0024824903087989855, - "loss": 1.4864, - "step": 3545 - }, - { - "epoch": 0.272748250134605, - "learning_rate": 0.0024822163901407914, - 
"loss": 1.2757, - "step": 3546 - }, - { - "epoch": 0.2728251672948235, - "learning_rate": 0.0024819424141299586, - "loss": 1.1284, - "step": 3547 - }, - { - "epoch": 0.27290208445504194, - "learning_rate": 0.002481668380782485, - "loss": 1.2827, - "step": 3548 - }, - { - "epoch": 0.27297900161526034, - "learning_rate": 0.0024813942901143722, - "loss": 1.1603, - "step": 3549 - }, - { - "epoch": 0.2730559187754788, - "learning_rate": 0.002481120142141623, - "loss": 1.0989, - "step": 3550 - }, - { - "epoch": 0.27313283593569726, - "learning_rate": 0.0024808459368802467, - "loss": 1.1768, - "step": 3551 - }, - { - "epoch": 0.2732097530959157, - "learning_rate": 0.002480571674346254, - "loss": 1.3563, - "step": 3552 - }, - { - "epoch": 0.2732866702561341, - "learning_rate": 0.00248029735455566, - "loss": 1.5579, - "step": 3553 - }, - { - "epoch": 0.2733635874163526, - "learning_rate": 0.0024800229775244803, - "loss": 1.052, - "step": 3554 - }, - { - "epoch": 0.27344050457657104, - "learning_rate": 0.0024797485432687385, - "loss": 0.9302, - "step": 3555 - }, - { - "epoch": 0.2735174217367895, - "learning_rate": 0.002479474051804458, - "loss": 1.2097, - "step": 3556 - }, - { - "epoch": 0.2735943388970079, - "learning_rate": 0.002479199503147667, - "loss": 0.9372, - "step": 3557 - }, - { - "epoch": 0.27367125605722636, - "learning_rate": 0.0024789248973143964, - "loss": 0.8427, - "step": 3558 - }, - { - "epoch": 0.2737481732174448, - "learning_rate": 0.0024786502343206807, - "loss": 1.0251, - "step": 3559 - }, - { - "epoch": 0.2738250903776633, - "learning_rate": 0.0024783755141825578, - "loss": 1.4592, - "step": 3560 - }, - { - "epoch": 0.2739020075378817, - "learning_rate": 0.0024781007369160694, - "loss": 1.468, - "step": 3561 - }, - { - "epoch": 0.27397892469810015, - "learning_rate": 0.0024778259025372592, - "loss": 0.9707, - "step": 3562 - }, - { - "epoch": 0.2740558418583186, - "learning_rate": 0.002477551011062176, - "loss": 1.1006, - "step": 3563 - }, - { - "epoch": 0.274132759018537, - "learning_rate": 0.0024772760625068704, - "loss": 1.2722, - "step": 3564 - }, - { - "epoch": 0.27420967617875547, - "learning_rate": 0.0024770010568873966, - "loss": 1.1913, - "step": 3565 - }, - { - "epoch": 0.2742865933389739, - "learning_rate": 0.002476725994219813, - "loss": 1.4452, - "step": 3566 - }, - { - "epoch": 0.2743635104991924, - "learning_rate": 0.002476450874520181, - "loss": 1.3885, - "step": 3567 - }, - { - "epoch": 0.2744404276594108, - "learning_rate": 0.0024761756978045643, - "loss": 0.9229, - "step": 3568 - }, - { - "epoch": 0.27451734481962925, - "learning_rate": 0.0024759004640890316, - "loss": 1.1716, - "step": 3569 - }, - { - "epoch": 0.2745942619798477, - "learning_rate": 0.0024756251733896537, - "loss": 1.1123, - "step": 3570 - }, - { - "epoch": 0.27467117914006617, - "learning_rate": 0.002475349825722505, - "loss": 1.6297, - "step": 3571 - }, - { - "epoch": 0.27474809630028457, - "learning_rate": 0.002475074421103664, - "loss": 0.9711, - "step": 3572 - }, - { - "epoch": 0.27482501346050303, - "learning_rate": 0.0024747989595492108, - "loss": 1.18, - "step": 3573 - }, - { - "epoch": 0.2749019306207215, - "learning_rate": 0.0024745234410752303, - "loss": 1.1942, - "step": 3574 - }, - { - "epoch": 0.27497884778093995, - "learning_rate": 0.0024742478656978105, - "loss": 1.2194, - "step": 3575 - }, - { - "epoch": 0.27505576494115835, - "learning_rate": 0.0024739722334330434, - "loss": 0.9593, - "step": 3576 - }, - { - "epoch": 0.2751326821013768, - "learning_rate": 
0.0024736965442970216, - "loss": 1.352, - "step": 3577 - }, - { - "epoch": 0.2752095992615953, - "learning_rate": 0.0024734207983058442, - "loss": 1.0481, - "step": 3578 - }, - { - "epoch": 0.27528651642181373, - "learning_rate": 0.0024731449954756117, - "loss": 1.1783, - "step": 3579 - }, - { - "epoch": 0.27536343358203214, - "learning_rate": 0.002472869135822429, - "loss": 1.1786, - "step": 3580 - }, - { - "epoch": 0.2754403507422506, - "learning_rate": 0.0024725932193624036, - "loss": 0.9528, - "step": 3581 - }, - { - "epoch": 0.27551726790246905, - "learning_rate": 0.002472317246111646, - "loss": 1.1879, - "step": 3582 - }, - { - "epoch": 0.27559418506268746, - "learning_rate": 0.0024720412160862716, - "loss": 0.9679, - "step": 3583 - }, - { - "epoch": 0.2756711022229059, - "learning_rate": 0.0024717651293023977, - "loss": 1.1578, - "step": 3584 - }, - { - "epoch": 0.2757480193831244, - "learning_rate": 0.0024714889857761443, - "loss": 1.125, - "step": 3585 - }, - { - "epoch": 0.27582493654334284, - "learning_rate": 0.002471212785523637, - "loss": 1.2239, - "step": 3586 - }, - { - "epoch": 0.27590185370356124, - "learning_rate": 0.002470936528561003, - "loss": 1.25, - "step": 3587 - }, - { - "epoch": 0.2759787708637797, - "learning_rate": 0.0024706602149043733, - "loss": 1.1979, - "step": 3588 - }, - { - "epoch": 0.27605568802399816, - "learning_rate": 0.002470383844569882, - "loss": 1.1033, - "step": 3589 - }, - { - "epoch": 0.2761326051842166, - "learning_rate": 0.0024701074175736666, - "loss": 1.0717, - "step": 3590 - }, - { - "epoch": 0.276209522344435, - "learning_rate": 0.0024698309339318686, - "loss": 1.2374, - "step": 3591 - }, - { - "epoch": 0.2762864395046535, - "learning_rate": 0.0024695543936606307, - "loss": 1.2066, - "step": 3592 - }, - { - "epoch": 0.27636335666487194, - "learning_rate": 0.002469277796776102, - "loss": 1.4984, - "step": 3593 - }, - { - "epoch": 0.2764402738250904, - "learning_rate": 0.0024690011432944325, - "loss": 1.2349, - "step": 3594 - }, - { - "epoch": 0.2765171909853088, - "learning_rate": 0.0024687244332317762, - "loss": 1.0037, - "step": 3595 - }, - { - "epoch": 0.27659410814552726, - "learning_rate": 0.0024684476666042908, - "loss": 1.0457, - "step": 3596 - }, - { - "epoch": 0.2766710253057457, - "learning_rate": 0.002468170843428137, - "loss": 0.9947, - "step": 3597 - }, - { - "epoch": 0.2767479424659642, - "learning_rate": 0.002467893963719478, - "loss": 1.3606, - "step": 3598 - }, - { - "epoch": 0.2768248596261826, - "learning_rate": 0.0024676170274944825, - "loss": 1.1684, - "step": 3599 - }, - { - "epoch": 0.27690177678640104, - "learning_rate": 0.00246734003476932, - "loss": 1.1067, - "step": 3600 - }, - { - "epoch": 0.2769786939466195, - "learning_rate": 0.0024670629855601646, - "loss": 1.1872, - "step": 3601 - }, - { - "epoch": 0.27705561110683796, - "learning_rate": 0.0024667858798831936, - "loss": 0.9808, - "step": 3602 - }, - { - "epoch": 0.27713252826705637, - "learning_rate": 0.002466508717754588, - "loss": 1.1883, - "step": 3603 - }, - { - "epoch": 0.2772094454272748, - "learning_rate": 0.0024662314991905306, - "loss": 1.243, - "step": 3604 - }, - { - "epoch": 0.2772863625874933, - "learning_rate": 0.0024659542242072095, - "loss": 1.1596, - "step": 3605 - }, - { - "epoch": 0.2773632797477117, - "learning_rate": 0.0024656768928208142, - "loss": 1.1401, - "step": 3606 - }, - { - "epoch": 0.27744019690793015, - "learning_rate": 0.0024653995050475386, - "loss": 1.2892, - "step": 3607 - }, - { - "epoch": 0.2775171140681486, - 
"learning_rate": 0.00246512206090358, - "loss": 0.7485, - "step": 3608 - }, - { - "epoch": 0.27759403122836707, - "learning_rate": 0.002464844560405138, - "loss": 0.9778, - "step": 3609 - }, - { - "epoch": 0.27767094838858547, - "learning_rate": 0.0024645670035684172, - "loss": 1.1943, - "step": 3610 - }, - { - "epoch": 0.27774786554880393, - "learning_rate": 0.0024642893904096236, - "loss": 0.7985, - "step": 3611 - }, - { - "epoch": 0.2778247827090224, - "learning_rate": 0.0024640117209449677, - "loss": 0.995, - "step": 3612 - }, - { - "epoch": 0.27790169986924085, - "learning_rate": 0.002463733995190662, - "loss": 1.1795, - "step": 3613 - }, - { - "epoch": 0.27797861702945925, - "learning_rate": 0.002463456213162925, - "loss": 1.114, - "step": 3614 - }, - { - "epoch": 0.2780555341896777, - "learning_rate": 0.002463178374877975, - "loss": 1.2579, - "step": 3615 - }, - { - "epoch": 0.27813245134989617, - "learning_rate": 0.002462900480352036, - "loss": 1.1171, - "step": 3616 - }, - { - "epoch": 0.27820936851011463, - "learning_rate": 0.0024626225296013344, - "loss": 0.9647, - "step": 3617 - }, - { - "epoch": 0.27828628567033303, - "learning_rate": 0.0024623445226421006, - "loss": 1.0202, - "step": 3618 - }, - { - "epoch": 0.2783632028305515, - "learning_rate": 0.002462066459490566, - "loss": 1.4352, - "step": 3619 - }, - { - "epoch": 0.27844011999076995, - "learning_rate": 0.0024617883401629693, - "loss": 1.032, - "step": 3620 - }, - { - "epoch": 0.2785170371509884, - "learning_rate": 0.0024615101646755485, - "loss": 1.2368, - "step": 3621 - }, - { - "epoch": 0.2785939543112068, - "learning_rate": 0.0024612319330445474, - "loss": 0.7482, - "step": 3622 - }, - { - "epoch": 0.2786708714714253, - "learning_rate": 0.002460953645286212, - "loss": 0.9346, - "step": 3623 - }, - { - "epoch": 0.27874778863164373, - "learning_rate": 0.0024606753014167915, - "loss": 0.919, - "step": 3624 - }, - { - "epoch": 0.27882470579186214, - "learning_rate": 0.002460396901452539, - "loss": 1.1329, - "step": 3625 - }, - { - "epoch": 0.2789016229520806, - "learning_rate": 0.0024601184454097107, - "loss": 1.5333, - "step": 3626 - }, - { - "epoch": 0.27897854011229906, - "learning_rate": 0.0024598399333045653, - "loss": 1.2765, - "step": 3627 - }, - { - "epoch": 0.2790554572725175, - "learning_rate": 0.002459561365153366, - "loss": 1.0334, - "step": 3628 - }, - { - "epoch": 0.2791323744327359, - "learning_rate": 0.002459282740972379, - "loss": 1.2858, - "step": 3629 - }, - { - "epoch": 0.2792092915929544, - "learning_rate": 0.002459004060777873, - "loss": 1.2152, - "step": 3630 - }, - { - "epoch": 0.27928620875317284, - "learning_rate": 0.0024587253245861207, - "loss": 1.3838, - "step": 3631 - }, - { - "epoch": 0.2793631259133913, - "learning_rate": 0.0024584465324133968, - "loss": 0.9945, - "step": 3632 - }, - { - "epoch": 0.2794400430736097, - "learning_rate": 0.002458167684275981, - "loss": 1.118, - "step": 3633 - }, - { - "epoch": 0.27951696023382816, - "learning_rate": 0.002457888780190156, - "loss": 1.2525, - "step": 3634 - }, - { - "epoch": 0.2795938773940466, - "learning_rate": 0.0024576098201722065, - "loss": 1.3401, - "step": 3635 - }, - { - "epoch": 0.2796707945542651, - "learning_rate": 0.0024573308042384217, - "loss": 1.0822, - "step": 3636 - }, - { - "epoch": 0.2797477117144835, - "learning_rate": 0.002457051732405094, - "loss": 1.2132, - "step": 3637 - }, - { - "epoch": 0.27982462887470194, - "learning_rate": 0.002456772604688518, - "loss": 1.1069, - "step": 3638 - }, - { - "epoch": 
0.2799015460349204, - "learning_rate": 0.0024564934211049917, - "loss": 0.8862, - "step": 3639 - }, - { - "epoch": 0.27997846319513886, - "learning_rate": 0.0024562141816708185, - "loss": 1.2245, - "step": 3640 - }, - { - "epoch": 0.28005538035535726, - "learning_rate": 0.002455934886402303, - "loss": 1.2619, - "step": 3641 - }, - { - "epoch": 0.2801322975155757, - "learning_rate": 0.0024556555353157524, - "loss": 1.1116, - "step": 3642 - }, - { - "epoch": 0.2802092146757942, - "learning_rate": 0.0024553761284274796, - "loss": 1.1542, - "step": 3643 - }, - { - "epoch": 0.28028613183601264, - "learning_rate": 0.002455096665753799, - "loss": 1.2643, - "step": 3644 - }, - { - "epoch": 0.28036304899623105, - "learning_rate": 0.0024548171473110286, - "loss": 1.1555, - "step": 3645 - }, - { - "epoch": 0.2804399661564495, - "learning_rate": 0.0024545375731154897, - "loss": 0.8542, - "step": 3646 - }, - { - "epoch": 0.28051688331666796, - "learning_rate": 0.002454257943183507, - "loss": 2.0818, - "step": 3647 - }, - { - "epoch": 0.28059380047688637, - "learning_rate": 0.0024539782575314093, - "loss": 1.2558, - "step": 3648 - }, - { - "epoch": 0.2806707176371048, - "learning_rate": 0.002453698516175526, - "loss": 0.8071, - "step": 3649 - }, - { - "epoch": 0.2807476347973233, - "learning_rate": 0.0024534187191321928, - "loss": 1.1587, - "step": 3650 - }, - { - "epoch": 0.28082455195754175, - "learning_rate": 0.002453138866417747, - "loss": 1.2945, - "step": 3651 - }, - { - "epoch": 0.28090146911776015, - "learning_rate": 0.00245285895804853, - "loss": 1.2694, - "step": 3652 - }, - { - "epoch": 0.2809783862779786, - "learning_rate": 0.0024525789940408843, - "loss": 0.9074, - "step": 3653 - }, - { - "epoch": 0.28105530343819707, - "learning_rate": 0.0024522989744111593, - "loss": 1.0224, - "step": 3654 - }, - { - "epoch": 0.2811322205984155, - "learning_rate": 0.002452018899175704, - "loss": 1.1235, - "step": 3655 - }, - { - "epoch": 0.28120913775863393, - "learning_rate": 0.0024517387683508736, - "loss": 1.1201, - "step": 3656 - }, - { - "epoch": 0.2812860549188524, - "learning_rate": 0.002451458581953024, - "loss": 1.1645, - "step": 3657 - }, - { - "epoch": 0.28136297207907085, - "learning_rate": 0.002451178339998517, - "loss": 1.1267, - "step": 3658 - }, - { - "epoch": 0.2814398892392893, - "learning_rate": 0.002450898042503715, - "loss": 1.298, - "step": 3659 - }, - { - "epoch": 0.2815168063995077, - "learning_rate": 0.002450617689484985, - "loss": 1.2631, - "step": 3660 - }, - { - "epoch": 0.2815937235597262, - "learning_rate": 0.0024503372809586976, - "loss": 1.2871, - "step": 3661 - }, - { - "epoch": 0.28167064071994463, - "learning_rate": 0.002450056816941226, - "loss": 1.3321, - "step": 3662 - }, - { - "epoch": 0.2817475578801631, - "learning_rate": 0.002449776297448947, - "loss": 0.9775, - "step": 3663 - }, - { - "epoch": 0.2818244750403815, - "learning_rate": 0.00244949572249824, - "loss": 1.445, - "step": 3664 - }, - { - "epoch": 0.28190139220059995, - "learning_rate": 0.002449215092105488, - "loss": 0.9526, - "step": 3665 - }, - { - "epoch": 0.2819783093608184, - "learning_rate": 0.002448934406287078, - "loss": 1.1173, - "step": 3666 - }, - { - "epoch": 0.2820552265210368, - "learning_rate": 0.0024486536650593985, - "loss": 1.3006, - "step": 3667 - }, - { - "epoch": 0.2821321436812553, - "learning_rate": 0.0024483728684388433, - "loss": 0.9413, - "step": 3668 - }, - { - "epoch": 0.28220906084147374, - "learning_rate": 0.002448092016441808, - "loss": 1.1158, - "step": 3669 - }, - { - 
"epoch": 0.2822859780016922, - "learning_rate": 0.002447811109084691, - "loss": 1.1066, - "step": 3670 - }, - { - "epoch": 0.2823628951619106, - "learning_rate": 0.002447530146383896, - "loss": 1.2595, - "step": 3671 - }, - { - "epoch": 0.28243981232212906, - "learning_rate": 0.0024472491283558284, - "loss": 0.7838, - "step": 3672 - }, - { - "epoch": 0.2825167294823475, - "learning_rate": 0.0024469680550168967, - "loss": 1.1258, - "step": 3673 - }, - { - "epoch": 0.282593646642566, - "learning_rate": 0.002446686926383514, - "loss": 0.934, - "step": 3674 - }, - { - "epoch": 0.2826705638027844, - "learning_rate": 0.002446405742472095, - "loss": 1.1691, - "step": 3675 - }, - { - "epoch": 0.28274748096300284, - "learning_rate": 0.0024461245032990574, - "loss": 1.3394, - "step": 3676 - }, - { - "epoch": 0.2828243981232213, - "learning_rate": 0.002445843208880825, - "loss": 1.3558, - "step": 3677 - }, - { - "epoch": 0.28290131528343976, - "learning_rate": 0.0024455618592338214, - "loss": 1.2262, - "step": 3678 - }, - { - "epoch": 0.28297823244365816, - "learning_rate": 0.0024452804543744757, - "loss": 1.4341, - "step": 3679 - }, - { - "epoch": 0.2830551496038766, - "learning_rate": 0.002444998994319219, - "loss": 1.3195, - "step": 3680 - }, - { - "epoch": 0.2831320667640951, - "learning_rate": 0.002444717479084486, - "loss": 1.0892, - "step": 3681 - }, - { - "epoch": 0.28320898392431354, - "learning_rate": 0.002444435908686715, - "loss": 1.1523, - "step": 3682 - }, - { - "epoch": 0.28328590108453194, - "learning_rate": 0.0024441542831423473, - "loss": 0.8263, - "step": 3683 - }, - { - "epoch": 0.2833628182447504, - "learning_rate": 0.002443872602467826, - "loss": 1.1053, - "step": 3684 - }, - { - "epoch": 0.28343973540496886, - "learning_rate": 0.0024435908666796007, - "loss": 0.9128, - "step": 3685 - }, - { - "epoch": 0.28351665256518727, - "learning_rate": 0.0024433090757941217, - "loss": 1.2603, - "step": 3686 - }, - { - "epoch": 0.2835935697254057, - "learning_rate": 0.002443027229827842, - "loss": 1.3255, - "step": 3687 - }, - { - "epoch": 0.2836704868856242, - "learning_rate": 0.002442745328797219, - "loss": 0.8551, - "step": 3688 - }, - { - "epoch": 0.28374740404584264, - "learning_rate": 0.002442463372718715, - "loss": 1.2226, - "step": 3689 - }, - { - "epoch": 0.28382432120606105, - "learning_rate": 0.002442181361608792, - "loss": 1.0914, - "step": 3690 - }, - { - "epoch": 0.2839012383662795, - "learning_rate": 0.002441899295483917, - "loss": 1.2384, - "step": 3691 - }, - { - "epoch": 0.28397815552649797, - "learning_rate": 0.002441617174360561, - "loss": 1.3121, - "step": 3692 - }, - { - "epoch": 0.2840550726867164, - "learning_rate": 0.002441334998255196, - "loss": 1.0933, - "step": 3693 - }, - { - "epoch": 0.28413198984693483, - "learning_rate": 0.0024410527671843004, - "loss": 0.8185, - "step": 3694 - }, - { - "epoch": 0.2842089070071533, - "learning_rate": 0.0024407704811643524, - "loss": 0.9503, - "step": 3695 - }, - { - "epoch": 0.28428582416737175, - "learning_rate": 0.0024404881402118354, - "loss": 1.2215, - "step": 3696 - }, - { - "epoch": 0.2843627413275902, - "learning_rate": 0.0024402057443432363, - "loss": 0.7783, - "step": 3697 - }, - { - "epoch": 0.2844396584878086, - "learning_rate": 0.002439923293575044, - "loss": 0.9716, - "step": 3698 - }, - { - "epoch": 0.28451657564802707, - "learning_rate": 0.0024396407879237504, - "loss": 1.3816, - "step": 3699 - }, - { - "epoch": 0.28459349280824553, - "learning_rate": 0.002439358227405852, - "loss": 0.9018, - "step": 3700 - }, 
- { - "epoch": 0.284670409968464, - "learning_rate": 0.002439075612037848, - "loss": 0.8355, - "step": 3701 - }, - { - "epoch": 0.2847473271286824, - "learning_rate": 0.0024387929418362405, - "loss": 1.3953, - "step": 3702 - }, - { - "epoch": 0.28482424428890085, - "learning_rate": 0.0024385102168175345, - "loss": 1.1761, - "step": 3703 - }, - { - "epoch": 0.2849011614491193, - "learning_rate": 0.0024382274369982387, - "loss": 1.2913, - "step": 3704 - }, - { - "epoch": 0.28497807860933777, - "learning_rate": 0.0024379446023948654, - "loss": 0.9577, - "step": 3705 - }, - { - "epoch": 0.2850549957695562, - "learning_rate": 0.0024376617130239292, - "loss": 1.095, - "step": 3706 - }, - { - "epoch": 0.28513191292977463, - "learning_rate": 0.002437378768901948, - "loss": 1.3237, - "step": 3707 - }, - { - "epoch": 0.2852088300899931, - "learning_rate": 0.0024370957700454443, - "loss": 1.1775, - "step": 3708 - }, - { - "epoch": 0.2852857472502115, - "learning_rate": 0.002436812716470941, - "loss": 1.2467, - "step": 3709 - }, - { - "epoch": 0.28536266441042996, - "learning_rate": 0.0024365296081949677, - "loss": 0.9706, - "step": 3710 - }, - { - "epoch": 0.2854395815706484, - "learning_rate": 0.0024362464452340545, - "loss": 1.258, - "step": 3711 - }, - { - "epoch": 0.2855164987308669, - "learning_rate": 0.002435963227604735, - "loss": 1.1666, - "step": 3712 - }, - { - "epoch": 0.2855934158910853, - "learning_rate": 0.0024356799553235478, - "loss": 0.7968, - "step": 3713 - }, - { - "epoch": 0.28567033305130374, - "learning_rate": 0.0024353966284070324, - "loss": 1.0997, - "step": 3714 - }, - { - "epoch": 0.2857472502115222, - "learning_rate": 0.002435113246871733, - "loss": 1.0189, - "step": 3715 - }, - { - "epoch": 0.28582416737174066, - "learning_rate": 0.0024348298107341967, - "loss": 1.1352, - "step": 3716 - }, - { - "epoch": 0.28590108453195906, - "learning_rate": 0.002434546320010974, - "loss": 0.9592, - "step": 3717 - }, - { - "epoch": 0.2859780016921775, - "learning_rate": 0.002434262774718618, - "loss": 1.073, - "step": 3718 - }, - { - "epoch": 0.286054918852396, - "learning_rate": 0.002433979174873684, - "loss": 1.1065, - "step": 3719 - }, - { - "epoch": 0.28613183601261444, - "learning_rate": 0.0024336955204927323, - "loss": 1.1247, - "step": 3720 - }, - { - "epoch": 0.28620875317283284, - "learning_rate": 0.0024334118115923267, - "loss": 1.1813, - "step": 3721 - }, - { - "epoch": 0.2862856703330513, - "learning_rate": 0.0024331280481890327, - "loss": 1.3859, - "step": 3722 - }, - { - "epoch": 0.28636258749326976, - "learning_rate": 0.0024328442302994193, - "loss": 1.3367, - "step": 3723 - }, - { - "epoch": 0.2864395046534882, - "learning_rate": 0.0024325603579400594, - "loss": 1.3159, - "step": 3724 - }, - { - "epoch": 0.2865164218137066, - "learning_rate": 0.002432276431127528, - "loss": 1.1745, - "step": 3725 - }, - { - "epoch": 0.2865933389739251, - "learning_rate": 0.002431992449878404, - "loss": 0.9767, - "step": 3726 - }, - { - "epoch": 0.28667025613414354, - "learning_rate": 0.00243170841420927, - "loss": 1.24, - "step": 3727 - }, - { - "epoch": 0.28674717329436195, - "learning_rate": 0.0024314243241367107, - "loss": 0.8242, - "step": 3728 - }, - { - "epoch": 0.2868240904545804, - "learning_rate": 0.002431140179677314, - "loss": 0.8692, - "step": 3729 - }, - { - "epoch": 0.28690100761479886, - "learning_rate": 0.002430855980847672, - "loss": 1.1353, - "step": 3730 - }, - { - "epoch": 0.2869779247750173, - "learning_rate": 0.002430571727664379, - "loss": 1.2912, - "step": 3731 - 
}, - { - "epoch": 0.2870548419352357, - "learning_rate": 0.0024302874201440334, - "loss": 1.0579, - "step": 3732 - }, - { - "epoch": 0.2871317590954542, - "learning_rate": 0.0024300030583032355, - "loss": 1.1849, - "step": 3733 - }, - { - "epoch": 0.28720867625567265, - "learning_rate": 0.0024297186421585897, - "loss": 1.1034, - "step": 3734 - }, - { - "epoch": 0.2872855934158911, - "learning_rate": 0.002429434171726704, - "loss": 0.9201, - "step": 3735 - }, - { - "epoch": 0.2873625105761095, - "learning_rate": 0.002429149647024188, - "loss": 1.1395, - "step": 3736 - }, - { - "epoch": 0.28743942773632797, - "learning_rate": 0.0024288650680676558, - "loss": 1.0924, - "step": 3737 - }, - { - "epoch": 0.2875163448965464, - "learning_rate": 0.0024285804348737243, - "loss": 1.451, - "step": 3738 - }, - { - "epoch": 0.2875932620567649, - "learning_rate": 0.002428295747459013, - "loss": 0.8121, - "step": 3739 - }, - { - "epoch": 0.2876701792169833, - "learning_rate": 0.0024280110058401464, - "loss": 0.8656, - "step": 3740 - }, - { - "epoch": 0.28774709637720175, - "learning_rate": 0.0024277262100337493, - "loss": 0.8274, - "step": 3741 - }, - { - "epoch": 0.2878240135374202, - "learning_rate": 0.0024274413600564523, - "loss": 1.1982, - "step": 3742 - }, - { - "epoch": 0.28790093069763867, - "learning_rate": 0.0024271564559248874, - "loss": 1.1877, - "step": 3743 - }, - { - "epoch": 0.28797784785785707, - "learning_rate": 0.0024268714976556915, - "loss": 0.7434, - "step": 3744 - }, - { - "epoch": 0.28805476501807553, - "learning_rate": 0.0024265864852655024, - "loss": 1.0574, - "step": 3745 - }, - { - "epoch": 0.288131682178294, - "learning_rate": 0.002426301418770963, - "loss": 1.2975, - "step": 3746 - }, - { - "epoch": 0.2882085993385124, - "learning_rate": 0.002426016298188718, - "loss": 1.3393, - "step": 3747 - }, - { - "epoch": 0.28828551649873085, - "learning_rate": 0.0024257311235354163, - "loss": 1.0958, - "step": 3748 - }, - { - "epoch": 0.2883624336589493, - "learning_rate": 0.00242544589482771, - "loss": 0.8638, - "step": 3749 - }, - { - "epoch": 0.2884393508191678, - "learning_rate": 0.0024251606120822525, - "loss": 1.2106, - "step": 3750 - }, - { - "epoch": 0.2885162679793862, - "learning_rate": 0.0024248752753157038, - "loss": 1.0024, - "step": 3751 - }, - { - "epoch": 0.28859318513960464, - "learning_rate": 0.0024245898845447235, - "loss": 0.8348, - "step": 3752 - }, - { - "epoch": 0.2886701022998231, - "learning_rate": 0.002424304439785976, - "loss": 1.1999, - "step": 3753 - }, - { - "epoch": 0.28874701946004155, - "learning_rate": 0.0024240189410561293, - "loss": 1.11, - "step": 3754 - }, - { - "epoch": 0.28882393662025996, - "learning_rate": 0.0024237333883718533, - "loss": 1.1857, - "step": 3755 - }, - { - "epoch": 0.2889008537804784, - "learning_rate": 0.002423447781749822, - "loss": 1.0494, - "step": 3756 - }, - { - "epoch": 0.2889777709406969, - "learning_rate": 0.002423162121206712, - "loss": 1.0682, - "step": 3757 - }, - { - "epoch": 0.28905468810091534, - "learning_rate": 0.0024228764067592045, - "loss": 1.0615, - "step": 3758 - }, - { - "epoch": 0.28913160526113374, - "learning_rate": 0.002422590638423982, - "loss": 1.4432, - "step": 3759 - }, - { - "epoch": 0.2892085224213522, - "learning_rate": 0.00242230481621773, - "loss": 1.26, - "step": 3760 - }, - { - "epoch": 0.28928543958157066, - "learning_rate": 0.0024220189401571385, - "loss": 1.0253, - "step": 3761 - }, - { - "epoch": 0.2893623567417891, - "learning_rate": 0.0024217330102589, - "loss": 1.2405, - "step": 3762 
- }, - { - "epoch": 0.2894392739020075, - "learning_rate": 0.0024214470265397116, - "loss": 1.1035, - "step": 3763 - }, - { - "epoch": 0.289516191062226, - "learning_rate": 0.00242116098901627, - "loss": 1.0638, - "step": 3764 - }, - { - "epoch": 0.28959310822244444, - "learning_rate": 0.0024208748977052785, - "loss": 1.1296, - "step": 3765 - }, - { - "epoch": 0.2896700253826629, - "learning_rate": 0.002420588752623442, - "loss": 1.0415, - "step": 3766 - }, - { - "epoch": 0.2897469425428813, - "learning_rate": 0.0024203025537874686, - "loss": 1.5781, - "step": 3767 - }, - { - "epoch": 0.28982385970309976, - "learning_rate": 0.00242001630121407, - "loss": 1.0284, - "step": 3768 - }, - { - "epoch": 0.2899007768633182, - "learning_rate": 0.002419729994919961, - "loss": 1.1952, - "step": 3769 - }, - { - "epoch": 0.2899776940235366, - "learning_rate": 0.0024194436349218593, - "loss": 0.9078, - "step": 3770 - }, - { - "epoch": 0.2900546111837551, - "learning_rate": 0.0024191572212364848, - "loss": 0.93, - "step": 3771 - }, - { - "epoch": 0.29013152834397354, - "learning_rate": 0.0024188707538805626, - "loss": 0.9572, - "step": 3772 - }, - { - "epoch": 0.290208445504192, - "learning_rate": 0.0024185842328708195, - "loss": 0.8494, - "step": 3773 - }, - { - "epoch": 0.2902853626644104, - "learning_rate": 0.0024182976582239855, - "loss": 1.2251, - "step": 3774 - }, - { - "epoch": 0.29036227982462887, - "learning_rate": 0.0024180110299567947, - "loss": 1.0756, - "step": 3775 - }, - { - "epoch": 0.2904391969848473, - "learning_rate": 0.0024177243480859828, - "loss": 1.2445, - "step": 3776 - }, - { - "epoch": 0.2905161141450658, - "learning_rate": 0.00241743761262829, - "loss": 1.0082, - "step": 3777 - }, - { - "epoch": 0.2905930313052842, - "learning_rate": 0.002417150823600459, - "loss": 1.6257, - "step": 3778 - }, - { - "epoch": 0.29066994846550265, - "learning_rate": 0.002416863981019235, - "loss": 1.2663, - "step": 3779 - }, - { - "epoch": 0.2907468656257211, - "learning_rate": 0.0024165770849013677, - "loss": 1.21, - "step": 3780 - }, - { - "epoch": 0.29082378278593957, - "learning_rate": 0.002416290135263609, - "loss": 1.0922, - "step": 3781 - }, - { - "epoch": 0.29090069994615797, - "learning_rate": 0.0024160031321227154, - "loss": 1.2834, - "step": 3782 - }, - { - "epoch": 0.29097761710637643, - "learning_rate": 0.0024157160754954437, - "loss": 1.0925, - "step": 3783 - }, - { - "epoch": 0.2910545342665949, - "learning_rate": 0.0024154289653985556, - "loss": 1.0391, - "step": 3784 - }, - { - "epoch": 0.29113145142681335, - "learning_rate": 0.0024151418018488165, - "loss": 1.0189, - "step": 3785 - }, - { - "epoch": 0.29120836858703175, - "learning_rate": 0.002414854584862994, - "loss": 1.3025, - "step": 3786 - }, - { - "epoch": 0.2912852857472502, - "learning_rate": 0.002414567314457859, - "loss": 1.1334, - "step": 3787 - }, - { - "epoch": 0.29136220290746867, - "learning_rate": 0.002414279990650185, - "loss": 1.2363, - "step": 3788 - }, - { - "epoch": 0.2914391200676871, - "learning_rate": 0.0024139926134567496, - "loss": 1.0794, - "step": 3789 - }, - { - "epoch": 0.29151603722790553, - "learning_rate": 0.002413705182894333, - "loss": 1.3255, - "step": 3790 - }, - { - "epoch": 0.291592954388124, - "learning_rate": 0.0024134176989797185, - "loss": 0.7982, - "step": 3791 - }, - { - "epoch": 0.29166987154834245, - "learning_rate": 0.0024131301617296924, - "loss": 1.1689, - "step": 3792 - }, - { - "epoch": 0.29174678870856086, - "learning_rate": 0.0024128425711610452, - "loss": 1.076, - "step": 
3793 - }, - { - "epoch": 0.2918237058687793, - "learning_rate": 0.002412554927290568, - "loss": 0.9094, - "step": 3794 - }, - { - "epoch": 0.2919006230289978, - "learning_rate": 0.0024122672301350578, - "loss": 1.0881, - "step": 3795 - }, - { - "epoch": 0.29197754018921623, - "learning_rate": 0.002411979479711314, - "loss": 1.0724, - "step": 3796 - }, - { - "epoch": 0.29205445734943464, - "learning_rate": 0.0024116916760361373, - "loss": 1.0396, - "step": 3797 - }, - { - "epoch": 0.2921313745096531, - "learning_rate": 0.0024114038191263335, - "loss": 1.3407, - "step": 3798 - }, - { - "epoch": 0.29220829166987156, - "learning_rate": 0.0024111159089987106, - "loss": 1.3073, - "step": 3799 - }, - { - "epoch": 0.29228520883009, - "learning_rate": 0.002410827945670081, - "loss": 1.0673, - "step": 3800 - }, - { - "epoch": 0.2923621259903084, - "learning_rate": 0.0024105399291572582, - "loss": 1.3231, - "step": 3801 - }, - { - "epoch": 0.2924390431505269, - "learning_rate": 0.00241025185947706, - "loss": 1.0472, - "step": 3802 - }, - { - "epoch": 0.29251596031074534, - "learning_rate": 0.0024099637366463067, - "loss": 1.1477, - "step": 3803 - }, - { - "epoch": 0.2925928774709638, - "learning_rate": 0.0024096755606818226, - "loss": 1.341, - "step": 3804 - }, - { - "epoch": 0.2926697946311822, - "learning_rate": 0.0024093873316004344, - "loss": 1.3286, - "step": 3805 - }, - { - "epoch": 0.29274671179140066, - "learning_rate": 0.0024090990494189724, - "loss": 1.1217, - "step": 3806 - }, - { - "epoch": 0.2928236289516191, - "learning_rate": 0.0024088107141542695, - "loss": 1.1633, - "step": 3807 - }, - { - "epoch": 0.2929005461118376, - "learning_rate": 0.0024085223258231616, - "loss": 1.3188, - "step": 3808 - }, - { - "epoch": 0.292977463272056, - "learning_rate": 0.0024082338844424885, - "loss": 1.0036, - "step": 3809 - }, - { - "epoch": 0.29305438043227444, - "learning_rate": 0.002407945390029092, - "loss": 1.3416, - "step": 3810 - }, - { - "epoch": 0.2931312975924929, - "learning_rate": 0.0024076568425998182, - "loss": 0.8524, - "step": 3811 - }, - { - "epoch": 0.2932082147527113, - "learning_rate": 0.002407368242171516, - "loss": 1.2356, - "step": 3812 - }, - { - "epoch": 0.29328513191292976, - "learning_rate": 0.002407079588761036, - "loss": 1.1089, - "step": 3813 - }, - { - "epoch": 0.2933620490731482, - "learning_rate": 0.002406790882385233, - "loss": 1.4834, - "step": 3814 - }, - { - "epoch": 0.2934389662333667, - "learning_rate": 0.0024065021230609655, - "loss": 0.8913, - "step": 3815 - }, - { - "epoch": 0.2935158833935851, - "learning_rate": 0.002406213310805095, - "loss": 1.3506, - "step": 3816 - }, - { - "epoch": 0.29359280055380355, - "learning_rate": 0.002405924445634484, - "loss": 0.9484, - "step": 3817 - }, - { - "epoch": 0.293669717714022, - "learning_rate": 0.0024056355275660006, - "loss": 1.6326, - "step": 3818 - }, - { - "epoch": 0.29374663487424046, - "learning_rate": 0.0024053465566165155, - "loss": 1.4045, - "step": 3819 - }, - { - "epoch": 0.29382355203445887, - "learning_rate": 0.002405057532802901, - "loss": 1.3565, - "step": 3820 - }, - { - "epoch": 0.2939004691946773, - "learning_rate": 0.002404768456142034, - "loss": 1.2735, - "step": 3821 - }, - { - "epoch": 0.2939773863548958, - "learning_rate": 0.002404479326650794, - "loss": 1.3249, - "step": 3822 - }, - { - "epoch": 0.29405430351511425, - "learning_rate": 0.0024041901443460634, - "loss": 1.3285, - "step": 3823 - }, - { - "epoch": 0.29413122067533265, - "learning_rate": 0.002403900909244728, - "loss": 1.3567, - 
"step": 3824 - }, - { - "epoch": 0.2942081378355511, - "learning_rate": 0.0024036116213636766, - "loss": 0.9761, - "step": 3825 - }, - { - "epoch": 0.29428505499576957, - "learning_rate": 0.002403322280719801, - "loss": 0.9587, - "step": 3826 - }, - { - "epoch": 0.294361972155988, - "learning_rate": 0.0024030328873299953, - "loss": 1.0449, - "step": 3827 - }, - { - "epoch": 0.29443888931620643, - "learning_rate": 0.0024027434412111593, - "loss": 1.0609, - "step": 3828 - }, - { - "epoch": 0.2945158064764249, - "learning_rate": 0.002402453942380192, - "loss": 1.0602, - "step": 3829 - }, - { - "epoch": 0.29459272363664335, - "learning_rate": 0.002402164390853999, - "loss": 0.949, - "step": 3830 - }, - { - "epoch": 0.29466964079686175, - "learning_rate": 0.0024018747866494865, - "loss": 1.1173, - "step": 3831 - }, - { - "epoch": 0.2947465579570802, - "learning_rate": 0.0024015851297835658, - "loss": 0.6908, - "step": 3832 - }, - { - "epoch": 0.2948234751172987, - "learning_rate": 0.0024012954202731495, - "loss": 0.8463, - "step": 3833 - }, - { - "epoch": 0.29490039227751713, - "learning_rate": 0.0024010056581351543, - "loss": 1.4411, - "step": 3834 - }, - { - "epoch": 0.29497730943773554, - "learning_rate": 0.0024007158433865003, - "loss": 1.1703, - "step": 3835 - }, - { - "epoch": 0.295054226597954, - "learning_rate": 0.0024004259760441087, - "loss": 1.1127, - "step": 3836 - }, - { - "epoch": 0.29513114375817245, - "learning_rate": 0.0024001360561249066, - "loss": 1.3076, - "step": 3837 - }, - { - "epoch": 0.2952080609183909, - "learning_rate": 0.002399846083645822, - "loss": 0.7568, - "step": 3838 - }, - { - "epoch": 0.2952849780786093, - "learning_rate": 0.002399556058623786, - "loss": 1.1442, - "step": 3839 - }, - { - "epoch": 0.2953618952388278, - "learning_rate": 0.0023992659810757352, - "loss": 0.9839, - "step": 3840 - }, - { - "epoch": 0.29543881239904624, - "learning_rate": 0.0023989758510186057, - "loss": 1.0016, - "step": 3841 - }, - { - "epoch": 0.2955157295592647, - "learning_rate": 0.00239868566846934, - "loss": 1.1556, - "step": 3842 - }, - { - "epoch": 0.2955926467194831, - "learning_rate": 0.0023983954334448813, - "loss": 1.1459, - "step": 3843 - }, - { - "epoch": 0.29566956387970156, - "learning_rate": 0.0023981051459621766, - "loss": 1.6542, - "step": 3844 - }, - { - "epoch": 0.29574648103992, - "learning_rate": 0.0023978148060381775, - "loss": 1.0692, - "step": 3845 - }, - { - "epoch": 0.2958233982001385, - "learning_rate": 0.0023975244136898353, - "loss": 0.9232, - "step": 3846 - }, - { - "epoch": 0.2959003153603569, - "learning_rate": 0.0023972339689341073, - "loss": 1.0787, - "step": 3847 - }, - { - "epoch": 0.29597723252057534, - "learning_rate": 0.0023969434717879532, - "loss": 1.2244, - "step": 3848 - }, - { - "epoch": 0.2960541496807938, - "learning_rate": 0.0023966529222683346, - "loss": 1.1639, - "step": 3849 - }, - { - "epoch": 0.2961310668410122, - "learning_rate": 0.0023963623203922175, - "loss": 1.1043, - "step": 3850 - }, - { - "epoch": 0.29620798400123066, - "learning_rate": 0.0023960716661765706, - "loss": 1.1608, - "step": 3851 - }, - { - "epoch": 0.2962849011614491, - "learning_rate": 0.002395780959638365, - "loss": 1.0279, - "step": 3852 - }, - { - "epoch": 0.2963618183216676, - "learning_rate": 0.0023954902007945755, - "loss": 1.1058, - "step": 3853 - }, - { - "epoch": 0.296438735481886, - "learning_rate": 0.00239519938966218, - "loss": 1.2928, - "step": 3854 - }, - { - "epoch": 0.29651565264210444, - "learning_rate": 0.0023949085262581597, - "loss": 
1.0284, - "step": 3855 - }, - { - "epoch": 0.2965925698023229, - "learning_rate": 0.0023946176105994967, - "loss": 1.5161, - "step": 3856 - }, - { - "epoch": 0.29666948696254136, - "learning_rate": 0.00239432664270318, - "loss": 1.0126, - "step": 3857 - }, - { - "epoch": 0.29674640412275977, - "learning_rate": 0.002394035622586199, - "loss": 1.4353, - "step": 3858 - }, - { - "epoch": 0.2968233212829782, - "learning_rate": 0.0023937445502655455, - "loss": 1.2178, - "step": 3859 - }, - { - "epoch": 0.2969002384431967, - "learning_rate": 0.0023934534257582163, - "loss": 1.2485, - "step": 3860 - }, - { - "epoch": 0.29697715560341514, - "learning_rate": 0.0023931622490812104, - "loss": 1.1826, - "step": 3861 - }, - { - "epoch": 0.29705407276363355, - "learning_rate": 0.00239287102025153, - "loss": 1.4365, - "step": 3862 - }, - { - "epoch": 0.297130989923852, - "learning_rate": 0.00239257973928618, - "loss": 1.123, - "step": 3863 - }, - { - "epoch": 0.29720790708407047, - "learning_rate": 0.00239228840620217, - "loss": 1.1501, - "step": 3864 - }, - { - "epoch": 0.2972848242442889, - "learning_rate": 0.0023919970210165087, - "loss": 1.2852, - "step": 3865 - }, - { - "epoch": 0.29736174140450733, - "learning_rate": 0.002391705583746212, - "loss": 1.384, - "step": 3866 - }, - { - "epoch": 0.2974386585647258, - "learning_rate": 0.0023914140944082973, - "loss": 1.5268, - "step": 3867 - }, - { - "epoch": 0.29751557572494425, - "learning_rate": 0.0023911225530197843, - "loss": 1.1828, - "step": 3868 - }, - { - "epoch": 0.2975924928851627, - "learning_rate": 0.0023908309595976965, - "loss": 1.4875, - "step": 3869 - }, - { - "epoch": 0.2976694100453811, - "learning_rate": 0.0023905393141590613, - "loss": 1.1925, - "step": 3870 - }, - { - "epoch": 0.29774632720559957, - "learning_rate": 0.0023902476167209077, - "loss": 0.9692, - "step": 3871 - }, - { - "epoch": 0.29782324436581803, - "learning_rate": 0.002389955867300267, - "loss": 0.8897, - "step": 3872 - }, - { - "epoch": 0.29790016152603643, - "learning_rate": 0.002389664065914176, - "loss": 0.7786, - "step": 3873 - }, - { - "epoch": 0.2979770786862549, - "learning_rate": 0.002389372212579673, - "loss": 1.1258, - "step": 3874 - }, - { - "epoch": 0.29805399584647335, - "learning_rate": 0.0023890803073138, - "loss": 1.0717, - "step": 3875 - }, - { - "epoch": 0.2981309130066918, - "learning_rate": 0.0023887883501336007, - "loss": 0.6882, - "step": 3876 - }, - { - "epoch": 0.2982078301669102, - "learning_rate": 0.0023884963410561234, - "loss": 1.1122, - "step": 3877 - }, - { - "epoch": 0.2982847473271287, - "learning_rate": 0.002388204280098419, - "loss": 1.3263, - "step": 3878 - }, - { - "epoch": 0.29836166448734713, - "learning_rate": 0.0023879121672775415, - "loss": 0.8651, - "step": 3879 - }, - { - "epoch": 0.2984385816475656, - "learning_rate": 0.0023876200026105463, - "loss": 1.3739, - "step": 3880 - }, - { - "epoch": 0.298515498807784, - "learning_rate": 0.002387327786114495, - "loss": 0.985, - "step": 3881 - }, - { - "epoch": 0.29859241596800246, - "learning_rate": 0.002387035517806449, - "loss": 1.0379, - "step": 3882 - }, - { - "epoch": 0.2986693331282209, - "learning_rate": 0.0023867431977034744, - "loss": 1.0018, - "step": 3883 - }, - { - "epoch": 0.2987462502884394, - "learning_rate": 0.0023864508258226408, - "loss": 0.9304, - "step": 3884 - }, - { - "epoch": 0.2988231674486578, - "learning_rate": 0.0023861584021810196, - "loss": 0.8609, - "step": 3885 - }, - { - "epoch": 0.29890008460887624, - "learning_rate": 0.002385865926795686, - "loss": 
1.1605, - "step": 3886 - }, - { - "epoch": 0.2989770017690947, - "learning_rate": 0.002385573399683717, - "loss": 1.0836, - "step": 3887 - }, - { - "epoch": 0.29905391892931316, - "learning_rate": 0.0023852808208621947, - "loss": 1.4671, - "step": 3888 - }, - { - "epoch": 0.29913083608953156, - "learning_rate": 0.0023849881903482027, - "loss": 1.2837, - "step": 3889 - }, - { - "epoch": 0.29920775324975, - "learning_rate": 0.0023846955081588276, - "loss": 1.079, - "step": 3890 - }, - { - "epoch": 0.2992846704099685, - "learning_rate": 0.00238440277431116, - "loss": 1.202, - "step": 3891 - }, - { - "epoch": 0.2993615875701869, - "learning_rate": 0.002384109988822293, - "loss": 1.0768, - "step": 3892 - }, - { - "epoch": 0.29943850473040534, - "learning_rate": 0.002383817151709322, - "loss": 1.8944, - "step": 3893 - }, - { - "epoch": 0.2995154218906238, - "learning_rate": 0.002383524262989346, - "loss": 1.1621, - "step": 3894 - }, - { - "epoch": 0.29959233905084226, - "learning_rate": 0.002383231322679468, - "loss": 0.9345, - "step": 3895 - }, - { - "epoch": 0.29966925621106066, - "learning_rate": 0.0023829383307967925, - "loss": 1.178, - "step": 3896 - }, - { - "epoch": 0.2997461733712791, - "learning_rate": 0.002382645287358428, - "loss": 1.193, - "step": 3897 - }, - { - "epoch": 0.2998230905314976, - "learning_rate": 0.002382352192381485, - "loss": 1.0396, - "step": 3898 - }, - { - "epoch": 0.29990000769171604, - "learning_rate": 0.002382059045883078, - "loss": 1.4754, - "step": 3899 - }, - { - "epoch": 0.29997692485193445, - "learning_rate": 0.0023817658478803246, - "loss": 1.4991, - "step": 3900 - }, - { - "epoch": 0.3000538420121529, - "learning_rate": 0.0023814725983903437, - "loss": 0.7582, - "step": 3901 - }, - { - "epoch": 0.30013075917237136, - "learning_rate": 0.002381179297430259, - "loss": 1.2912, - "step": 3902 - }, - { - "epoch": 0.3002076763325898, - "learning_rate": 0.002380885945017197, - "loss": 1.1994, - "step": 3903 - }, - { - "epoch": 0.3002845934928082, - "learning_rate": 0.002380592541168287, - "loss": 1.0211, - "step": 3904 - }, - { - "epoch": 0.3003615106530267, - "learning_rate": 0.00238029908590066, - "loss": 1.4471, - "step": 3905 - }, - { - "epoch": 0.30043842781324515, - "learning_rate": 0.0023800055792314524, - "loss": 1.5325, - "step": 3906 - }, - { - "epoch": 0.3005153449734636, - "learning_rate": 0.002379712021177802, - "loss": 1.1501, - "step": 3907 - }, - { - "epoch": 0.300592262133682, - "learning_rate": 0.00237941841175685, - "loss": 0.637, - "step": 3908 - }, - { - "epoch": 0.30066917929390047, - "learning_rate": 0.00237912475098574, - "loss": 1.2793, - "step": 3909 - }, - { - "epoch": 0.3007460964541189, - "learning_rate": 0.0023788310388816202, - "loss": 1.1716, - "step": 3910 - }, - { - "epoch": 0.30082301361433733, - "learning_rate": 0.00237853727546164, - "loss": 1.6392, - "step": 3911 - }, - { - "epoch": 0.3008999307745558, - "learning_rate": 0.002378243460742952, - "loss": 0.9761, - "step": 3912 - }, - { - "epoch": 0.30097684793477425, - "learning_rate": 0.0023779495947427137, - "loss": 1.1093, - "step": 3913 - }, - { - "epoch": 0.3010537650949927, - "learning_rate": 0.0023776556774780834, - "loss": 1.2699, - "step": 3914 - }, - { - "epoch": 0.3011306822552111, - "learning_rate": 0.002377361708966223, - "loss": 1.3389, - "step": 3915 - }, - { - "epoch": 0.30120759941542957, - "learning_rate": 0.002377067689224299, - "loss": 1.124, - "step": 3916 - }, - { - "epoch": 0.30128451657564803, - "learning_rate": 0.0023767736182694777, - "loss": 1.1271, - 
"step": 3917 - }, - { - "epoch": 0.3013614337358665, - "learning_rate": 0.002376479496118931, - "loss": 1.1094, - "step": 3918 - }, - { - "epoch": 0.3014383508960849, - "learning_rate": 0.0023761853227898338, - "loss": 1.0094, - "step": 3919 - }, - { - "epoch": 0.30151526805630335, - "learning_rate": 0.0023758910982993623, - "loss": 1.157, - "step": 3920 - }, - { - "epoch": 0.3015921852165218, - "learning_rate": 0.002375596822664696, - "loss": 1.0624, - "step": 3921 - }, - { - "epoch": 0.3016691023767403, - "learning_rate": 0.0023753024959030193, - "loss": 1.0249, - "step": 3922 - }, - { - "epoch": 0.3017460195369587, - "learning_rate": 0.002375008118031518, - "loss": 1.2358, - "step": 3923 - }, - { - "epoch": 0.30182293669717714, - "learning_rate": 0.0023747136890673805, - "loss": 1.0304, - "step": 3924 - }, - { - "epoch": 0.3018998538573956, - "learning_rate": 0.0023744192090277986, - "loss": 1.3563, - "step": 3925 - }, - { - "epoch": 0.30197677101761405, - "learning_rate": 0.0023741246779299687, - "loss": 1.2886, - "step": 3926 - }, - { - "epoch": 0.30205368817783246, - "learning_rate": 0.002373830095791087, - "loss": 1.0715, - "step": 3927 - }, - { - "epoch": 0.3021306053380509, - "learning_rate": 0.0023735354626283557, - "loss": 1.3066, - "step": 3928 - }, - { - "epoch": 0.3022075224982694, - "learning_rate": 0.0023732407784589783, - "loss": 1.2763, - "step": 3929 - }, - { - "epoch": 0.30228443965848784, - "learning_rate": 0.0023729460433001615, - "loss": 1.4041, - "step": 3930 - }, - { - "epoch": 0.30236135681870624, - "learning_rate": 0.0023726512571691162, - "loss": 1.1763, - "step": 3931 - }, - { - "epoch": 0.3024382739789247, - "learning_rate": 0.0023723564200830542, - "loss": 1.3388, - "step": 3932 - }, - { - "epoch": 0.30251519113914316, - "learning_rate": 0.0023720615320591914, - "loss": 1.1631, - "step": 3933 - }, - { - "epoch": 0.30259210829936156, - "learning_rate": 0.0023717665931147474, - "loss": 0.9775, - "step": 3934 - }, - { - "epoch": 0.30266902545958, - "learning_rate": 0.002371471603266943, - "loss": 1.1244, - "step": 3935 - }, - { - "epoch": 0.3027459426197985, - "learning_rate": 0.0023711765625330037, - "loss": 1.2908, - "step": 3936 - }, - { - "epoch": 0.30282285978001694, - "learning_rate": 0.002370881470930157, - "loss": 0.8563, - "step": 3937 - }, - { - "epoch": 0.30289977694023534, - "learning_rate": 0.0023705863284756335, - "loss": 1.1576, - "step": 3938 - }, - { - "epoch": 0.3029766941004538, - "learning_rate": 0.0023702911351866666, - "loss": 1.0741, - "step": 3939 - }, - { - "epoch": 0.30305361126067226, - "learning_rate": 0.0023699958910804943, - "loss": 0.9602, - "step": 3940 - }, - { - "epoch": 0.3031305284208907, - "learning_rate": 0.0023697005961743544, - "loss": 1.0266, - "step": 3941 - }, - { - "epoch": 0.3032074455811091, - "learning_rate": 0.0023694052504854906, - "loss": 1.3567, - "step": 3942 - }, - { - "epoch": 0.3032843627413276, - "learning_rate": 0.0023691098540311478, - "loss": 1.1743, - "step": 3943 - }, - { - "epoch": 0.30336127990154604, - "learning_rate": 0.0023688144068285755, - "loss": 1.0754, - "step": 3944 - }, - { - "epoch": 0.3034381970617645, - "learning_rate": 0.002368518908895024, - "loss": 1.1054, - "step": 3945 - }, - { - "epoch": 0.3035151142219829, - "learning_rate": 0.0023682233602477483, - "loss": 1.0102, - "step": 3946 - }, - { - "epoch": 0.30359203138220137, - "learning_rate": 0.002367927760904006, - "loss": 1.1887, - "step": 3947 - }, - { - "epoch": 0.3036689485424198, - "learning_rate": 0.002367632110881057, - 
"loss": 0.8444, - "step": 3948 - }, - { - "epoch": 0.3037458657026383, - "learning_rate": 0.002367336410196165, - "loss": 1.2563, - "step": 3949 - }, - { - "epoch": 0.3038227828628567, - "learning_rate": 0.002367040658866596, - "loss": 1.0764, - "step": 3950 - }, - { - "epoch": 0.30389970002307515, - "learning_rate": 0.00236674485690962, - "loss": 1.0052, - "step": 3951 - }, - { - "epoch": 0.3039766171832936, - "learning_rate": 0.0023664490043425078, - "loss": 0.9344, - "step": 3952 - }, - { - "epoch": 0.304053534343512, - "learning_rate": 0.002366153101182535, - "loss": 0.9685, - "step": 3953 - }, - { - "epoch": 0.30413045150373047, - "learning_rate": 0.0023658571474469807, - "loss": 0.9161, - "step": 3954 - }, - { - "epoch": 0.30420736866394893, - "learning_rate": 0.0023655611431531247, - "loss": 1.4992, - "step": 3955 - }, - { - "epoch": 0.3042842858241674, - "learning_rate": 0.0023652650883182512, - "loss": 1.3994, - "step": 3956 - }, - { - "epoch": 0.3043612029843858, - "learning_rate": 0.002364968982959648, - "loss": 1.1665, - "step": 3957 - }, - { - "epoch": 0.30443812014460425, - "learning_rate": 0.002364672827094604, - "loss": 1.2797, - "step": 3958 - }, - { - "epoch": 0.3045150373048227, - "learning_rate": 0.0023643766207404127, - "loss": 1.1326, - "step": 3959 - }, - { - "epoch": 0.30459195446504117, - "learning_rate": 0.00236408036391437, - "loss": 1.1032, - "step": 3960 - }, - { - "epoch": 0.3046688716252596, - "learning_rate": 0.002363784056633774, - "loss": 1.1207, - "step": 3961 - }, - { - "epoch": 0.30474578878547803, - "learning_rate": 0.0023634876989159265, - "loss": 0.9382, - "step": 3962 - }, - { - "epoch": 0.3048227059456965, - "learning_rate": 0.0023631912907781326, - "loss": 0.9859, - "step": 3963 - }, - { - "epoch": 0.30489962310591495, - "learning_rate": 0.0023628948322376997, - "loss": 1.4293, - "step": 3964 - }, - { - "epoch": 0.30497654026613336, - "learning_rate": 0.0023625983233119385, - "loss": 1.1807, - "step": 3965 - }, - { - "epoch": 0.3050534574263518, - "learning_rate": 0.0023623017640181615, - "loss": 1.0156, - "step": 3966 - }, - { - "epoch": 0.3051303745865703, - "learning_rate": 0.002362005154373686, - "loss": 0.7093, - "step": 3967 - }, - { - "epoch": 0.30520729174678873, - "learning_rate": 0.0023617084943958313, - "loss": 0.9847, - "step": 3968 - }, - { - "epoch": 0.30528420890700714, - "learning_rate": 0.00236141178410192, - "loss": 1.0178, - "step": 3969 - }, - { - "epoch": 0.3053611260672256, - "learning_rate": 0.0023611150235092766, - "loss": 0.8512, - "step": 3970 - }, - { - "epoch": 0.30543804322744406, - "learning_rate": 0.0023608182126352295, - "loss": 1.0829, - "step": 3971 - }, - { - "epoch": 0.30551496038766246, - "learning_rate": 0.00236052135149711, - "loss": 1.1115, - "step": 3972 - }, - { - "epoch": 0.3055918775478809, - "learning_rate": 0.0023602244401122515, - "loss": 1.2082, - "step": 3973 - }, - { - "epoch": 0.3056687947080994, - "learning_rate": 0.0023599274784979916, - "loss": 1.5141, - "step": 3974 - }, - { - "epoch": 0.30574571186831784, - "learning_rate": 0.00235963046667167, - "loss": 1.2173, - "step": 3975 - }, - { - "epoch": 0.30582262902853624, - "learning_rate": 0.00235933340465063, - "loss": 1.3931, - "step": 3976 - }, - { - "epoch": 0.3058995461887547, - "learning_rate": 0.0023590362924522166, - "loss": 1.0222, - "step": 3977 - }, - { - "epoch": 0.30597646334897316, - "learning_rate": 0.0023587391300937794, - "loss": 1.5301, - "step": 3978 - }, - { - "epoch": 0.3060533805091916, - "learning_rate": 
0.0023584419175926683, - "loss": 1.0222, - "step": 3979 - }, - { - "epoch": 0.30613029766941, - "learning_rate": 0.00235814465496624, - "loss": 0.9644, - "step": 3980 - }, - { - "epoch": 0.3062072148296285, - "learning_rate": 0.0023578473422318507, - "loss": 1.4717, - "step": 3981 - }, - { - "epoch": 0.30628413198984694, - "learning_rate": 0.0023575499794068608, - "loss": 1.1743, - "step": 3982 - }, - { - "epoch": 0.3063610491500654, - "learning_rate": 0.002357252566508634, - "loss": 0.9093, - "step": 3983 - }, - { - "epoch": 0.3064379663102838, - "learning_rate": 0.002356955103554536, - "loss": 1.092, - "step": 3984 - }, - { - "epoch": 0.30651488347050226, - "learning_rate": 0.0023566575905619373, - "loss": 1.0453, - "step": 3985 - }, - { - "epoch": 0.3065918006307207, - "learning_rate": 0.0023563600275482086, - "loss": 0.8073, - "step": 3986 - }, - { - "epoch": 0.3066687177909392, - "learning_rate": 0.0023560624145307256, - "loss": 1.0317, - "step": 3987 - }, - { - "epoch": 0.3067456349511576, - "learning_rate": 0.002355764751526866, - "loss": 0.9002, - "step": 3988 - }, - { - "epoch": 0.30682255211137605, - "learning_rate": 0.0023554670385540107, - "loss": 1.3194, - "step": 3989 - }, - { - "epoch": 0.3068994692715945, - "learning_rate": 0.0023551692756295433, - "loss": 0.9924, - "step": 3990 - }, - { - "epoch": 0.30697638643181296, - "learning_rate": 0.0023548714627708504, - "loss": 1.0192, - "step": 3991 - }, - { - "epoch": 0.30705330359203137, - "learning_rate": 0.0023545735999953225, - "loss": 1.0896, - "step": 3992 - }, - { - "epoch": 0.3071302207522498, - "learning_rate": 0.002354275687320351, - "loss": 1.0743, - "step": 3993 - }, - { - "epoch": 0.3072071379124683, - "learning_rate": 0.002353977724763332, - "loss": 1.3867, - "step": 3994 - }, - { - "epoch": 0.3072840550726867, - "learning_rate": 0.002353679712341664, - "loss": 1.0094, - "step": 3995 - }, - { - "epoch": 0.30736097223290515, - "learning_rate": 0.0023533816500727472, - "loss": 1.2725, - "step": 3996 - }, - { - "epoch": 0.3074378893931236, - "learning_rate": 0.002353083537973987, - "loss": 1.4228, - "step": 3997 - }, - { - "epoch": 0.30751480655334207, - "learning_rate": 0.0023527853760627894, - "loss": 1.0509, - "step": 3998 - }, - { - "epoch": 0.30759172371356047, - "learning_rate": 0.002352487164356566, - "loss": 0.9508, - "step": 3999 - }, - { - "epoch": 0.30766864087377893, - "learning_rate": 0.002352188902872728, - "loss": 1.188, - "step": 4000 - }, - { - "epoch": 0.3077455580339974, - "learning_rate": 0.0023518905916286915, - "loss": 1.5387, - "step": 4001 - }, - { - "epoch": 0.30782247519421585, - "learning_rate": 0.002351592230641876, - "loss": 1.3311, - "step": 4002 - }, - { - "epoch": 0.30789939235443425, - "learning_rate": 0.0023512938199297027, - "loss": 1.3555, - "step": 4003 - }, - { - "epoch": 0.3079763095146527, - "learning_rate": 0.0023509953595095955, - "loss": 0.9321, - "step": 4004 - }, - { - "epoch": 0.3080532266748712, - "learning_rate": 0.0023506968493989828, - "loss": 1.0828, - "step": 4005 - }, - { - "epoch": 0.30813014383508963, - "learning_rate": 0.0023503982896152945, - "loss": 1.1461, - "step": 4006 - }, - { - "epoch": 0.30820706099530804, - "learning_rate": 0.0023500996801759638, - "loss": 1.1811, - "step": 4007 - }, - { - "epoch": 0.3082839781555265, - "learning_rate": 0.0023498010210984267, - "loss": 1.2982, - "step": 4008 - }, - { - "epoch": 0.30836089531574495, - "learning_rate": 0.002349502312400122, - "loss": 1.238, - "step": 4009 - }, - { - "epoch": 0.3084378124759634, - 
"learning_rate": 0.0023492035540984926, - "loss": 1.1188, - "step": 4010 - }, - { - "epoch": 0.3085147296361818, - "learning_rate": 0.0023489047462109825, - "loss": 1.2138, - "step": 4011 - }, - { - "epoch": 0.3085916467964003, - "learning_rate": 0.0023486058887550393, - "loss": 1.0183, - "step": 4012 - }, - { - "epoch": 0.30866856395661874, - "learning_rate": 0.0023483069817481143, - "loss": 1.0868, - "step": 4013 - }, - { - "epoch": 0.30874548111683714, - "learning_rate": 0.00234800802520766, - "loss": 0.971, - "step": 4014 - }, - { - "epoch": 0.3088223982770556, - "learning_rate": 0.0023477090191511337, - "loss": 1.1807, - "step": 4015 - }, - { - "epoch": 0.30889931543727406, - "learning_rate": 0.002347409963595994, - "loss": 1.1903, - "step": 4016 - }, - { - "epoch": 0.3089762325974925, - "learning_rate": 0.0023471108585597033, - "loss": 1.2067, - "step": 4017 - }, - { - "epoch": 0.3090531497577109, - "learning_rate": 0.0023468117040597275, - "loss": 1.2638, - "step": 4018 - }, - { - "epoch": 0.3091300669179294, - "learning_rate": 0.002346512500113533, - "loss": 1.1632, - "step": 4019 - }, - { - "epoch": 0.30920698407814784, - "learning_rate": 0.002346213246738592, - "loss": 1.1534, - "step": 4020 - }, - { - "epoch": 0.3092839012383663, - "learning_rate": 0.002345913943952377, - "loss": 0.7224, - "step": 4021 - }, - { - "epoch": 0.3093608183985847, - "learning_rate": 0.002345614591772366, - "loss": 0.9565, - "step": 4022 - }, - { - "epoch": 0.30943773555880316, - "learning_rate": 0.002345315190216037, - "loss": 1.1661, - "step": 4023 - }, - { - "epoch": 0.3095146527190216, - "learning_rate": 0.0023450157393008733, - "loss": 1.1602, - "step": 4024 - }, - { - "epoch": 0.3095915698792401, - "learning_rate": 0.00234471623904436, - "loss": 0.9932, - "step": 4025 - }, - { - "epoch": 0.3096684870394585, - "learning_rate": 0.0023444166894639852, - "loss": 1.0469, - "step": 4026 - }, - { - "epoch": 0.30974540419967694, - "learning_rate": 0.00234411709057724, - "loss": 1.4895, - "step": 4027 - }, - { - "epoch": 0.3098223213598954, - "learning_rate": 0.0023438174424016173, - "loss": 1.4801, - "step": 4028 - }, - { - "epoch": 0.30989923852011386, - "learning_rate": 0.0023435177449546156, - "loss": 0.8212, - "step": 4029 - }, - { - "epoch": 0.30997615568033227, - "learning_rate": 0.0023432179982537335, - "loss": 1.0803, - "step": 4030 - }, - { - "epoch": 0.3100530728405507, - "learning_rate": 0.0023429182023164737, - "loss": 1.3106, - "step": 4031 - }, - { - "epoch": 0.3101299900007692, - "learning_rate": 0.0023426183571603415, - "loss": 0.9331, - "step": 4032 - }, - { - "epoch": 0.31020690716098764, - "learning_rate": 0.002342318462802845, - "loss": 1.1308, - "step": 4033 - }, - { - "epoch": 0.31028382432120605, - "learning_rate": 0.002342018519261496, - "loss": 1.585, - "step": 4034 - }, - { - "epoch": 0.3103607414814245, - "learning_rate": 0.0023417185265538084, - "loss": 1.0905, - "step": 4035 - }, - { - "epoch": 0.31043765864164297, - "learning_rate": 0.002341418484697298, - "loss": 1.3169, - "step": 4036 - }, - { - "epoch": 0.31051457580186137, - "learning_rate": 0.002341118393709486, - "loss": 1.0269, - "step": 4037 - }, - { - "epoch": 0.31059149296207983, - "learning_rate": 0.0023408182536078944, - "loss": 1.0275, - "step": 4038 - }, - { - "epoch": 0.3106684101222983, - "learning_rate": 0.0023405180644100487, - "loss": 1.3213, - "step": 4039 - }, - { - "epoch": 0.31074532728251675, - "learning_rate": 0.0023402178261334766, - "loss": 1.1029, - "step": 4040 - }, - { - "epoch": 
0.31082224444273515, - "learning_rate": 0.0023399175387957107, - "loss": 1.0443, - "step": 4041 - }, - { - "epoch": 0.3108991616029536, - "learning_rate": 0.0023396172024142846, - "loss": 1.1436, - "step": 4042 - }, - { - "epoch": 0.31097607876317207, - "learning_rate": 0.002339316817006734, - "loss": 1.0799, - "step": 4043 - }, - { - "epoch": 0.31105299592339053, - "learning_rate": 0.0023390163825906004, - "loss": 1.241, - "step": 4044 - }, - { - "epoch": 0.31112991308360893, - "learning_rate": 0.002338715899183426, - "loss": 1.2144, - "step": 4045 - }, - { - "epoch": 0.3112068302438274, - "learning_rate": 0.002338415366802756, - "loss": 1.4211, - "step": 4046 - }, - { - "epoch": 0.31128374740404585, - "learning_rate": 0.0023381147854661384, - "loss": 1.2576, - "step": 4047 - }, - { - "epoch": 0.3113606645642643, - "learning_rate": 0.0023378141551911257, - "loss": 1.1191, - "step": 4048 - }, - { - "epoch": 0.3114375817244827, - "learning_rate": 0.0023375134759952712, - "loss": 1.3006, - "step": 4049 - }, - { - "epoch": 0.3115144988847012, - "learning_rate": 0.0023372127478961317, - "loss": 1.401, - "step": 4050 - }, - { - "epoch": 0.31159141604491963, - "learning_rate": 0.0023369119709112673, - "loss": 0.8778, - "step": 4051 - }, - { - "epoch": 0.3116683332051381, - "learning_rate": 0.0023366111450582407, - "loss": 1.3404, - "step": 4052 - }, - { - "epoch": 0.3117452503653565, - "learning_rate": 0.0023363102703546183, - "loss": 1.3865, - "step": 4053 - }, - { - "epoch": 0.31182216752557496, - "learning_rate": 0.0023360093468179667, - "loss": 1.2789, - "step": 4054 - }, - { - "epoch": 0.3118990846857934, - "learning_rate": 0.0023357083744658586, - "loss": 1.0586, - "step": 4055 - }, - { - "epoch": 0.3119760018460118, - "learning_rate": 0.0023354073533158673, - "loss": 1.1281, - "step": 4056 - }, - { - "epoch": 0.3120529190062303, - "learning_rate": 0.00233510628338557, - "loss": 1.0259, - "step": 4057 - }, - { - "epoch": 0.31212983616644874, - "learning_rate": 0.002334805164692546, - "loss": 1.2627, - "step": 4058 - }, - { - "epoch": 0.3122067533266672, - "learning_rate": 0.0023345039972543797, - "loss": 1.2103, - "step": 4059 - }, - { - "epoch": 0.3122836704868856, - "learning_rate": 0.0023342027810886543, - "loss": 1.1784, - "step": 4060 - }, - { - "epoch": 0.31236058764710406, - "learning_rate": 0.0023339015162129593, - "loss": 1.3992, - "step": 4061 - }, - { - "epoch": 0.3124375048073225, - "learning_rate": 0.0023336002026448856, - "loss": 1.173, - "step": 4062 - }, - { - "epoch": 0.312514421967541, - "learning_rate": 0.0023332988404020276, - "loss": 1.2913, - "step": 4063 - }, - { - "epoch": 0.3125913391277594, - "learning_rate": 0.002332997429501982, - "loss": 0.9053, - "step": 4064 - }, - { - "epoch": 0.31266825628797784, - "learning_rate": 0.002332695969962348, - "loss": 1.0656, - "step": 4065 - }, - { - "epoch": 0.3127451734481963, - "learning_rate": 0.0023323944618007285, - "loss": 1.3543, - "step": 4066 - }, - { - "epoch": 0.31282209060841476, - "learning_rate": 0.002332092905034729, - "loss": 1.1413, - "step": 4067 - }, - { - "epoch": 0.31289900776863316, - "learning_rate": 0.002331791299681958, - "loss": 0.9858, - "step": 4068 - }, - { - "epoch": 0.3129759249288516, - "learning_rate": 0.0023314896457600253, - "loss": 0.8808, - "step": 4069 - }, - { - "epoch": 0.3130528420890701, - "learning_rate": 0.0023311879432865463, - "loss": 1.2249, - "step": 4070 - }, - { - "epoch": 0.31312975924928854, - "learning_rate": 0.0023308861922791365, - "loss": 0.693, - "step": 4071 - }, - { - 
"epoch": 0.31320667640950695, - "learning_rate": 0.0023305843927554164, - "loss": 1.0655, - "step": 4072 - }, - { - "epoch": 0.3132835935697254, - "learning_rate": 0.0023302825447330078, - "loss": 0.8207, - "step": 4073 - }, - { - "epoch": 0.31336051072994386, - "learning_rate": 0.0023299806482295357, - "loss": 1.6583, - "step": 4074 - }, - { - "epoch": 0.31343742789016227, - "learning_rate": 0.0023296787032626295, - "loss": 1.1852, - "step": 4075 - }, - { - "epoch": 0.3135143450503807, - "learning_rate": 0.0023293767098499186, - "loss": 1.0463, - "step": 4076 - }, - { - "epoch": 0.3135912622105992, - "learning_rate": 0.0023290746680090374, - "loss": 1.3161, - "step": 4077 - }, - { - "epoch": 0.31366817937081765, - "learning_rate": 0.0023287725777576223, - "loss": 1.1284, - "step": 4078 - }, - { - "epoch": 0.31374509653103605, - "learning_rate": 0.002328470439113313, - "loss": 1.3051, - "step": 4079 - }, - { - "epoch": 0.3138220136912545, - "learning_rate": 0.0023281682520937506, - "loss": 1.2625, - "step": 4080 - }, - { - "epoch": 0.31389893085147297, - "learning_rate": 0.0023278660167165813, - "loss": 0.9103, - "step": 4081 - }, - { - "epoch": 0.3139758480116914, - "learning_rate": 0.0023275637329994523, - "loss": 1.3384, - "step": 4082 - }, - { - "epoch": 0.31405276517190983, - "learning_rate": 0.0023272614009600145, - "loss": 1.1082, - "step": 4083 - }, - { - "epoch": 0.3141296823321283, - "learning_rate": 0.0023269590206159213, - "loss": 1.1432, - "step": 4084 - }, - { - "epoch": 0.31420659949234675, - "learning_rate": 0.002326656591984829, - "loss": 1.2934, - "step": 4085 - }, - { - "epoch": 0.3142835166525652, - "learning_rate": 0.0023263541150843974, - "loss": 1.097, - "step": 4086 - }, - { - "epoch": 0.3143604338127836, - "learning_rate": 0.0023260515899322875, - "loss": 0.9245, - "step": 4087 - }, - { - "epoch": 0.31443735097300207, - "learning_rate": 0.0023257490165461644, - "loss": 1.3487, - "step": 4088 - }, - { - "epoch": 0.31451426813322053, - "learning_rate": 0.002325446394943695, - "loss": 1.3961, - "step": 4089 - }, - { - "epoch": 0.314591185293439, - "learning_rate": 0.002325143725142551, - "loss": 0.8543, - "step": 4090 - }, - { - "epoch": 0.3146681024536574, - "learning_rate": 0.002324841007160405, - "loss": 1.5301, - "step": 4091 - }, - { - "epoch": 0.31474501961387585, - "learning_rate": 0.0023245382410149325, - "loss": 1.275, - "step": 4092 - }, - { - "epoch": 0.3148219367740943, - "learning_rate": 0.002324235426723813, - "loss": 1.4179, - "step": 4093 - }, - { - "epoch": 0.3148988539343128, - "learning_rate": 0.002323932564304728, - "loss": 1.6431, - "step": 4094 - }, - { - "epoch": 0.3149757710945312, - "learning_rate": 0.0023236296537753618, - "loss": 1.0404, - "step": 4095 - }, - { - "epoch": 0.31505268825474964, - "learning_rate": 0.002323326695153402, - "loss": 0.6271, - "step": 4096 - }, - { - "epoch": 0.3151296054149681, - "learning_rate": 0.002323023688456538, - "loss": 1.2285, - "step": 4097 - }, - { - "epoch": 0.3152065225751865, - "learning_rate": 0.0023227206337024637, - "loss": 1.1556, - "step": 4098 - }, - { - "epoch": 0.31528343973540496, - "learning_rate": 0.0023224175309088735, - "loss": 0.8867, - "step": 4099 - }, - { - "epoch": 0.3153603568956234, - "learning_rate": 0.002322114380093467, - "loss": 1.1383, - "step": 4100 - }, - { - "epoch": 0.3154372740558419, - "learning_rate": 0.0023218111812739447, - "loss": 1.4028, - "step": 4101 - }, - { - "epoch": 0.3155141912160603, - "learning_rate": 0.0023215079344680116, - "loss": 1.4083, - "step": 4102 
- }, - { - "epoch": 0.31559110837627874, - "learning_rate": 0.0023212046396933737, - "loss": 1.3346, - "step": 4103 - }, - { - "epoch": 0.3156680255364972, - "learning_rate": 0.002320901296967741, - "loss": 0.8614, - "step": 4104 - }, - { - "epoch": 0.31574494269671566, - "learning_rate": 0.002320597906308826, - "loss": 1.1192, - "step": 4105 - }, - { - "epoch": 0.31582185985693406, - "learning_rate": 0.0023202944677343444, - "loss": 1.0291, - "step": 4106 - }, - { - "epoch": 0.3158987770171525, - "learning_rate": 0.0023199909812620136, - "loss": 1.2566, - "step": 4107 - }, - { - "epoch": 0.315975694177371, - "learning_rate": 0.002319687446909555, - "loss": 1.0361, - "step": 4108 - }, - { - "epoch": 0.31605261133758944, - "learning_rate": 0.0023193838646946917, - "loss": 1.1147, - "step": 4109 - }, - { - "epoch": 0.31612952849780784, - "learning_rate": 0.0023190802346351514, - "loss": 1.281, - "step": 4110 - }, - { - "epoch": 0.3162064456580263, - "learning_rate": 0.002318776556748662, - "loss": 1.2085, - "step": 4111 - }, - { - "epoch": 0.31628336281824476, - "learning_rate": 0.0023184728310529563, - "loss": 1.219, - "step": 4112 - }, - { - "epoch": 0.3163602799784632, - "learning_rate": 0.002318169057565769, - "loss": 1.1128, - "step": 4113 - }, - { - "epoch": 0.3164371971386816, - "learning_rate": 0.002317865236304838, - "loss": 1.2199, - "step": 4114 - }, - { - "epoch": 0.3165141142989001, - "learning_rate": 0.002317561367287903, - "loss": 0.9051, - "step": 4115 - }, - { - "epoch": 0.31659103145911854, - "learning_rate": 0.0023172574505327076, - "loss": 0.8508, - "step": 4116 - }, - { - "epoch": 0.31666794861933695, - "learning_rate": 0.0023169534860569985, - "loss": 1.1534, - "step": 4117 - }, - { - "epoch": 0.3167448657795554, - "learning_rate": 0.002316649473878524, - "loss": 1.2333, - "step": 4118 - }, - { - "epoch": 0.31682178293977387, - "learning_rate": 0.0023163454140150354, - "loss": 1.0667, - "step": 4119 - }, - { - "epoch": 0.3168987000999923, - "learning_rate": 0.002316041306484287, - "loss": 1.2747, - "step": 4120 - }, - { - "epoch": 0.31697561726021073, - "learning_rate": 0.0023157371513040366, - "loss": 1.0607, - "step": 4121 - }, - { - "epoch": 0.3170525344204292, - "learning_rate": 0.0023154329484920437, - "loss": 1.0513, - "step": 4122 - }, - { - "epoch": 0.31712945158064765, - "learning_rate": 0.002315128698066071, - "loss": 1.1036, - "step": 4123 - }, - { - "epoch": 0.3172063687408661, - "learning_rate": 0.0023148244000438844, - "loss": 1.3047, - "step": 4124 - }, - { - "epoch": 0.3172832859010845, - "learning_rate": 0.0023145200544432516, - "loss": 1.1692, - "step": 4125 - }, - { - "epoch": 0.31736020306130297, - "learning_rate": 0.0023142156612819442, - "loss": 1.297, - "step": 4126 - }, - { - "epoch": 0.31743712022152143, - "learning_rate": 0.0023139112205777354, - "loss": 0.9115, - "step": 4127 - }, - { - "epoch": 0.3175140373817399, - "learning_rate": 0.0023136067323484026, - "loss": 1.3012, - "step": 4128 - }, - { - "epoch": 0.3175909545419583, - "learning_rate": 0.002313302196611725, - "loss": 1.0477, - "step": 4129 - }, - { - "epoch": 0.31766787170217675, - "learning_rate": 0.0023129976133854837, - "loss": 0.9798, - "step": 4130 - }, - { - "epoch": 0.3177447888623952, - "learning_rate": 0.002312692982687465, - "loss": 1.22, - "step": 4131 - }, - { - "epoch": 0.31782170602261367, - "learning_rate": 0.002312388304535456, - "loss": 1.0055, - "step": 4132 - }, - { - "epoch": 0.3178986231828321, - "learning_rate": 0.0023120835789472477, - "loss": 1.3632, - 
"step": 4133 - }, - { - "epoch": 0.31797554034305053, - "learning_rate": 0.0023117788059406324, - "loss": 1.1977, - "step": 4134 - }, - { - "epoch": 0.318052457503269, - "learning_rate": 0.0023114739855334063, - "loss": 1.1121, - "step": 4135 - }, - { - "epoch": 0.3181293746634874, - "learning_rate": 0.002311169117743369, - "loss": 1.1742, - "step": 4136 - }, - { - "epoch": 0.31820629182370586, - "learning_rate": 0.0023108642025883213, - "loss": 1.1818, - "step": 4137 - }, - { - "epoch": 0.3182832089839243, - "learning_rate": 0.002310559240086068, - "loss": 1.237, - "step": 4138 - }, - { - "epoch": 0.3183601261441428, - "learning_rate": 0.0023102542302544157, - "loss": 1.0121, - "step": 4139 - }, - { - "epoch": 0.3184370433043612, - "learning_rate": 0.002309949173111174, - "loss": 1.2504, - "step": 4140 - }, - { - "epoch": 0.31851396046457964, - "learning_rate": 0.0023096440686741567, - "loss": 0.679, - "step": 4141 - }, - { - "epoch": 0.3185908776247981, - "learning_rate": 0.0023093389169611786, - "loss": 1.0136, - "step": 4142 - }, - { - "epoch": 0.31866779478501656, - "learning_rate": 0.002309033717990057, - "loss": 1.1826, - "step": 4143 - }, - { - "epoch": 0.31874471194523496, - "learning_rate": 0.002308728471778614, - "loss": 1.2552, - "step": 4144 - }, - { - "epoch": 0.3188216291054534, - "learning_rate": 0.0023084231783446723, - "loss": 1.3521, - "step": 4145 - }, - { - "epoch": 0.3188985462656719, - "learning_rate": 0.0023081178377060588, - "loss": 1.0351, - "step": 4146 - }, - { - "epoch": 0.31897546342589034, - "learning_rate": 0.0023078124498806026, - "loss": 1.1174, - "step": 4147 - }, - { - "epoch": 0.31905238058610874, - "learning_rate": 0.0023075070148861356, - "loss": 0.9617, - "step": 4148 - }, - { - "epoch": 0.3191292977463272, - "learning_rate": 0.0023072015327404924, - "loss": 1.0803, - "step": 4149 - }, - { - "epoch": 0.31920621490654566, - "learning_rate": 0.0023068960034615107, - "loss": 1.105, - "step": 4150 - }, - { - "epoch": 0.3192831320667641, - "learning_rate": 0.00230659042706703, - "loss": 0.7961, - "step": 4151 - }, - { - "epoch": 0.3193600492269825, - "learning_rate": 0.0023062848035748937, - "loss": 1.0708, - "step": 4152 - }, - { - "epoch": 0.319436966387201, - "learning_rate": 0.0023059791330029477, - "loss": 1.3105, - "step": 4153 - }, - { - "epoch": 0.31951388354741944, - "learning_rate": 0.0023056734153690398, - "loss": 1.6109, - "step": 4154 - }, - { - "epoch": 0.3195908007076379, - "learning_rate": 0.002305367650691022, - "loss": 1.0377, - "step": 4155 - }, - { - "epoch": 0.3196677178678563, - "learning_rate": 0.0023050618389867474, - "loss": 1.045, - "step": 4156 - }, - { - "epoch": 0.31974463502807476, - "learning_rate": 0.002304755980274073, - "loss": 0.939, - "step": 4157 - }, - { - "epoch": 0.3198215521882932, - "learning_rate": 0.0023044500745708586, - "loss": 0.9277, - "step": 4158 - }, - { - "epoch": 0.3198984693485116, - "learning_rate": 0.0023041441218949656, - "loss": 1.1165, - "step": 4159 - }, - { - "epoch": 0.3199753865087301, - "learning_rate": 0.0023038381222642594, - "loss": 1.1278, - "step": 4160 - }, - { - "epoch": 0.32005230366894855, - "learning_rate": 0.0023035320756966075, - "loss": 1.3572, - "step": 4161 - }, - { - "epoch": 0.320129220829167, - "learning_rate": 0.00230322598220988, - "loss": 1.1209, - "step": 4162 - }, - { - "epoch": 0.3202061379893854, - "learning_rate": 0.0023029198418219507, - "loss": 1.0094, - "step": 4163 - }, - { - "epoch": 0.32028305514960387, - "learning_rate": 0.002302613654550695, - "loss": 1.3123, 
- "step": 4164 - }, - { - "epoch": 0.3203599723098223, - "learning_rate": 0.0023023074204139913, - "loss": 1.2515, - "step": 4165 - }, - { - "epoch": 0.3204368894700408, - "learning_rate": 0.0023020011394297214, - "loss": 1.5766, - "step": 4166 - }, - { - "epoch": 0.3205138066302592, - "learning_rate": 0.002301694811615769, - "loss": 0.941, - "step": 4167 - }, - { - "epoch": 0.32059072379047765, - "learning_rate": 0.002301388436990021, - "loss": 1.3693, - "step": 4168 - }, - { - "epoch": 0.3206676409506961, - "learning_rate": 0.002301082015570367, - "loss": 1.0084, - "step": 4169 - }, - { - "epoch": 0.32074455811091457, - "learning_rate": 0.002300775547374699, - "loss": 1.3284, - "step": 4170 - }, - { - "epoch": 0.32082147527113297, - "learning_rate": 0.002300469032420913, - "loss": 1.178, - "step": 4171 - }, - { - "epoch": 0.32089839243135143, - "learning_rate": 0.0023001624707269055, - "loss": 1.3273, - "step": 4172 - }, - { - "epoch": 0.3209753095915699, - "learning_rate": 0.002299855862310577, - "loss": 1.3236, - "step": 4173 - }, - { - "epoch": 0.32105222675178835, - "learning_rate": 0.0022995492071898323, - "loss": 1.0124, - "step": 4174 - }, - { - "epoch": 0.32112914391200675, - "learning_rate": 0.002299242505382575, - "loss": 1.075, - "step": 4175 - }, - { - "epoch": 0.3212060610722252, - "learning_rate": 0.002298935756906715, - "loss": 1.2145, - "step": 4176 - }, - { - "epoch": 0.3212829782324437, - "learning_rate": 0.0022986289617801642, - "loss": 1.1242, - "step": 4177 - }, - { - "epoch": 0.3213598953926621, - "learning_rate": 0.002298322120020836, - "loss": 1.0867, - "step": 4178 - }, - { - "epoch": 0.32143681255288054, - "learning_rate": 0.0022980152316466463, - "loss": 0.7148, - "step": 4179 - }, - { - "epoch": 0.321513729713099, - "learning_rate": 0.002297708296675516, - "loss": 1.2394, - "step": 4180 - }, - { - "epoch": 0.32159064687331745, - "learning_rate": 0.002297401315125367, - "loss": 1.2044, - "step": 4181 - }, - { - "epoch": 0.32166756403353586, - "learning_rate": 0.0022970942870141242, - "loss": 0.8243, - "step": 4182 - }, - { - "epoch": 0.3217444811937543, - "learning_rate": 0.002296787212359715, - "loss": 1.3398, - "step": 4183 - }, - { - "epoch": 0.3218213983539728, - "learning_rate": 0.0022964800911800705, - "loss": 1.2309, - "step": 4184 - }, - { - "epoch": 0.32189831551419124, - "learning_rate": 0.002296172923493123, - "loss": 1.1032, - "step": 4185 - }, - { - "epoch": 0.32197523267440964, - "learning_rate": 0.002295865709316809, - "loss": 1.1799, - "step": 4186 - }, - { - "epoch": 0.3220521498346281, - "learning_rate": 0.002295558448669067, - "loss": 1.0306, - "step": 4187 - }, - { - "epoch": 0.32212906699484656, - "learning_rate": 0.0022952511415678376, - "loss": 0.8425, - "step": 4188 - }, - { - "epoch": 0.322205984155065, - "learning_rate": 0.002294943788031065, - "loss": 0.9645, - "step": 4189 - }, - { - "epoch": 0.3222829013152834, - "learning_rate": 0.002294636388076697, - "loss": 1.2359, - "step": 4190 - }, - { - "epoch": 0.3223598184755019, - "learning_rate": 0.0022943289417226816, - "loss": 0.9698, - "step": 4191 - }, - { - "epoch": 0.32243673563572034, - "learning_rate": 0.0022940214489869716, - "loss": 1.5443, - "step": 4192 - }, - { - "epoch": 0.3225136527959388, - "learning_rate": 0.002293713909887522, - "loss": 1.3285, - "step": 4193 - }, - { - "epoch": 0.3225905699561572, - "learning_rate": 0.00229340632444229, - "loss": 1.0976, - "step": 4194 - }, - { - "epoch": 0.32266748711637566, - "learning_rate": 0.002293098692669235, - "loss": 1.0602, - 
"step": 4195 - }, - { - "epoch": 0.3227444042765941, - "learning_rate": 0.0022927910145863215, - "loss": 0.9043, - "step": 4196 - }, - { - "epoch": 0.3228213214368126, - "learning_rate": 0.002292483290211514, - "loss": 1.242, - "step": 4197 - }, - { - "epoch": 0.322898238597031, - "learning_rate": 0.0022921755195627817, - "loss": 1.4223, - "step": 4198 - }, - { - "epoch": 0.32297515575724944, - "learning_rate": 0.0022918677026580953, - "loss": 1.2745, - "step": 4199 - }, - { - "epoch": 0.3230520729174679, - "learning_rate": 0.002291559839515428, - "loss": 1.0063, - "step": 4200 - }, - { - "epoch": 0.3231289900776863, - "learning_rate": 0.0022912519301527576, - "loss": 1.0889, - "step": 4201 - }, - { - "epoch": 0.32320590723790477, - "learning_rate": 0.002290943974588061, - "loss": 0.9462, - "step": 4202 - }, - { - "epoch": 0.3232828243981232, - "learning_rate": 0.0022906359728393226, - "loss": 1.152, - "step": 4203 - }, - { - "epoch": 0.3233597415583417, - "learning_rate": 0.002290327924924525, - "loss": 1.0191, - "step": 4204 - }, - { - "epoch": 0.3234366587185601, - "learning_rate": 0.0022900198308616567, - "loss": 0.9533, - "step": 4205 - }, - { - "epoch": 0.32351357587877855, - "learning_rate": 0.002289711690668707, - "loss": 1.4973, - "step": 4206 - }, - { - "epoch": 0.323590493038997, - "learning_rate": 0.002289403504363669, - "loss": 1.248, - "step": 4207 - }, - { - "epoch": 0.32366741019921547, - "learning_rate": 0.002289095271964537, - "loss": 1.2784, - "step": 4208 - }, - { - "epoch": 0.32374432735943387, - "learning_rate": 0.00228878699348931, - "loss": 1.163, - "step": 4209 - }, - { - "epoch": 0.32382124451965233, - "learning_rate": 0.0022884786689559887, - "loss": 1.165, - "step": 4210 - }, - { - "epoch": 0.3238981616798708, - "learning_rate": 0.0022881702983825756, - "loss": 1.1404, - "step": 4211 - }, - { - "epoch": 0.32397507884008925, - "learning_rate": 0.002287861881787078, - "loss": 1.1549, - "step": 4212 - }, - { - "epoch": 0.32405199600030765, - "learning_rate": 0.002287553419187503, - "loss": 1.3102, - "step": 4213 - }, - { - "epoch": 0.3241289131605261, - "learning_rate": 0.0022872449106018642, - "loss": 1.3165, - "step": 4214 - }, - { - "epoch": 0.32420583032074457, - "learning_rate": 0.0022869363560481743, - "loss": 0.9043, - "step": 4215 - }, - { - "epoch": 0.32428274748096303, - "learning_rate": 0.00228662775554445, - "loss": 1.0413, - "step": 4216 - }, - { - "epoch": 0.32435966464118143, - "learning_rate": 0.0022863191091087123, - "loss": 1.1427, - "step": 4217 - }, - { - "epoch": 0.3244365818013999, - "learning_rate": 0.0022860104167589813, - "loss": 1.1331, - "step": 4218 - }, - { - "epoch": 0.32451349896161835, - "learning_rate": 0.002285701678513283, - "loss": 1.4227, - "step": 4219 - }, - { - "epoch": 0.32459041612183676, - "learning_rate": 0.0022853928943896455, - "loss": 1.3405, - "step": 4220 - }, - { - "epoch": 0.3246673332820552, - "learning_rate": 0.0022850840644060982, - "loss": 1.2141, - "step": 4221 - }, - { - "epoch": 0.3247442504422737, - "learning_rate": 0.002284775188580674, - "loss": 0.99, - "step": 4222 - }, - { - "epoch": 0.32482116760249213, - "learning_rate": 0.0022844662669314086, - "loss": 1.253, - "step": 4223 - }, - { - "epoch": 0.32489808476271054, - "learning_rate": 0.00228415729947634, - "loss": 1.3627, - "step": 4224 - }, - { - "epoch": 0.324975001922929, - "learning_rate": 0.0022838482862335098, - "loss": 1.1932, - "step": 4225 - }, - { - "epoch": 0.32505191908314746, - "learning_rate": 0.002283539227220961, - "loss": 1.1084, - 
"step": 4226 - }, - { - "epoch": 0.3251288362433659, - "learning_rate": 0.0022832301224567407, - "loss": 0.7417, - "step": 4227 - }, - { - "epoch": 0.3252057534035843, - "learning_rate": 0.0022829209719588974, - "loss": 1.1925, - "step": 4228 - }, - { - "epoch": 0.3252826705638028, - "learning_rate": 0.0022826117757454817, - "loss": 1.0792, - "step": 4229 - }, - { - "epoch": 0.32535958772402124, - "learning_rate": 0.0022823025338345494, - "loss": 0.8884, - "step": 4230 - }, - { - "epoch": 0.3254365048842397, - "learning_rate": 0.0022819932462441565, - "loss": 1.1749, - "step": 4231 - }, - { - "epoch": 0.3255134220444581, - "learning_rate": 0.002281683912992364, - "loss": 0.7102, - "step": 4232 - }, - { - "epoch": 0.32559033920467656, - "learning_rate": 0.0022813745340972322, - "loss": 1.4558, - "step": 4233 - }, - { - "epoch": 0.325667256364895, - "learning_rate": 0.002281065109576827, - "loss": 0.8879, - "step": 4234 - }, - { - "epoch": 0.3257441735251135, - "learning_rate": 0.002280755639449216, - "loss": 0.9001, - "step": 4235 - }, - { - "epoch": 0.3258210906853319, - "learning_rate": 0.0022804461237324702, - "loss": 1.4333, - "step": 4236 - }, - { - "epoch": 0.32589800784555034, - "learning_rate": 0.0022801365624446615, - "loss": 1.1534, - "step": 4237 - }, - { - "epoch": 0.3259749250057688, - "learning_rate": 0.0022798269556038656, - "loss": 0.814, - "step": 4238 - }, - { - "epoch": 0.3260518421659872, - "learning_rate": 0.0022795173032281618, - "loss": 1.3223, - "step": 4239 - }, - { - "epoch": 0.32612875932620566, - "learning_rate": 0.0022792076053356294, - "loss": 0.7201, - "step": 4240 - }, - { - "epoch": 0.3262056764864241, - "learning_rate": 0.002278897861944353, - "loss": 0.9512, - "step": 4241 - }, - { - "epoch": 0.3262825936466426, - "learning_rate": 0.002278588073072419, - "loss": 1.002, - "step": 4242 - }, - { - "epoch": 0.326359510806861, - "learning_rate": 0.0022782782387379165, - "loss": 1.1497, - "step": 4243 - }, - { - "epoch": 0.32643642796707945, - "learning_rate": 0.002277968358958936, - "loss": 0.7548, - "step": 4244 - }, - { - "epoch": 0.3265133451272979, - "learning_rate": 0.0022776584337535714, - "loss": 0.9711, - "step": 4245 - }, - { - "epoch": 0.32659026228751636, - "learning_rate": 0.0022773484631399216, - "loss": 1.067, - "step": 4246 - }, - { - "epoch": 0.32666717944773477, - "learning_rate": 0.0022770384471360846, - "loss": 0.9141, - "step": 4247 - }, - { - "epoch": 0.3267440966079532, - "learning_rate": 0.0022767283857601625, - "loss": 1.3968, - "step": 4248 - }, - { - "epoch": 0.3268210137681717, - "learning_rate": 0.002276418279030261, - "loss": 0.8375, - "step": 4249 - }, - { - "epoch": 0.32689793092839015, - "learning_rate": 0.0022761081269644864, - "loss": 1.1958, - "step": 4250 - }, - { - "epoch": 0.32697484808860855, - "learning_rate": 0.0022757979295809495, - "loss": 1.0153, - "step": 4251 - }, - { - "epoch": 0.327051765248827, - "learning_rate": 0.0022754876868977636, - "loss": 1.0296, - "step": 4252 - }, - { - "epoch": 0.32712868240904547, - "learning_rate": 0.002275177398933043, - "loss": 1.0223, - "step": 4253 - }, - { - "epoch": 0.3272055995692639, - "learning_rate": 0.0022748670657049062, - "loss": 1.2272, - "step": 4254 - }, - { - "epoch": 0.32728251672948233, - "learning_rate": 0.002274556687231474, - "loss": 1.0495, - "step": 4255 - }, - { - "epoch": 0.3273594338897008, - "learning_rate": 0.00227424626353087, - "loss": 1.1607, - "step": 4256 - }, - { - "epoch": 0.32743635104991925, - "learning_rate": 0.002273935794621219, - "loss": 
1.1606, - "step": 4257 - }, - { - "epoch": 0.3275132682101377, - "learning_rate": 0.002273625280520651, - "loss": 1.4155, - "step": 4258 - }, - { - "epoch": 0.3275901853703561, - "learning_rate": 0.0022733147212472965, - "loss": 1.1348, - "step": 4259 - }, - { - "epoch": 0.32766710253057457, - "learning_rate": 0.0022730041168192894, - "loss": 1.1552, - "step": 4260 - }, - { - "epoch": 0.32774401969079303, - "learning_rate": 0.0022726934672547662, - "loss": 1.3827, - "step": 4261 - }, - { - "epoch": 0.32782093685101144, - "learning_rate": 0.002272382772571866, - "loss": 1.7051, - "step": 4262 - }, - { - "epoch": 0.3278978540112299, - "learning_rate": 0.0022720720327887313, - "loss": 1.2682, - "step": 4263 - }, - { - "epoch": 0.32797477117144835, - "learning_rate": 0.0022717612479235053, - "loss": 1.2746, - "step": 4264 - }, - { - "epoch": 0.3280516883316668, - "learning_rate": 0.0022714504179943366, - "loss": 1.46, - "step": 4265 - }, - { - "epoch": 0.3281286054918852, - "learning_rate": 0.002271139543019373, - "loss": 1.0588, - "step": 4266 - }, - { - "epoch": 0.3282055226521037, - "learning_rate": 0.0022708286230167687, - "loss": 1.1343, - "step": 4267 - }, - { - "epoch": 0.32828243981232214, - "learning_rate": 0.0022705176580046773, - "loss": 1.0675, - "step": 4268 - }, - { - "epoch": 0.3283593569725406, - "learning_rate": 0.002270206648001257, - "loss": 1.0478, - "step": 4269 - }, - { - "epoch": 0.328436274132759, - "learning_rate": 0.002269895593024668, - "loss": 1.1152, - "step": 4270 - }, - { - "epoch": 0.32851319129297746, - "learning_rate": 0.0022695844930930727, - "loss": 1.1935, - "step": 4271 - }, - { - "epoch": 0.3285901084531959, - "learning_rate": 0.0022692733482246367, - "loss": 1.316, - "step": 4272 - }, - { - "epoch": 0.3286670256134144, - "learning_rate": 0.0022689621584375285, - "loss": 1.1137, - "step": 4273 - }, - { - "epoch": 0.3287439427736328, - "learning_rate": 0.0022686509237499182, - "loss": 0.7445, - "step": 4274 - }, - { - "epoch": 0.32882085993385124, - "learning_rate": 0.0022683396441799787, - "loss": 1.191, - "step": 4275 - }, - { - "epoch": 0.3288977770940697, - "learning_rate": 0.002268028319745888, - "loss": 1.276, - "step": 4276 - }, - { - "epoch": 0.32897469425428816, - "learning_rate": 0.0022677169504658223, - "loss": 0.9966, - "step": 4277 - }, - { - "epoch": 0.32905161141450656, - "learning_rate": 0.0022674055363579647, - "loss": 1.2092, - "step": 4278 - }, - { - "epoch": 0.329128528574725, - "learning_rate": 0.0022670940774404975, - "loss": 1.024, - "step": 4279 - }, - { - "epoch": 0.3292054457349435, - "learning_rate": 0.002266782573731607, - "loss": 0.9996, - "step": 4280 - }, - { - "epoch": 0.3292823628951619, - "learning_rate": 0.002266471025249484, - "loss": 1.3885, - "step": 4281 - }, - { - "epoch": 0.32935928005538034, - "learning_rate": 0.0022661594320123185, - "loss": 0.7952, - "step": 4282 - }, - { - "epoch": 0.3294361972155988, - "learning_rate": 0.0022658477940383057, - "loss": 1.1963, - "step": 4283 - }, - { - "epoch": 0.32951311437581726, - "learning_rate": 0.0022655361113456418, - "loss": 1.2864, - "step": 4284 - }, - { - "epoch": 0.32959003153603567, - "learning_rate": 0.0022652243839525266, - "loss": 1.033, - "step": 4285 - }, - { - "epoch": 0.3296669486962541, - "learning_rate": 0.002264912611877162, - "loss": 1.2902, - "step": 4286 - }, - { - "epoch": 0.3297438658564726, - "learning_rate": 0.002264600795137753, - "loss": 1.088, - "step": 4287 - }, - { - "epoch": 0.32982078301669104, - "learning_rate": 0.002264288933752506, - 
"loss": 1.2543, - "step": 4288 - }, - { - "epoch": 0.32989770017690945, - "learning_rate": 0.0022639770277396326, - "loss": 1.0899, - "step": 4289 - }, - { - "epoch": 0.3299746173371279, - "learning_rate": 0.002263665077117344, - "loss": 1.0596, - "step": 4290 - }, - { - "epoch": 0.33005153449734637, - "learning_rate": 0.0022633530819038557, - "loss": 1.5231, - "step": 4291 - }, - { - "epoch": 0.3301284516575648, - "learning_rate": 0.0022630410421173855, - "loss": 0.9046, - "step": 4292 - }, - { - "epoch": 0.33020536881778323, - "learning_rate": 0.002262728957776153, - "loss": 1.118, - "step": 4293 - }, - { - "epoch": 0.3302822859780017, - "learning_rate": 0.0022624168288983824, - "loss": 1.0512, - "step": 4294 - }, - { - "epoch": 0.33035920313822015, - "learning_rate": 0.0022621046555022983, - "loss": 0.9748, - "step": 4295 - }, - { - "epoch": 0.3304361202984386, - "learning_rate": 0.002261792437606129, - "loss": 1.0904, - "step": 4296 - }, - { - "epoch": 0.330513037458657, - "learning_rate": 0.002261480175228106, - "loss": 1.3217, - "step": 4297 - }, - { - "epoch": 0.33058995461887547, - "learning_rate": 0.0022611678683864614, - "loss": 1.1509, - "step": 4298 - }, - { - "epoch": 0.33066687177909393, - "learning_rate": 0.002260855517099432, - "loss": 1.2228, - "step": 4299 - }, - { - "epoch": 0.33074378893931233, - "learning_rate": 0.0022605431213852563, - "loss": 1.2046, - "step": 4300 - }, - { - "epoch": 0.3308207060995308, - "learning_rate": 0.0022602306812621745, - "loss": 1.298, - "step": 4301 - }, - { - "epoch": 0.33089762325974925, - "learning_rate": 0.002259918196748431, - "loss": 1.1577, - "step": 4302 - }, - { - "epoch": 0.3309745404199677, - "learning_rate": 0.002259605667862272, - "loss": 1.0975, - "step": 4303 - }, - { - "epoch": 0.3310514575801861, - "learning_rate": 0.0022592930946219465, - "loss": 1.2118, - "step": 4304 - }, - { - "epoch": 0.3311283747404046, - "learning_rate": 0.0022589804770457057, - "loss": 0.7915, - "step": 4305 - }, - { - "epoch": 0.33120529190062303, - "learning_rate": 0.0022586678151518044, - "loss": 1.1003, - "step": 4306 - }, - { - "epoch": 0.3312822090608415, - "learning_rate": 0.0022583551089584986, - "loss": 1.1668, - "step": 4307 - }, - { - "epoch": 0.3313591262210599, - "learning_rate": 0.0022580423584840476, - "loss": 0.9239, - "step": 4308 - }, - { - "epoch": 0.33143604338127836, - "learning_rate": 0.002257729563746713, - "loss": 1.1236, - "step": 4309 - }, - { - "epoch": 0.3315129605414968, - "learning_rate": 0.0022574167247647594, - "loss": 1.1646, - "step": 4310 - }, - { - "epoch": 0.3315898777017153, - "learning_rate": 0.002257103841556454, - "loss": 1.3413, - "step": 4311 - }, - { - "epoch": 0.3316667948619337, - "learning_rate": 0.0022567909141400664, - "loss": 1.2623, - "step": 4312 - }, - { - "epoch": 0.33174371202215214, - "learning_rate": 0.002256477942533869, - "loss": 1.1116, - "step": 4313 - }, - { - "epoch": 0.3318206291823706, - "learning_rate": 0.0022561649267561355, - "loss": 1.0319, - "step": 4314 - }, - { - "epoch": 0.33189754634258906, - "learning_rate": 0.0022558518668251443, - "loss": 0.8188, - "step": 4315 - }, - { - "epoch": 0.33197446350280746, - "learning_rate": 0.0022555387627591745, - "loss": 1.2136, - "step": 4316 - }, - { - "epoch": 0.3320513806630259, - "learning_rate": 0.0022552256145765086, - "loss": 1.2836, - "step": 4317 - }, - { - "epoch": 0.3321282978232444, - "learning_rate": 0.002254912422295433, - "loss": 1.3871, - "step": 4318 - }, - { - "epoch": 0.33220521498346284, - "learning_rate": 
0.002254599185934233, - "loss": 1.4418, - "step": 4319 - }, - { - "epoch": 0.33228213214368124, - "learning_rate": 0.0022542859055112016, - "loss": 1.2481, - "step": 4320 - }, - { - "epoch": 0.3323590493038997, - "learning_rate": 0.0022539725810446294, - "loss": 1.0429, - "step": 4321 - }, - { - "epoch": 0.33243596646411816, - "learning_rate": 0.0022536592125528125, - "loss": 1.0748, - "step": 4322 - }, - { - "epoch": 0.33251288362433656, - "learning_rate": 0.0022533458000540485, - "loss": 1.3121, - "step": 4323 - }, - { - "epoch": 0.332589800784555, - "learning_rate": 0.0022530323435666385, - "loss": 1.3557, - "step": 4324 - }, - { - "epoch": 0.3326667179447735, - "learning_rate": 0.0022527188431088847, - "loss": 1.1272, - "step": 4325 - }, - { - "epoch": 0.33274363510499194, - "learning_rate": 0.002252405298699093, - "loss": 1.029, - "step": 4326 - }, - { - "epoch": 0.33282055226521035, - "learning_rate": 0.0022520917103555724, - "loss": 1.1077, - "step": 4327 - }, - { - "epoch": 0.3328974694254288, - "learning_rate": 0.0022517780780966327, - "loss": 1.3373, - "step": 4328 - }, - { - "epoch": 0.33297438658564726, - "learning_rate": 0.0022514644019405875, - "loss": 1.39, - "step": 4329 - }, - { - "epoch": 0.3330513037458657, - "learning_rate": 0.0022511506819057527, - "loss": 1.1247, - "step": 4330 - }, - { - "epoch": 0.3331282209060841, - "learning_rate": 0.0022508369180104476, - "loss": 1.3734, - "step": 4331 - }, - { - "epoch": 0.3332051380663026, - "learning_rate": 0.002250523110272991, - "loss": 1.5231, - "step": 4332 - }, - { - "epoch": 0.33328205522652105, - "learning_rate": 0.002250209258711709, - "loss": 1.0011, - "step": 4333 - }, - { - "epoch": 0.3333589723867395, - "learning_rate": 0.0022498953633449258, - "loss": 1.0763, - "step": 4334 - }, - { - "epoch": 0.3334358895469579, - "learning_rate": 0.0022495814241909715, - "loss": 1.0338, - "step": 4335 - }, - { - "epoch": 0.33351280670717637, - "learning_rate": 0.0022492674412681758, - "loss": 1.3041, - "step": 4336 - }, - { - "epoch": 0.3335897238673948, - "learning_rate": 0.002248953414594874, - "loss": 1.0705, - "step": 4337 - }, - { - "epoch": 0.3336666410276133, - "learning_rate": 0.002248639344189401, - "loss": 1.1476, - "step": 4338 - }, - { - "epoch": 0.3337435581878317, - "learning_rate": 0.002248325230070097, - "loss": 0.6311, - "step": 4339 - }, - { - "epoch": 0.33382047534805015, - "learning_rate": 0.002248011072255303, - "loss": 1.1914, - "step": 4340 - }, - { - "epoch": 0.3338973925082686, - "learning_rate": 0.002247696870763363, - "loss": 1.354, - "step": 4341 - }, - { - "epoch": 0.333974309668487, - "learning_rate": 0.0022473826256126234, - "loss": 1.2951, - "step": 4342 - }, - { - "epoch": 0.33405122682870547, - "learning_rate": 0.0022470683368214332, - "loss": 0.8793, - "step": 4343 - }, - { - "epoch": 0.33412814398892393, - "learning_rate": 0.0022467540044081445, - "loss": 0.9412, - "step": 4344 - }, - { - "epoch": 0.3342050611491424, - "learning_rate": 0.0022464396283911107, - "loss": 1.0309, - "step": 4345 - }, - { - "epoch": 0.3342819783093608, - "learning_rate": 0.0022461252087886898, - "loss": 1.2234, - "step": 4346 - }, - { - "epoch": 0.33435889546957925, - "learning_rate": 0.00224581074561924, - "loss": 1.4772, - "step": 4347 - }, - { - "epoch": 0.3344358126297977, - "learning_rate": 0.0022454962389011237, - "loss": 1.2051, - "step": 4348 - }, - { - "epoch": 0.3345127297900162, - "learning_rate": 0.0022451816886527045, - "loss": 1.0304, - "step": 4349 - }, - { - "epoch": 0.3345896469502346, - 
"learning_rate": 0.0022448670948923503, - "loss": 1.6065, - "step": 4350 - }, - { - "epoch": 0.33466656411045304, - "learning_rate": 0.00224455245763843, - "loss": 0.9958, - "step": 4351 - }, - { - "epoch": 0.3347434812706715, - "learning_rate": 0.0022442377769093156, - "loss": 1.2912, - "step": 4352 - }, - { - "epoch": 0.33482039843088995, - "learning_rate": 0.002243923052723382, - "loss": 1.1904, - "step": 4353 - }, - { - "epoch": 0.33489731559110836, - "learning_rate": 0.0022436082850990067, - "loss": 1.496, - "step": 4354 - }, - { - "epoch": 0.3349742327513268, - "learning_rate": 0.002243293474054568, - "loss": 1.3317, - "step": 4355 - }, - { - "epoch": 0.3350511499115453, - "learning_rate": 0.002242978619608449, - "loss": 1.3624, - "step": 4356 - }, - { - "epoch": 0.33512806707176374, - "learning_rate": 0.0022426637217790335, - "loss": 1.3946, - "step": 4357 - }, - { - "epoch": 0.33520498423198214, - "learning_rate": 0.0022423487805847096, - "loss": 0.9476, - "step": 4358 - }, - { - "epoch": 0.3352819013922006, - "learning_rate": 0.0022420337960438667, - "loss": 1.0602, - "step": 4359 - }, - { - "epoch": 0.33535881855241906, - "learning_rate": 0.002241718768174897, - "loss": 1.3431, - "step": 4360 - }, - { - "epoch": 0.33543573571263746, - "learning_rate": 0.0022414036969961963, - "loss": 1.1943, - "step": 4361 - }, - { - "epoch": 0.3355126528728559, - "learning_rate": 0.00224108858252616, - "loss": 0.9365, - "step": 4362 - }, - { - "epoch": 0.3355895700330744, - "learning_rate": 0.0022407734247831897, - "loss": 1.1168, - "step": 4363 - }, - { - "epoch": 0.33566648719329284, - "learning_rate": 0.0022404582237856865, - "loss": 1.6628, - "step": 4364 - }, - { - "epoch": 0.33574340435351124, - "learning_rate": 0.0022401429795520563, - "loss": 1.1019, - "step": 4365 - }, - { - "epoch": 0.3358203215137297, - "learning_rate": 0.0022398276921007064, - "loss": 0.9463, - "step": 4366 - }, - { - "epoch": 0.33589723867394816, - "learning_rate": 0.002239512361450046, - "loss": 1.6078, - "step": 4367 - }, - { - "epoch": 0.3359741558341666, - "learning_rate": 0.002239196987618489, - "loss": 1.3525, - "step": 4368 - }, - { - "epoch": 0.336051072994385, - "learning_rate": 0.0022388815706244486, - "loss": 1.2302, - "step": 4369 - }, - { - "epoch": 0.3361279901546035, - "learning_rate": 0.002238566110486343, - "loss": 1.0367, - "step": 4370 - }, - { - "epoch": 0.33620490731482194, - "learning_rate": 0.0022382506072225936, - "loss": 1.1865, - "step": 4371 - }, - { - "epoch": 0.3362818244750404, - "learning_rate": 0.002237935060851621, - "loss": 1.2263, - "step": 4372 - }, - { - "epoch": 0.3363587416352588, - "learning_rate": 0.0022376194713918515, - "loss": 1.2354, - "step": 4373 - }, - { - "epoch": 0.33643565879547727, - "learning_rate": 0.0022373038388617122, - "loss": 0.9629, - "step": 4374 - }, - { - "epoch": 0.3365125759556957, - "learning_rate": 0.002236988163279634, - "loss": 1.2289, - "step": 4375 - }, - { - "epoch": 0.3365894931159142, - "learning_rate": 0.0022366724446640476, - "loss": 1.3058, - "step": 4376 - }, - { - "epoch": 0.3366664102761326, - "learning_rate": 0.0022363566830333904, - "loss": 1.0201, - "step": 4377 - }, - { - "epoch": 0.33674332743635105, - "learning_rate": 0.0022360408784060986, - "loss": 0.8795, - "step": 4378 - }, - { - "epoch": 0.3368202445965695, - "learning_rate": 0.002235725030800613, - "loss": 1.0216, - "step": 4379 - }, - { - "epoch": 0.33689716175678797, - "learning_rate": 0.002235409140235376, - "loss": 0.9301, - "step": 4380 - }, - { - "epoch": 
0.33697407891700637, - "learning_rate": 0.002235093206728833, - "loss": 1.1659, - "step": 4381 - }, - { - "epoch": 0.33705099607722483, - "learning_rate": 0.0022347772302994317, - "loss": 1.1632, - "step": 4382 - }, - { - "epoch": 0.3371279132374433, - "learning_rate": 0.002234461210965622, - "loss": 1.0076, - "step": 4383 - }, - { - "epoch": 0.3372048303976617, - "learning_rate": 0.002234145148745857, - "loss": 1.1557, - "step": 4384 - }, - { - "epoch": 0.33728174755788015, - "learning_rate": 0.002233829043658591, - "loss": 1.2187, - "step": 4385 - }, - { - "epoch": 0.3373586647180986, - "learning_rate": 0.0022335128957222832, - "loss": 1.0021, - "step": 4386 - }, - { - "epoch": 0.33743558187831707, - "learning_rate": 0.002233196704955392, - "loss": 0.8605, - "step": 4387 - }, - { - "epoch": 0.3375124990385355, - "learning_rate": 0.002232880471376382, - "loss": 0.8381, - "step": 4388 - }, - { - "epoch": 0.33758941619875393, - "learning_rate": 0.002232564195003717, - "loss": 1.1981, - "step": 4389 - }, - { - "epoch": 0.3376663333589724, - "learning_rate": 0.0022322478758558654, - "loss": 0.9065, - "step": 4390 - }, - { - "epoch": 0.33774325051919085, - "learning_rate": 0.002231931513951297, - "loss": 0.9244, - "step": 4391 - }, - { - "epoch": 0.33782016767940926, - "learning_rate": 0.0022316151093084847, - "loss": 1.1886, - "step": 4392 - }, - { - "epoch": 0.3378970848396277, - "learning_rate": 0.002231298661945904, - "loss": 1.2158, - "step": 4393 - }, - { - "epoch": 0.3379740019998462, - "learning_rate": 0.002230982171882032, - "loss": 1.2558, - "step": 4394 - }, - { - "epoch": 0.33805091916006463, - "learning_rate": 0.0022306656391353494, - "loss": 1.4155, - "step": 4395 - }, - { - "epoch": 0.33812783632028304, - "learning_rate": 0.0022303490637243383, - "loss": 1.0438, - "step": 4396 - }, - { - "epoch": 0.3382047534805015, - "learning_rate": 0.0022300324456674845, - "loss": 1.3169, - "step": 4397 - }, - { - "epoch": 0.33828167064071996, - "learning_rate": 0.002229715784983275, - "loss": 0.7401, - "step": 4398 - }, - { - "epoch": 0.3383585878009384, - "learning_rate": 0.0022293990816902006, - "loss": 1.0843, - "step": 4399 - }, - { - "epoch": 0.3384355049611568, - "learning_rate": 0.0022290823358067542, - "loss": 1.296, - "step": 4400 - }, - { - "epoch": 0.3385124221213753, - "learning_rate": 0.0022287655473514295, - "loss": 1.1755, - "step": 4401 - }, - { - "epoch": 0.33858933928159374, - "learning_rate": 0.0022284487163427256, - "loss": 1.2204, - "step": 4402 - }, - { - "epoch": 0.33866625644181214, - "learning_rate": 0.0022281318427991416, - "loss": 1.1812, - "step": 4403 - }, - { - "epoch": 0.3387431736020306, - "learning_rate": 0.0022278149267391805, - "loss": 1.5273, - "step": 4404 - }, - { - "epoch": 0.33882009076224906, - "learning_rate": 0.0022274979681813473, - "loss": 1.298, - "step": 4405 - }, - { - "epoch": 0.3388970079224675, - "learning_rate": 0.00222718096714415, - "loss": 1.2752, - "step": 4406 - }, - { - "epoch": 0.3389739250826859, - "learning_rate": 0.0022268639236460975, - "loss": 1.1704, - "step": 4407 - }, - { - "epoch": 0.3390508422429044, - "learning_rate": 0.0022265468377057037, - "loss": 0.9175, - "step": 4408 - }, - { - "epoch": 0.33912775940312284, - "learning_rate": 0.0022262297093414824, - "loss": 1.1228, - "step": 4409 - }, - { - "epoch": 0.3392046765633413, - "learning_rate": 0.0022259125385719514, - "loss": 1.1993, - "step": 4410 - }, - { - "epoch": 0.3392815937235597, - "learning_rate": 0.002225595325415631, - "loss": 0.747, - "step": 4411 - }, - { - 
"epoch": 0.33935851088377816, - "learning_rate": 0.0022252780698910433, - "loss": 1.2871, - "step": 4412 - }, - { - "epoch": 0.3394354280439966, - "learning_rate": 0.002224960772016713, - "loss": 1.3381, - "step": 4413 - }, - { - "epoch": 0.3395123452042151, - "learning_rate": 0.0022246434318111685, - "loss": 1.0126, - "step": 4414 - }, - { - "epoch": 0.3395892623644335, - "learning_rate": 0.0022243260492929378, - "loss": 1.4034, - "step": 4415 - }, - { - "epoch": 0.33966617952465195, - "learning_rate": 0.0022240086244805545, - "loss": 1.2425, - "step": 4416 - }, - { - "epoch": 0.3397430966848704, - "learning_rate": 0.0022236911573925533, - "loss": 1.7546, - "step": 4417 - }, - { - "epoch": 0.33982001384508886, - "learning_rate": 0.002223373648047471, - "loss": 1.0383, - "step": 4418 - }, - { - "epoch": 0.33989693100530727, - "learning_rate": 0.002223056096463848, - "loss": 1.3741, - "step": 4419 - }, - { - "epoch": 0.3399738481655257, - "learning_rate": 0.0022227385026602252, - "loss": 0.7326, - "step": 4420 - }, - { - "epoch": 0.3400507653257442, - "learning_rate": 0.0022224208666551484, - "loss": 1.4531, - "step": 4421 - }, - { - "epoch": 0.34012768248596265, - "learning_rate": 0.0022221031884671646, - "loss": 1.0761, - "step": 4422 - }, - { - "epoch": 0.34020459964618105, - "learning_rate": 0.002221785468114823, - "loss": 1.086, - "step": 4423 - }, - { - "epoch": 0.3402815168063995, - "learning_rate": 0.002221467705616675, - "loss": 1.0769, - "step": 4424 - }, - { - "epoch": 0.34035843396661797, - "learning_rate": 0.0022211499009912763, - "loss": 1.2885, - "step": 4425 - }, - { - "epoch": 0.34043535112683637, - "learning_rate": 0.0022208320542571837, - "loss": 1.1621, - "step": 4426 - }, - { - "epoch": 0.34051226828705483, - "learning_rate": 0.0022205141654329554, - "loss": 1.306, - "step": 4427 - }, - { - "epoch": 0.3405891854472733, - "learning_rate": 0.0022201962345371543, - "loss": 1.0948, - "step": 4428 - }, - { - "epoch": 0.34066610260749175, - "learning_rate": 0.0022198782615883445, - "loss": 1.179, - "step": 4429 - }, - { - "epoch": 0.34074301976771015, - "learning_rate": 0.0022195602466050933, - "loss": 1.0467, - "step": 4430 - }, - { - "epoch": 0.3408199369279286, - "learning_rate": 0.002219242189605969, - "loss": 1.0063, - "step": 4431 - }, - { - "epoch": 0.34089685408814707, - "learning_rate": 0.002218924090609544, - "loss": 0.9891, - "step": 4432 - }, - { - "epoch": 0.34097377124836553, - "learning_rate": 0.0022186059496343913, - "loss": 1.3963, - "step": 4433 - }, - { - "epoch": 0.34105068840858394, - "learning_rate": 0.0022182877666990885, - "loss": 1.4076, - "step": 4434 - }, - { - "epoch": 0.3411276055688024, - "learning_rate": 0.002217969541822215, - "loss": 1.2832, - "step": 4435 - }, - { - "epoch": 0.34120452272902085, - "learning_rate": 0.002217651275022352, - "loss": 1.2185, - "step": 4436 - }, - { - "epoch": 0.3412814398892393, - "learning_rate": 0.002217332966318082, - "loss": 1.4446, - "step": 4437 - }, - { - "epoch": 0.3413583570494577, - "learning_rate": 0.0022170146157279927, - "loss": 1.1896, - "step": 4438 - }, - { - "epoch": 0.3414352742096762, - "learning_rate": 0.0022166962232706734, - "loss": 1.1249, - "step": 4439 - }, - { - "epoch": 0.34151219136989464, - "learning_rate": 0.002216377788964714, - "loss": 1.2731, - "step": 4440 - }, - { - "epoch": 0.3415891085301131, - "learning_rate": 0.00221605931282871, - "loss": 1.2442, - "step": 4441 - }, - { - "epoch": 0.3416660256903315, - "learning_rate": 0.0022157407948812558, - "loss": 1.1938, - "step": 4442 
- }, - { - "epoch": 0.34174294285054996, - "learning_rate": 0.0022154222351409504, - "loss": 0.8131, - "step": 4443 - }, - { - "epoch": 0.3418198600107684, - "learning_rate": 0.002215103633626395, - "loss": 1.3119, - "step": 4444 - }, - { - "epoch": 0.3418967771709868, - "learning_rate": 0.0022147849903561933, - "loss": 1.3002, - "step": 4445 - }, - { - "epoch": 0.3419736943312053, - "learning_rate": 0.002214466305348952, - "loss": 1.6118, - "step": 4446 - }, - { - "epoch": 0.34205061149142374, - "learning_rate": 0.0022141475786232774, - "loss": 1.2568, - "step": 4447 - }, - { - "epoch": 0.3421275286516422, - "learning_rate": 0.0022138288101977814, - "loss": 1.3951, - "step": 4448 - }, - { - "epoch": 0.3422044458118606, - "learning_rate": 0.002213510000091078, - "loss": 1.1839, - "step": 4449 - }, - { - "epoch": 0.34228136297207906, - "learning_rate": 0.0022131911483217817, - "loss": 0.766, - "step": 4450 - }, - { - "epoch": 0.3423582801322975, - "learning_rate": 0.002212872254908511, - "loss": 1.0955, - "step": 4451 - }, - { - "epoch": 0.342435197292516, - "learning_rate": 0.002212553319869886, - "loss": 1.0926, - "step": 4452 - }, - { - "epoch": 0.3425121144527344, - "learning_rate": 0.002212234343224531, - "loss": 1.3951, - "step": 4453 - }, - { - "epoch": 0.34258903161295284, - "learning_rate": 0.0022119153249910697, - "loss": 1.2506, - "step": 4454 - }, - { - "epoch": 0.3426659487731713, - "learning_rate": 0.0022115962651881307, - "loss": 1.0962, - "step": 4455 - }, - { - "epoch": 0.34274286593338976, - "learning_rate": 0.0022112771638343443, - "loss": 0.963, - "step": 4456 - }, - { - "epoch": 0.34281978309360817, - "learning_rate": 0.002210958020948343, - "loss": 1.0749, - "step": 4457 - }, - { - "epoch": 0.3428967002538266, - "learning_rate": 0.002210638836548762, - "loss": 0.9101, - "step": 4458 - }, - { - "epoch": 0.3429736174140451, - "learning_rate": 0.0022103196106542386, - "loss": 0.7482, - "step": 4459 - }, - { - "epoch": 0.34305053457426354, - "learning_rate": 0.0022100003432834127, - "loss": 1.1059, - "step": 4460 - }, - { - "epoch": 0.34312745173448195, - "learning_rate": 0.0022096810344549274, - "loss": 1.025, - "step": 4461 - }, - { - "epoch": 0.3432043688947004, - "learning_rate": 0.0022093616841874263, - "loss": 1.4481, - "step": 4462 - }, - { - "epoch": 0.34328128605491887, - "learning_rate": 0.0022090422924995576, - "loss": 1.3447, - "step": 4463 - }, - { - "epoch": 0.34335820321513727, - "learning_rate": 0.00220872285940997, - "loss": 0.6945, - "step": 4464 - }, - { - "epoch": 0.34343512037535573, - "learning_rate": 0.0022084033849373167, - "loss": 1.2389, - "step": 4465 - }, - { - "epoch": 0.3435120375355742, - "learning_rate": 0.0022080838691002505, - "loss": 0.7735, - "step": 4466 - }, - { - "epoch": 0.34358895469579265, - "learning_rate": 0.0022077643119174303, - "loss": 1.4623, - "step": 4467 - }, - { - "epoch": 0.34366587185601105, - "learning_rate": 0.002207444713407514, - "loss": 1.0244, - "step": 4468 - }, - { - "epoch": 0.3437427890162295, - "learning_rate": 0.002207125073589163, - "loss": 0.9686, - "step": 4469 - }, - { - "epoch": 0.34381970617644797, - "learning_rate": 0.0022068053924810425, - "loss": 1.1557, - "step": 4470 - }, - { - "epoch": 0.34389662333666643, - "learning_rate": 0.0022064856701018184, - "loss": 1.211, - "step": 4471 - }, - { - "epoch": 0.34397354049688483, - "learning_rate": 0.00220616590647016, - "loss": 1.4653, - "step": 4472 - }, - { - "epoch": 0.3440504576571033, - "learning_rate": 0.002205846101604738, - "loss": 0.9713, - 
"step": 4473 - }, - { - "epoch": 0.34412737481732175, - "learning_rate": 0.002205526255524227, - "loss": 1.053, - "step": 4474 - }, - { - "epoch": 0.3442042919775402, - "learning_rate": 0.002205206368247302, - "loss": 0.9828, - "step": 4475 - }, - { - "epoch": 0.3442812091377586, - "learning_rate": 0.002204886439792643, - "loss": 1.0335, - "step": 4476 - }, - { - "epoch": 0.3443581262979771, - "learning_rate": 0.0022045664701789295, - "loss": 1.0159, - "step": 4477 - }, - { - "epoch": 0.34443504345819553, - "learning_rate": 0.002204246459424846, - "loss": 0.9004, - "step": 4478 - }, - { - "epoch": 0.344511960618414, - "learning_rate": 0.002203926407549077, - "loss": 1.3003, - "step": 4479 - }, - { - "epoch": 0.3445888777786324, - "learning_rate": 0.002203606314570312, - "loss": 1.1376, - "step": 4480 - }, - { - "epoch": 0.34466579493885086, - "learning_rate": 0.002203286180507241, - "loss": 1.2093, - "step": 4481 - }, - { - "epoch": 0.3447427120990693, - "learning_rate": 0.0022029660053785568, - "loss": 0.9017, - "step": 4482 - }, - { - "epoch": 0.3448196292592878, - "learning_rate": 0.002202645789202955, - "loss": 1.3401, - "step": 4483 - }, - { - "epoch": 0.3448965464195062, - "learning_rate": 0.0022023255319991338, - "loss": 0.975, - "step": 4484 - }, - { - "epoch": 0.34497346357972464, - "learning_rate": 0.0022020052337857923, - "loss": 0.9787, - "step": 4485 - }, - { - "epoch": 0.3450503807399431, - "learning_rate": 0.0022016848945816334, - "loss": 0.9551, - "step": 4486 - }, - { - "epoch": 0.3451272979001615, - "learning_rate": 0.0022013645144053633, - "loss": 0.9308, - "step": 4487 - }, - { - "epoch": 0.34520421506037996, - "learning_rate": 0.002201044093275687, - "loss": 1.2331, - "step": 4488 - }, - { - "epoch": 0.3452811322205984, - "learning_rate": 0.002200723631211316, - "loss": 0.8028, - "step": 4489 - }, - { - "epoch": 0.3453580493808169, - "learning_rate": 0.002200403128230962, - "loss": 1.0846, - "step": 4490 - }, - { - "epoch": 0.3454349665410353, - "learning_rate": 0.0022000825843533395, - "loss": 1.3261, - "step": 4491 - }, - { - "epoch": 0.34551188370125374, - "learning_rate": 0.0021997619995971648, - "loss": 0.9967, - "step": 4492 - }, - { - "epoch": 0.3455888008614722, - "learning_rate": 0.0021994413739811576, - "loss": 1.2422, - "step": 4493 - }, - { - "epoch": 0.34566571802169066, - "learning_rate": 0.00219912070752404, - "loss": 1.0371, - "step": 4494 - }, - { - "epoch": 0.34574263518190906, - "learning_rate": 0.002198800000244536, - "loss": 1.0514, - "step": 4495 - }, - { - "epoch": 0.3458195523421275, - "learning_rate": 0.0021984792521613714, - "loss": 1.101, - "step": 4496 - }, - { - "epoch": 0.345896469502346, - "learning_rate": 0.002198158463293275, - "loss": 0.8815, - "step": 4497 - }, - { - "epoch": 0.34597338666256444, - "learning_rate": 0.002197837633658979, - "loss": 1.0826, - "step": 4498 - }, - { - "epoch": 0.34605030382278285, - "learning_rate": 0.002197516763277216, - "loss": 1.1511, - "step": 4499 - }, - { - "epoch": 0.3461272209830013, - "learning_rate": 0.002197195852166722, - "loss": 0.7556, - "step": 4500 - }, - { - "epoch": 0.34620413814321976, - "learning_rate": 0.0021968749003462356, - "loss": 1.0577, - "step": 4501 - }, - { - "epoch": 0.3462810553034382, - "learning_rate": 0.0021965539078344976, - "loss": 1.1914, - "step": 4502 - }, - { - "epoch": 0.3463579724636566, - "learning_rate": 0.0021962328746502513, - "loss": 0.974, - "step": 4503 - }, - { - "epoch": 0.3464348896238751, - "learning_rate": 0.002195911800812241, - "loss": 1.6516, - 
"step": 4504 - }, - { - "epoch": 0.34651180678409355, - "learning_rate": 0.0021955906863392162, - "loss": 1.2723, - "step": 4505 - }, - { - "epoch": 0.34658872394431195, - "learning_rate": 0.0021952695312499266, - "loss": 1.1857, - "step": 4506 - }, - { - "epoch": 0.3466656411045304, - "learning_rate": 0.002194948335563124, - "loss": 1.0173, - "step": 4507 - }, - { - "epoch": 0.34674255826474887, - "learning_rate": 0.0021946270992975634, - "loss": 1.3723, - "step": 4508 - }, - { - "epoch": 0.3468194754249673, - "learning_rate": 0.002194305822472003, - "loss": 1.0406, - "step": 4509 - }, - { - "epoch": 0.34689639258518573, - "learning_rate": 0.0021939845051052024, - "loss": 0.9578, - "step": 4510 - }, - { - "epoch": 0.3469733097454042, - "learning_rate": 0.0021936631472159224, - "loss": 1.8563, - "step": 4511 - }, - { - "epoch": 0.34705022690562265, - "learning_rate": 0.002193341748822929, - "loss": 1.5305, - "step": 4512 - }, - { - "epoch": 0.3471271440658411, - "learning_rate": 0.002193020309944988, - "loss": 1.223, - "step": 4513 - }, - { - "epoch": 0.3472040612260595, - "learning_rate": 0.002192698830600869, - "loss": 1.1849, - "step": 4514 - }, - { - "epoch": 0.34728097838627797, - "learning_rate": 0.0021923773108093432, - "loss": 1.4335, - "step": 4515 - }, - { - "epoch": 0.34735789554649643, - "learning_rate": 0.002192055750589185, - "loss": 0.8563, - "step": 4516 - }, - { - "epoch": 0.3474348127067149, - "learning_rate": 0.0021917341499591695, - "loss": 0.8439, - "step": 4517 - }, - { - "epoch": 0.3475117298669333, - "learning_rate": 0.0021914125089380772, - "loss": 0.7949, - "step": 4518 - }, - { - "epoch": 0.34758864702715175, - "learning_rate": 0.0021910908275446874, - "loss": 1.1507, - "step": 4519 - }, - { - "epoch": 0.3476655641873702, - "learning_rate": 0.0021907691057977837, - "loss": 1.2038, - "step": 4520 - }, - { - "epoch": 0.34774248134758867, - "learning_rate": 0.002190447343716152, - "loss": 1.1221, - "step": 4521 - }, - { - "epoch": 0.3478193985078071, - "learning_rate": 0.002190125541318581, - "loss": 1.1422, - "step": 4522 - }, - { - "epoch": 0.34789631566802554, - "learning_rate": 0.00218980369862386, - "loss": 1.2733, - "step": 4523 - }, - { - "epoch": 0.347973232828244, - "learning_rate": 0.002189481815650782, - "loss": 1.0066, - "step": 4524 - }, - { - "epoch": 0.3480501499884624, - "learning_rate": 0.0021891598924181424, - "loss": 1.1536, - "step": 4525 - }, - { - "epoch": 0.34812706714868086, - "learning_rate": 0.0021888379289447383, - "loss": 1.15, - "step": 4526 - }, - { - "epoch": 0.3482039843088993, - "learning_rate": 0.0021885159252493696, - "loss": 1.2677, - "step": 4527 - }, - { - "epoch": 0.3482809014691178, - "learning_rate": 0.0021881938813508384, - "loss": 1.1407, - "step": 4528 - }, - { - "epoch": 0.3483578186293362, - "learning_rate": 0.0021878717972679497, - "loss": 0.9848, - "step": 4529 - }, - { - "epoch": 0.34843473578955464, - "learning_rate": 0.0021875496730195095, - "loss": 1.2132, - "step": 4530 - }, - { - "epoch": 0.3485116529497731, - "learning_rate": 0.0021872275086243275, - "loss": 1.2289, - "step": 4531 - }, - { - "epoch": 0.34858857010999156, - "learning_rate": 0.002186905304101215, - "loss": 1.279, - "step": 4532 - }, - { - "epoch": 0.34866548727020996, - "learning_rate": 0.002186583059468986, - "loss": 1.3782, - "step": 4533 - }, - { - "epoch": 0.3487424044304284, - "learning_rate": 0.0021862607747464564, - "loss": 1.2075, - "step": 4534 - }, - { - "epoch": 0.3488193215906469, - "learning_rate": 0.002185938449952445, - "loss": 
1.2174, - "step": 4535 - }, - { - "epoch": 0.34889623875086534, - "learning_rate": 0.0021856160851057727, - "loss": 1.2171, - "step": 4536 - }, - { - "epoch": 0.34897315591108374, - "learning_rate": 0.002185293680225263, - "loss": 0.6313, - "step": 4537 - }, - { - "epoch": 0.3490500730713022, - "learning_rate": 0.002184971235329741, - "loss": 0.7088, - "step": 4538 - }, - { - "epoch": 0.34912699023152066, - "learning_rate": 0.002184648750438034, - "loss": 1.198, - "step": 4539 - }, - { - "epoch": 0.3492039073917391, - "learning_rate": 0.0021843262255689735, - "loss": 0.9856, - "step": 4540 - }, - { - "epoch": 0.3492808245519575, - "learning_rate": 0.0021840036607413916, - "loss": 1.2084, - "step": 4541 - }, - { - "epoch": 0.349357741712176, - "learning_rate": 0.002183681055974123, - "loss": 1.125, - "step": 4542 - }, - { - "epoch": 0.34943465887239444, - "learning_rate": 0.002183358411286005, - "loss": 1.0228, - "step": 4543 - }, - { - "epoch": 0.3495115760326129, - "learning_rate": 0.002183035726695877, - "loss": 1.4435, - "step": 4544 - }, - { - "epoch": 0.3495884931928313, - "learning_rate": 0.0021827130022225812, - "loss": 1.1466, - "step": 4545 - }, - { - "epoch": 0.34966541035304977, - "learning_rate": 0.002182390237884962, - "loss": 0.8455, - "step": 4546 - }, - { - "epoch": 0.3497423275132682, - "learning_rate": 0.0021820674337018654, - "loss": 1.5281, - "step": 4547 - }, - { - "epoch": 0.34981924467348663, - "learning_rate": 0.0021817445896921404, - "loss": 0.7684, - "step": 4548 - }, - { - "epoch": 0.3498961618337051, - "learning_rate": 0.0021814217058746385, - "loss": 1.2232, - "step": 4549 - }, - { - "epoch": 0.34997307899392355, - "learning_rate": 0.0021810987822682127, - "loss": 1.0272, - "step": 4550 - }, - { - "epoch": 0.350049996154142, - "learning_rate": 0.0021807758188917194, - "loss": 0.9636, - "step": 4551 - }, - { - "epoch": 0.3501269133143604, - "learning_rate": 0.002180452815764017, - "loss": 1.3657, - "step": 4552 - }, - { - "epoch": 0.35020383047457887, - "learning_rate": 0.0021801297729039655, - "loss": 1.1887, - "step": 4553 - }, - { - "epoch": 0.35028074763479733, - "learning_rate": 0.0021798066903304268, - "loss": 1.1966, - "step": 4554 - }, - { - "epoch": 0.3503576647950158, - "learning_rate": 0.0021794835680622677, - "loss": 1.034, - "step": 4555 - }, - { - "epoch": 0.3504345819552342, - "learning_rate": 0.0021791604061183545, - "loss": 1.2712, - "step": 4556 - }, - { - "epoch": 0.35051149911545265, - "learning_rate": 0.0021788372045175577, - "loss": 0.8929, - "step": 4557 - }, - { - "epoch": 0.3505884162756711, - "learning_rate": 0.0021785139632787493, - "loss": 1.1922, - "step": 4558 - }, - { - "epoch": 0.35066533343588957, - "learning_rate": 0.0021781906824208026, - "loss": 0.8638, - "step": 4559 - }, - { - "epoch": 0.350742250596108, - "learning_rate": 0.002177867361962596, - "loss": 1.1992, - "step": 4560 - }, - { - "epoch": 0.35081916775632643, - "learning_rate": 0.0021775440019230073, - "loss": 1.2379, - "step": 4561 - }, - { - "epoch": 0.3508960849165449, - "learning_rate": 0.0021772206023209185, - "loss": 1.1376, - "step": 4562 - }, - { - "epoch": 0.35097300207676335, - "learning_rate": 0.002176897163175212, - "loss": 1.2832, - "step": 4563 - }, - { - "epoch": 0.35104991923698176, - "learning_rate": 0.0021765736845047764, - "loss": 1.3489, - "step": 4564 - }, - { - "epoch": 0.3511268363972002, - "learning_rate": 0.0021762501663284968, - "loss": 0.9143, - "step": 4565 - }, - { - "epoch": 0.3512037535574187, - "learning_rate": 0.002175926608665266, - 
"loss": 1.1331, - "step": 4566 - }, - { - "epoch": 0.3512806707176371, - "learning_rate": 0.002175603011533976, - "loss": 1.1869, - "step": 4567 - }, - { - "epoch": 0.35135758787785554, - "learning_rate": 0.0021752793749535214, - "loss": 0.9946, - "step": 4568 - }, - { - "epoch": 0.351434505038074, - "learning_rate": 0.002174955698942801, - "loss": 1.334, - "step": 4569 - }, - { - "epoch": 0.35151142219829246, - "learning_rate": 0.0021746319835207134, - "loss": 0.83, - "step": 4570 - }, - { - "epoch": 0.35158833935851086, - "learning_rate": 0.002174308228706162, - "loss": 1.3723, - "step": 4571 - }, - { - "epoch": 0.3516652565187293, - "learning_rate": 0.00217398443451805, - "loss": 1.5225, - "step": 4572 - }, - { - "epoch": 0.3517421736789478, - "learning_rate": 0.002173660600975284, - "loss": 1.3333, - "step": 4573 - }, - { - "epoch": 0.35181909083916624, - "learning_rate": 0.002173336728096774, - "loss": 0.8326, - "step": 4574 - }, - { - "epoch": 0.35189600799938464, - "learning_rate": 0.002173012815901431, - "loss": 1.3905, - "step": 4575 - }, - { - "epoch": 0.3519729251596031, - "learning_rate": 0.002172688864408167, - "loss": 0.7822, - "step": 4576 - }, - { - "epoch": 0.35204984231982156, - "learning_rate": 0.0021723648736358997, - "loss": 1.1313, - "step": 4577 - }, - { - "epoch": 0.35212675948004, - "learning_rate": 0.002172040843603547, - "loss": 1.4056, - "step": 4578 - }, - { - "epoch": 0.3522036766402584, - "learning_rate": 0.002171716774330029, - "loss": 1.1409, - "step": 4579 - }, - { - "epoch": 0.3522805938004769, - "learning_rate": 0.0021713926658342687, - "loss": 1.4108, - "step": 4580 - }, - { - "epoch": 0.35235751096069534, - "learning_rate": 0.0021710685181351905, - "loss": 1.1529, - "step": 4581 - }, - { - "epoch": 0.3524344281209138, - "learning_rate": 0.0021707443312517223, - "loss": 1.3354, - "step": 4582 - }, - { - "epoch": 0.3525113452811322, - "learning_rate": 0.0021704201052027937, - "loss": 1.0511, - "step": 4583 - }, - { - "epoch": 0.35258826244135066, - "learning_rate": 0.0021700958400073366, - "loss": 1.4036, - "step": 4584 - }, - { - "epoch": 0.3526651796015691, - "learning_rate": 0.0021697715356842847, - "loss": 1.0696, - "step": 4585 - }, - { - "epoch": 0.3527420967617876, - "learning_rate": 0.002169447192252575, - "loss": 1.4884, - "step": 4586 - }, - { - "epoch": 0.352819013922006, - "learning_rate": 0.002169122809731146, - "loss": 1.3654, - "step": 4587 - }, - { - "epoch": 0.35289593108222445, - "learning_rate": 0.002168798388138939, - "loss": 0.849, - "step": 4588 - }, - { - "epoch": 0.3529728482424429, - "learning_rate": 0.0021684739274948966, - "loss": 1.448, - "step": 4589 - }, - { - "epoch": 0.3530497654026613, - "learning_rate": 0.002168149427817966, - "loss": 0.9463, - "step": 4590 - }, - { - "epoch": 0.35312668256287977, - "learning_rate": 0.0021678248891270928, - "loss": 1.4599, - "step": 4591 - }, - { - "epoch": 0.3532035997230982, - "learning_rate": 0.0021675003114412284, - "loss": 1.377, - "step": 4592 - }, - { - "epoch": 0.3532805168833167, - "learning_rate": 0.0021671756947793256, - "loss": 1.0482, - "step": 4593 - }, - { - "epoch": 0.3533574340435351, - "learning_rate": 0.0021668510391603386, - "loss": 1.351, - "step": 4594 - }, - { - "epoch": 0.35343435120375355, - "learning_rate": 0.0021665263446032246, - "loss": 0.9677, - "step": 4595 - }, - { - "epoch": 0.353511268363972, - "learning_rate": 0.0021662016111269424, - "loss": 1.4628, - "step": 4596 - }, - { - "epoch": 0.35358818552419047, - "learning_rate": 0.0021658768387504542, - 
"loss": 1.2718, - "step": 4597 - }, - { - "epoch": 0.35366510268440887, - "learning_rate": 0.0021655520274927232, - "loss": 1.4732, - "step": 4598 - }, - { - "epoch": 0.35374201984462733, - "learning_rate": 0.0021652271773727155, - "loss": 0.6943, - "step": 4599 - }, - { - "epoch": 0.3538189370048458, - "learning_rate": 0.0021649022884094, - "loss": 1.192, - "step": 4600 - }, - { - "epoch": 0.35389585416506425, - "learning_rate": 0.0021645773606217464, - "loss": 1.2953, - "step": 4601 - }, - { - "epoch": 0.35397277132528265, - "learning_rate": 0.0021642523940287287, - "loss": 1.2194, - "step": 4602 - }, - { - "epoch": 0.3540496884855011, - "learning_rate": 0.002163927388649321, - "loss": 1.2613, - "step": 4603 - }, - { - "epoch": 0.35412660564571957, - "learning_rate": 0.0021636023445025013, - "loss": 0.9221, - "step": 4604 - }, - { - "epoch": 0.35420352280593803, - "learning_rate": 0.002163277261607249, - "loss": 1.1065, - "step": 4605 - }, - { - "epoch": 0.35428043996615644, - "learning_rate": 0.002162952139982547, - "loss": 1.0068, - "step": 4606 - }, - { - "epoch": 0.3543573571263749, - "learning_rate": 0.0021626269796473775, - "loss": 1.3094, - "step": 4607 - }, - { - "epoch": 0.35443427428659335, - "learning_rate": 0.0021623017806207284, - "loss": 1.2667, - "step": 4608 - }, - { - "epoch": 0.35451119144681176, - "learning_rate": 0.0021619765429215882, - "loss": 1.2654, - "step": 4609 - }, - { - "epoch": 0.3545881086070302, - "learning_rate": 0.002161651266568948, - "loss": 0.8474, - "step": 4610 - }, - { - "epoch": 0.3546650257672487, - "learning_rate": 0.0021613259515818007, - "loss": 1.1862, - "step": 4611 - }, - { - "epoch": 0.35474194292746714, - "learning_rate": 0.002161000597979141, - "loss": 1.1444, - "step": 4612 - }, - { - "epoch": 0.35481886008768554, - "learning_rate": 0.002160675205779969, - "loss": 1.1831, - "step": 4613 - }, - { - "epoch": 0.354895777247904, - "learning_rate": 0.0021603497750032822, - "loss": 1.1047, - "step": 4614 - }, - { - "epoch": 0.35497269440812246, - "learning_rate": 0.002160024305668084, - "loss": 1.4083, - "step": 4615 - }, - { - "epoch": 0.3550496115683409, - "learning_rate": 0.002159698797793379, - "loss": 0.9487, - "step": 4616 - }, - { - "epoch": 0.3551265287285593, - "learning_rate": 0.0021593732513981736, - "loss": 1.0903, - "step": 4617 - }, - { - "epoch": 0.3552034458887778, - "learning_rate": 0.0021590476665014767, - "loss": 1.258, - "step": 4618 - }, - { - "epoch": 0.35528036304899624, - "learning_rate": 0.0021587220431222996, - "loss": 1.1773, - "step": 4619 - }, - { - "epoch": 0.3553572802092147, - "learning_rate": 0.002158396381279656, - "loss": 1.1955, - "step": 4620 - }, - { - "epoch": 0.3554341973694331, - "learning_rate": 0.0021580706809925616, - "loss": 0.899, - "step": 4621 - }, - { - "epoch": 0.35551111452965156, - "learning_rate": 0.0021577449422800344, - "loss": 0.905, - "step": 4622 - }, - { - "epoch": 0.35558803168987, - "learning_rate": 0.0021574191651610943, - "loss": 0.9677, - "step": 4623 - }, - { - "epoch": 0.3556649488500885, - "learning_rate": 0.002157093349654764, - "loss": 1.1556, - "step": 4624 - }, - { - "epoch": 0.3557418660103069, - "learning_rate": 0.002156767495780069, - "loss": 1.2715, - "step": 4625 - }, - { - "epoch": 0.35581878317052534, - "learning_rate": 0.0021564416035560344, - "loss": 1.2876, - "step": 4626 - }, - { - "epoch": 0.3558957003307438, - "learning_rate": 0.002156115673001691, - "loss": 1.1753, - "step": 4627 - }, - { - "epoch": 0.3559726174909622, - "learning_rate": 
0.0021557897041360697, - "loss": 1.2324, - "step": 4628 - }, - { - "epoch": 0.35604953465118067, - "learning_rate": 0.0021554636969782037, - "loss": 0.9744, - "step": 4629 - }, - { - "epoch": 0.3561264518113991, - "learning_rate": 0.0021551376515471297, - "loss": 1.1218, - "step": 4630 - }, - { - "epoch": 0.3562033689716176, - "learning_rate": 0.002154811567861885, - "loss": 1.1404, - "step": 4631 - }, - { - "epoch": 0.356280286131836, - "learning_rate": 0.002154485445941511, - "loss": 1.2168, - "step": 4632 - }, - { - "epoch": 0.35635720329205445, - "learning_rate": 0.0021541592858050494, - "loss": 0.9979, - "step": 4633 - }, - { - "epoch": 0.3564341204522729, - "learning_rate": 0.002153833087471545, - "loss": 0.9219, - "step": 4634 - }, - { - "epoch": 0.35651103761249137, - "learning_rate": 0.002153506850960046, - "loss": 1.2117, - "step": 4635 - }, - { - "epoch": 0.35658795477270977, - "learning_rate": 0.0021531805762896004, - "loss": 1.0654, - "step": 4636 - }, - { - "epoch": 0.35666487193292823, - "learning_rate": 0.0021528542634792606, - "loss": 1.3072, - "step": 4637 - }, - { - "epoch": 0.3567417890931467, - "learning_rate": 0.0021525279125480796, - "loss": 1.0846, - "step": 4638 - }, - { - "epoch": 0.35681870625336515, - "learning_rate": 0.002152201523515114, - "loss": 1.2969, - "step": 4639 - }, - { - "epoch": 0.35689562341358355, - "learning_rate": 0.0021518750963994216, - "loss": 1.1594, - "step": 4640 - }, - { - "epoch": 0.356972540573802, - "learning_rate": 0.0021515486312200633, - "loss": 1.2355, - "step": 4641 - }, - { - "epoch": 0.35704945773402047, - "learning_rate": 0.0021512221279961, - "loss": 0.7651, - "step": 4642 - }, - { - "epoch": 0.35712637489423893, - "learning_rate": 0.0021508955867465995, - "loss": 1.0978, - "step": 4643 - }, - { - "epoch": 0.35720329205445733, - "learning_rate": 0.0021505690074906272, - "loss": 1.047, - "step": 4644 - }, - { - "epoch": 0.3572802092146758, - "learning_rate": 0.0021502423902472517, - "loss": 0.964, - "step": 4645 - }, - { - "epoch": 0.35735712637489425, - "learning_rate": 0.0021499157350355464, - "loss": 1.0864, - "step": 4646 - }, - { - "epoch": 0.3574340435351127, - "learning_rate": 0.002149589041874583, - "loss": 1.4225, - "step": 4647 - }, - { - "epoch": 0.3575109606953311, - "learning_rate": 0.0021492623107834397, - "loss": 1.3418, - "step": 4648 - }, - { - "epoch": 0.3575878778555496, - "learning_rate": 0.0021489355417811918, - "loss": 1.0769, - "step": 4649 - }, - { - "epoch": 0.35766479501576803, - "learning_rate": 0.0021486087348869224, - "loss": 1.0327, - "step": 4650 - }, - { - "epoch": 0.35774171217598644, - "learning_rate": 0.002148281890119713, - "loss": 1.295, - "step": 4651 - }, - { - "epoch": 0.3578186293362049, - "learning_rate": 0.002147955007498648, - "loss": 1.1204, - "step": 4652 - }, - { - "epoch": 0.35789554649642336, - "learning_rate": 0.0021476280870428147, - "loss": 1.094, - "step": 4653 - }, - { - "epoch": 0.3579724636566418, - "learning_rate": 0.002147301128771303, - "loss": 1.2706, - "step": 4654 - }, - { - "epoch": 0.3580493808168602, - "learning_rate": 0.0021469741327032037, - "loss": 1.2103, - "step": 4655 - }, - { - "epoch": 0.3581262979770787, - "learning_rate": 0.0021466470988576103, - "loss": 1.4753, - "step": 4656 - }, - { - "epoch": 0.35820321513729714, - "learning_rate": 0.002146320027253619, - "loss": 1.0931, - "step": 4657 - }, - { - "epoch": 0.3582801322975156, - "learning_rate": 0.0021459929179103274, - "loss": 1.0186, - "step": 4658 - }, - { - "epoch": 0.358357049457734, - 
"learning_rate": 0.002145665770846837, - "loss": 1.0179, - "step": 4659 - }, - { - "epoch": 0.35843396661795246, - "learning_rate": 0.0021453385860822484, - "loss": 1.1274, - "step": 4660 - }, - { - "epoch": 0.3585108837781709, - "learning_rate": 0.0021450113636356676, - "loss": 0.8537, - "step": 4661 - }, - { - "epoch": 0.3585878009383894, - "learning_rate": 0.002144684103526201, - "loss": 1.2804, - "step": 4662 - }, - { - "epoch": 0.3586647180986078, - "learning_rate": 0.0021443568057729575, - "loss": 0.9907, - "step": 4663 - }, - { - "epoch": 0.35874163525882624, - "learning_rate": 0.002144029470395049, - "loss": 0.9742, - "step": 4664 - }, - { - "epoch": 0.3588185524190447, - "learning_rate": 0.0021437020974115887, - "loss": 1.071, - "step": 4665 - }, - { - "epoch": 0.35889546957926316, - "learning_rate": 0.002143374686841691, - "loss": 1.1727, - "step": 4666 - }, - { - "epoch": 0.35897238673948156, - "learning_rate": 0.002143047238704476, - "loss": 0.862, - "step": 4667 - }, - { - "epoch": 0.3590493038997, - "learning_rate": 0.002142719753019062, - "loss": 1.2982, - "step": 4668 - }, - { - "epoch": 0.3591262210599185, - "learning_rate": 0.002142392229804572, - "loss": 0.9353, - "step": 4669 - }, - { - "epoch": 0.3592031382201369, - "learning_rate": 0.0021420646690801302, - "loss": 1.0221, - "step": 4670 - }, - { - "epoch": 0.35928005538035535, - "learning_rate": 0.0021417370708648634, - "loss": 1.4891, - "step": 4671 - }, - { - "epoch": 0.3593569725405738, - "learning_rate": 0.0021414094351778997, - "loss": 0.89, - "step": 4672 - }, - { - "epoch": 0.35943388970079226, - "learning_rate": 0.0021410817620383703, - "loss": 0.9063, - "step": 4673 - }, - { - "epoch": 0.35951080686101067, - "learning_rate": 0.0021407540514654095, - "loss": 0.9539, - "step": 4674 - }, - { - "epoch": 0.3595877240212291, - "learning_rate": 0.002140426303478152, - "loss": 0.9472, - "step": 4675 - }, - { - "epoch": 0.3596646411814476, - "learning_rate": 0.0021400985180957347, - "loss": 1.0589, - "step": 4676 - }, - { - "epoch": 0.35974155834166605, - "learning_rate": 0.0021397706953372978, - "loss": 1.1529, - "step": 4677 - }, - { - "epoch": 0.35981847550188445, - "learning_rate": 0.0021394428352219837, - "loss": 1.4477, - "step": 4678 - }, - { - "epoch": 0.3598953926621029, - "learning_rate": 0.0021391149377689355, - "loss": 1.0915, - "step": 4679 - }, - { - "epoch": 0.35997230982232137, - "learning_rate": 0.0021387870029973004, - "loss": 1.4642, - "step": 4680 - }, - { - "epoch": 0.3600492269825398, - "learning_rate": 0.002138459030926226, - "loss": 0.9607, - "step": 4681 - }, - { - "epoch": 0.36012614414275823, - "learning_rate": 0.0021381310215748644, - "loss": 1.2426, - "step": 4682 - }, - { - "epoch": 0.3602030613029767, - "learning_rate": 0.0021378029749623663, - "loss": 1.3444, - "step": 4683 - }, - { - "epoch": 0.36027997846319515, - "learning_rate": 0.0021374748911078883, - "loss": 1.2366, - "step": 4684 - }, - { - "epoch": 0.3603568956234136, - "learning_rate": 0.0021371467700305874, - "loss": 1.1309, - "step": 4685 - }, - { - "epoch": 0.360433812783632, - "learning_rate": 0.002136818611749622, - "loss": 0.8353, - "step": 4686 - }, - { - "epoch": 0.36051072994385047, - "learning_rate": 0.0021364904162841543, - "loss": 1.1354, - "step": 4687 - }, - { - "epoch": 0.36058764710406893, - "learning_rate": 0.0021361621836533478, - "loss": 1.1264, - "step": 4688 - }, - { - "epoch": 0.36066456426428734, - "learning_rate": 0.0021358339138763684, - "loss": 0.9663, - "step": 4689 - }, - { - "epoch": 
0.3607414814245058, - "learning_rate": 0.0021355056069723845, - "loss": 1.0954, - "step": 4690 - }, - { - "epoch": 0.36081839858472425, - "learning_rate": 0.0021351772629605655, - "loss": 1.057, - "step": 4691 - }, - { - "epoch": 0.3608953157449427, - "learning_rate": 0.0021348488818600845, - "loss": 1.1182, - "step": 4692 - }, - { - "epoch": 0.3609722329051611, - "learning_rate": 0.0021345204636901154, - "loss": 1.0405, - "step": 4693 - }, - { - "epoch": 0.3610491500653796, - "learning_rate": 0.002134192008469835, - "loss": 1.2886, - "step": 4694 - }, - { - "epoch": 0.36112606722559804, - "learning_rate": 0.0021338635162184223, - "loss": 1.0584, - "step": 4695 - }, - { - "epoch": 0.3612029843858165, - "learning_rate": 0.0021335349869550588, - "loss": 1.389, - "step": 4696 - }, - { - "epoch": 0.3612799015460349, - "learning_rate": 0.002133206420698927, - "loss": 1.0482, - "step": 4697 - }, - { - "epoch": 0.36135681870625336, - "learning_rate": 0.002132877817469212, - "loss": 0.8862, - "step": 4698 - }, - { - "epoch": 0.3614337358664718, - "learning_rate": 0.0021325491772851016, - "loss": 1.2062, - "step": 4699 - }, - { - "epoch": 0.3615106530266903, - "learning_rate": 0.0021322205001657857, - "loss": 1.4097, - "step": 4700 - }, - { - "epoch": 0.3615875701869087, - "learning_rate": 0.0021318917861304566, - "loss": 1.3983, - "step": 4701 - }, - { - "epoch": 0.36166448734712714, - "learning_rate": 0.002131563035198307, - "loss": 1.1813, - "step": 4702 - }, - { - "epoch": 0.3617414045073456, - "learning_rate": 0.0021312342473885333, - "loss": 1.4315, - "step": 4703 - }, - { - "epoch": 0.36181832166756406, - "learning_rate": 0.0021309054227203338, - "loss": 1.0769, - "step": 4704 - }, - { - "epoch": 0.36189523882778246, - "learning_rate": 0.0021305765612129103, - "loss": 1.2647, - "step": 4705 - }, - { - "epoch": 0.3619721559880009, - "learning_rate": 0.002130247662885463, - "loss": 1.2099, - "step": 4706 - }, - { - "epoch": 0.3620490731482194, - "learning_rate": 0.002129918727757199, - "loss": 1.0531, - "step": 4707 - }, - { - "epoch": 0.36212599030843784, - "learning_rate": 0.002129589755847323, - "loss": 1.0395, - "step": 4708 - }, - { - "epoch": 0.36220290746865624, - "learning_rate": 0.002129260747175046, - "loss": 1.4338, - "step": 4709 - }, - { - "epoch": 0.3622798246288747, - "learning_rate": 0.0021289317017595772, - "loss": 1.1014, - "step": 4710 - }, - { - "epoch": 0.36235674178909316, - "learning_rate": 0.002128602619620131, - "loss": 1.3059, - "step": 4711 - }, - { - "epoch": 0.36243365894931157, - "learning_rate": 0.0021282735007759233, - "loss": 1.2196, - "step": 4712 - }, - { - "epoch": 0.36251057610953, - "learning_rate": 0.0021279443452461707, - "loss": 1.1374, - "step": 4713 - }, - { - "epoch": 0.3625874932697485, - "learning_rate": 0.0021276151530500937, - "loss": 1.1438, - "step": 4714 - }, - { - "epoch": 0.36266441042996694, - "learning_rate": 0.0021272859242069133, - "loss": 1.2911, - "step": 4715 - }, - { - "epoch": 0.36274132759018535, - "learning_rate": 0.0021269566587358547, - "loss": 1.1752, - "step": 4716 - }, - { - "epoch": 0.3628182447504038, - "learning_rate": 0.002126627356656143, - "loss": 1.3372, - "step": 4717 - }, - { - "epoch": 0.36289516191062227, - "learning_rate": 0.0021262980179870064, - "loss": 1.0532, - "step": 4718 - }, - { - "epoch": 0.3629720790708407, - "learning_rate": 0.0021259686427476765, - "loss": 1.0698, - "step": 4719 - }, - { - "epoch": 0.36304899623105913, - "learning_rate": 0.002125639230957385, - "loss": 0.8697, - "step": 4720 - }, - { - 
"epoch": 0.3631259133912776, - "learning_rate": 0.002125309782635367, - "loss": 1.317, - "step": 4721 - }, - { - "epoch": 0.36320283055149605, - "learning_rate": 0.0021249802978008585, - "loss": 1.3131, - "step": 4722 - }, - { - "epoch": 0.3632797477117145, - "learning_rate": 0.0021246507764731, - "loss": 1.5172, - "step": 4723 - }, - { - "epoch": 0.3633566648719329, - "learning_rate": 0.002124321218671331, - "loss": 1.3545, - "step": 4724 - }, - { - "epoch": 0.36343358203215137, - "learning_rate": 0.0021239916244147955, - "loss": 1.3479, - "step": 4725 - }, - { - "epoch": 0.36351049919236983, - "learning_rate": 0.0021236619937227384, - "loss": 1.0363, - "step": 4726 - }, - { - "epoch": 0.3635874163525883, - "learning_rate": 0.002123332326614408, - "loss": 0.9226, - "step": 4727 - }, - { - "epoch": 0.3636643335128067, - "learning_rate": 0.002123002623109053, - "loss": 1.0268, - "step": 4728 - }, - { - "epoch": 0.36374125067302515, - "learning_rate": 0.0021226728832259264, - "loss": 1.3165, - "step": 4729 - }, - { - "epoch": 0.3638181678332436, - "learning_rate": 0.0021223431069842804, - "loss": 1.081, - "step": 4730 - }, - { - "epoch": 0.363895084993462, - "learning_rate": 0.002122013294403372, - "loss": 1.0528, - "step": 4731 - }, - { - "epoch": 0.3639720021536805, - "learning_rate": 0.0021216834455024592, - "loss": 1.3662, - "step": 4732 - }, - { - "epoch": 0.36404891931389893, - "learning_rate": 0.002121353560300802, - "loss": 1.4345, - "step": 4733 - }, - { - "epoch": 0.3641258364741174, - "learning_rate": 0.0021210236388176634, - "loss": 1.5348, - "step": 4734 - }, - { - "epoch": 0.3642027536343358, - "learning_rate": 0.002120693681072307, - "loss": 1.0566, - "step": 4735 - }, - { - "epoch": 0.36427967079455426, - "learning_rate": 0.0021203636870839996, - "loss": 1.2474, - "step": 4736 - }, - { - "epoch": 0.3643565879547727, - "learning_rate": 0.00212003365687201, - "loss": 1.013, - "step": 4737 - }, - { - "epoch": 0.3644335051149912, - "learning_rate": 0.0021197035904556086, - "loss": 0.9083, - "step": 4738 - }, - { - "epoch": 0.3645104222752096, - "learning_rate": 0.002119373487854069, - "loss": 1.1171, - "step": 4739 - }, - { - "epoch": 0.36458733943542804, - "learning_rate": 0.002119043349086666, - "loss": 1.1389, - "step": 4740 - }, - { - "epoch": 0.3646642565956465, - "learning_rate": 0.0021187131741726766, - "loss": 1.0876, - "step": 4741 - }, - { - "epoch": 0.36474117375586496, - "learning_rate": 0.0021183829631313805, - "loss": 0.8338, - "step": 4742 - }, - { - "epoch": 0.36481809091608336, - "learning_rate": 0.0021180527159820584, - "loss": 1.3758, - "step": 4743 - }, - { - "epoch": 0.3648950080763018, - "learning_rate": 0.002117722432743994, - "loss": 1.0835, - "step": 4744 - }, - { - "epoch": 0.3649719252365203, - "learning_rate": 0.002117392113436473, - "loss": 0.727, - "step": 4745 - }, - { - "epoch": 0.36504884239673874, - "learning_rate": 0.002117061758078783, - "loss": 1.5522, - "step": 4746 - }, - { - "epoch": 0.36512575955695714, - "learning_rate": 0.0021167313666902144, - "loss": 1.0084, - "step": 4747 - }, - { - "epoch": 0.3652026767171756, - "learning_rate": 0.002116400939290058, - "loss": 0.9914, - "step": 4748 - }, - { - "epoch": 0.36527959387739406, - "learning_rate": 0.002116070475897609, - "loss": 1.3308, - "step": 4749 - }, - { - "epoch": 0.36535651103761246, - "learning_rate": 0.0021157399765321626, - "loss": 1.1763, - "step": 4750 - }, - { - "epoch": 0.3654334281978309, - "learning_rate": 0.0021154094412130166, - "loss": 1.0322, - "step": 4751 - }, - { - 
"epoch": 0.3655103453580494, - "learning_rate": 0.002115078869959473, - "loss": 1.1166, - "step": 4752 - }, - { - "epoch": 0.36558726251826784, - "learning_rate": 0.002114748262790832, - "loss": 1.0376, - "step": 4753 - }, - { - "epoch": 0.36566417967848625, - "learning_rate": 0.0021144176197264004, - "loss": 0.983, - "step": 4754 - }, - { - "epoch": 0.3657410968387047, - "learning_rate": 0.0021140869407854828, - "loss": 1.2964, - "step": 4755 - }, - { - "epoch": 0.36581801399892316, - "learning_rate": 0.002113756225987389, - "loss": 1.2203, - "step": 4756 - }, - { - "epoch": 0.3658949311591416, - "learning_rate": 0.0021134254753514296, - "loss": 1.4343, - "step": 4757 - }, - { - "epoch": 0.36597184831936, - "learning_rate": 0.0021130946888969176, - "loss": 1.0308, - "step": 4758 - }, - { - "epoch": 0.3660487654795785, - "learning_rate": 0.002112763866643167, - "loss": 1.3455, - "step": 4759 - }, - { - "epoch": 0.36612568263979695, - "learning_rate": 0.0021124330086094967, - "loss": 1.2272, - "step": 4760 - }, - { - "epoch": 0.3662025998000154, - "learning_rate": 0.002112102114815224, - "loss": 1.3347, - "step": 4761 - }, - { - "epoch": 0.3662795169602338, - "learning_rate": 0.002111771185279671, - "loss": 1.0937, - "step": 4762 - }, - { - "epoch": 0.36635643412045227, - "learning_rate": 0.0021114402200221615, - "loss": 1.0203, - "step": 4763 - }, - { - "epoch": 0.3664333512806707, - "learning_rate": 0.0021111092190620193, - "loss": 1.4053, - "step": 4764 - }, - { - "epoch": 0.3665102684408892, - "learning_rate": 0.002110778182418574, - "loss": 1.3736, - "step": 4765 - }, - { - "epoch": 0.3665871856011076, - "learning_rate": 0.0021104471101111534, - "loss": 0.801, - "step": 4766 - }, - { - "epoch": 0.36666410276132605, - "learning_rate": 0.00211011600215909, - "loss": 1.0116, - "step": 4767 - }, - { - "epoch": 0.3667410199215445, - "learning_rate": 0.0021097848585817174, - "loss": 0.9977, - "step": 4768 - }, - { - "epoch": 0.36681793708176297, - "learning_rate": 0.0021094536793983716, - "loss": 1.1082, - "step": 4769 - }, - { - "epoch": 0.36689485424198137, - "learning_rate": 0.00210912246462839, - "loss": 1.3029, - "step": 4770 - }, - { - "epoch": 0.36697177140219983, - "learning_rate": 0.002108791214291113, - "loss": 1.1388, - "step": 4771 - }, - { - "epoch": 0.3670486885624183, - "learning_rate": 0.0021084599284058826, - "loss": 1.2999, - "step": 4772 - }, - { - "epoch": 0.3671256057226367, - "learning_rate": 0.002108128606992043, - "loss": 1.3385, - "step": 4773 - }, - { - "epoch": 0.36720252288285515, - "learning_rate": 0.00210779725006894, - "loss": 1.3433, - "step": 4774 - }, - { - "epoch": 0.3672794400430736, - "learning_rate": 0.0021074658576559217, - "loss": 0.8713, - "step": 4775 - }, - { - "epoch": 0.36735635720329207, - "learning_rate": 0.0021071344297723398, - "loss": 1.1533, - "step": 4776 - }, - { - "epoch": 0.3674332743635105, - "learning_rate": 0.0021068029664375455, - "loss": 1.1066, - "step": 4777 - }, - { - "epoch": 0.36751019152372894, - "learning_rate": 0.0021064714676708933, - "loss": 0.8034, - "step": 4778 - }, - { - "epoch": 0.3675871086839474, - "learning_rate": 0.0021061399334917404, - "loss": 1.0343, - "step": 4779 - }, - { - "epoch": 0.36766402584416585, - "learning_rate": 0.002105808363919445, - "loss": 1.1599, - "step": 4780 - }, - { - "epoch": 0.36774094300438426, - "learning_rate": 0.002105476758973368, - "loss": 1.3468, - "step": 4781 - }, - { - "epoch": 0.3678178601646027, - "learning_rate": 0.0021051451186728715, - "loss": 1.0666, - "step": 4782 - }, - 
{ - "epoch": 0.3678947773248212, - "learning_rate": 0.002104813443037321, - "loss": 1.1095, - "step": 4783 - }, - { - "epoch": 0.36797169448503964, - "learning_rate": 0.0021044817320860837, - "loss": 1.1295, - "step": 4784 - }, - { - "epoch": 0.36804861164525804, - "learning_rate": 0.0021041499858385276, - "loss": 1.027, - "step": 4785 - }, - { - "epoch": 0.3681255288054765, - "learning_rate": 0.0021038182043140244, - "loss": 0.867, - "step": 4786 - }, - { - "epoch": 0.36820244596569496, - "learning_rate": 0.002103486387531947, - "loss": 1.0818, - "step": 4787 - }, - { - "epoch": 0.3682793631259134, - "learning_rate": 0.00210315453551167, - "loss": 1.0079, - "step": 4788 - }, - { - "epoch": 0.3683562802861318, - "learning_rate": 0.002102822648272572, - "loss": 1.2499, - "step": 4789 - }, - { - "epoch": 0.3684331974463503, - "learning_rate": 0.0021024907258340307, - "loss": 1.4547, - "step": 4790 - }, - { - "epoch": 0.36851011460656874, - "learning_rate": 0.0021021587682154286, - "loss": 0.6778, - "step": 4791 - }, - { - "epoch": 0.36858703176678714, - "learning_rate": 0.002101826775436148, - "loss": 0.8478, - "step": 4792 - }, - { - "epoch": 0.3686639489270056, - "learning_rate": 0.002101494747515575, - "loss": 1.2081, - "step": 4793 - }, - { - "epoch": 0.36874086608722406, - "learning_rate": 0.0021011626844730964, - "loss": 1.1856, - "step": 4794 - }, - { - "epoch": 0.3688177832474425, - "learning_rate": 0.002100830586328102, - "loss": 1.2232, - "step": 4795 - }, - { - "epoch": 0.3688947004076609, - "learning_rate": 0.0021004984530999843, - "loss": 1.1324, - "step": 4796 - }, - { - "epoch": 0.3689716175678794, - "learning_rate": 0.0021001662848081355, - "loss": 1.1762, - "step": 4797 - }, - { - "epoch": 0.36904853472809784, - "learning_rate": 0.002099834081471952, - "loss": 1.0931, - "step": 4798 - }, - { - "epoch": 0.3691254518883163, - "learning_rate": 0.0020995018431108313, - "loss": 1.0817, - "step": 4799 - }, - { - "epoch": 0.3692023690485347, - "learning_rate": 0.0020991695697441734, - "loss": 1.2891, - "step": 4800 - }, - { - "epoch": 0.36927928620875317, - "learning_rate": 0.0020988372613913796, - "loss": 1.1474, - "step": 4801 - }, - { - "epoch": 0.3693562033689716, - "learning_rate": 0.0020985049180718544, - "loss": 1.2557, - "step": 4802 - }, - { - "epoch": 0.3694331205291901, - "learning_rate": 0.002098172539805003, - "loss": 1.2381, - "step": 4803 - }, - { - "epoch": 0.3695100376894085, - "learning_rate": 0.0020978401266102337, - "loss": 1.2017, - "step": 4804 - }, - { - "epoch": 0.36958695484962695, - "learning_rate": 0.002097507678506956, - "loss": 1.2147, - "step": 4805 - }, - { - "epoch": 0.3696638720098454, - "learning_rate": 0.002097175195514583, - "loss": 1.2062, - "step": 4806 - }, - { - "epoch": 0.36974078917006387, - "learning_rate": 0.002096842677652527, - "loss": 1.3095, - "step": 4807 - }, - { - "epoch": 0.36981770633028227, - "learning_rate": 0.002096510124940206, - "loss": 1.2723, - "step": 4808 - }, - { - "epoch": 0.36989462349050073, - "learning_rate": 0.0020961775373970364, - "loss": 0.704, - "step": 4809 - }, - { - "epoch": 0.3699715406507192, - "learning_rate": 0.0020958449150424393, - "loss": 1.3423, - "step": 4810 - }, - { - "epoch": 0.37004845781093765, - "learning_rate": 0.002095512257895837, - "loss": 0.9298, - "step": 4811 - }, - { - "epoch": 0.37012537497115605, - "learning_rate": 0.002095179565976653, - "loss": 1.1521, - "step": 4812 - }, - { - "epoch": 0.3702022921313745, - "learning_rate": 0.002094846839304314, - "loss": 1.0047, - "step": 4813 - 
}, - { - "epoch": 0.37027920929159297, - "learning_rate": 0.0020945140778982478, - "loss": 0.7626, - "step": 4814 - }, - { - "epoch": 0.3703561264518114, - "learning_rate": 0.0020941812817778854, - "loss": 1.0414, - "step": 4815 - }, - { - "epoch": 0.37043304361202983, - "learning_rate": 0.0020938484509626584, - "loss": 1.0078, - "step": 4816 - }, - { - "epoch": 0.3705099607722483, - "learning_rate": 0.0020935155854720014, - "loss": 1.2156, - "step": 4817 - }, - { - "epoch": 0.37058687793246675, - "learning_rate": 0.0020931826853253514, - "loss": 1.2674, - "step": 4818 - }, - { - "epoch": 0.37066379509268516, - "learning_rate": 0.0020928497505421457, - "loss": 1.275, - "step": 4819 - }, - { - "epoch": 0.3707407122529036, - "learning_rate": 0.0020925167811418257, - "loss": 1.058, - "step": 4820 - }, - { - "epoch": 0.3708176294131221, - "learning_rate": 0.0020921837771438324, - "loss": 1.2275, - "step": 4821 - }, - { - "epoch": 0.37089454657334053, - "learning_rate": 0.0020918507385676117, - "loss": 1.0988, - "step": 4822 - }, - { - "epoch": 0.37097146373355894, - "learning_rate": 0.00209151766543261, - "loss": 1.2692, - "step": 4823 - }, - { - "epoch": 0.3710483808937774, - "learning_rate": 0.0020911845577582747, - "loss": 1.3823, - "step": 4824 - }, - { - "epoch": 0.37112529805399586, - "learning_rate": 0.002090851415564057, - "loss": 1.0632, - "step": 4825 - }, - { - "epoch": 0.3712022152142143, - "learning_rate": 0.00209051823886941, - "loss": 0.8388, - "step": 4826 - }, - { - "epoch": 0.3712791323744327, - "learning_rate": 0.0020901850276937874, - "loss": 0.7805, - "step": 4827 - }, - { - "epoch": 0.3713560495346512, - "learning_rate": 0.0020898517820566458, - "loss": 1.2459, - "step": 4828 - }, - { - "epoch": 0.37143296669486964, - "learning_rate": 0.002089518501977444, - "loss": 0.8033, - "step": 4829 - }, - { - "epoch": 0.3715098838550881, - "learning_rate": 0.002089185187475642, - "loss": 1.2512, - "step": 4830 - }, - { - "epoch": 0.3715868010153065, - "learning_rate": 0.0020888518385707034, - "loss": 1.0718, - "step": 4831 - }, - { - "epoch": 0.37166371817552496, - "learning_rate": 0.002088518455282092, - "loss": 1.4192, - "step": 4832 - }, - { - "epoch": 0.3717406353357434, - "learning_rate": 0.002088185037629275, - "loss": 0.953, - "step": 4833 - }, - { - "epoch": 0.3718175524959618, - "learning_rate": 0.00208785158563172, - "loss": 0.9038, - "step": 4834 - }, - { - "epoch": 0.3718944696561803, - "learning_rate": 0.002087518099308899, - "loss": 0.8231, - "step": 4835 - }, - { - "epoch": 0.37197138681639874, - "learning_rate": 0.0020871845786802836, - "loss": 1.0362, - "step": 4836 - }, - { - "epoch": 0.3720483039766172, - "learning_rate": 0.0020868510237653484, - "loss": 0.929, - "step": 4837 - }, - { - "epoch": 0.3721252211368356, - "learning_rate": 0.002086517434583571, - "loss": 1.0692, - "step": 4838 - }, - { - "epoch": 0.37220213829705406, - "learning_rate": 0.0020861838111544285, - "loss": 1.5116, - "step": 4839 - }, - { - "epoch": 0.3722790554572725, - "learning_rate": 0.002085850153497403, - "loss": 1.0373, - "step": 4840 - }, - { - "epoch": 0.372355972617491, - "learning_rate": 0.002085516461631976, - "loss": 1.1705, - "step": 4841 - }, - { - "epoch": 0.3724328897777094, - "learning_rate": 0.0020851827355776336, - "loss": 1.1292, - "step": 4842 - }, - { - "epoch": 0.37250980693792785, - "learning_rate": 0.0020848489753538604, - "loss": 0.9182, - "step": 4843 - }, - { - "epoch": 0.3725867240981463, - "learning_rate": 0.002084515180980146, - "loss": 1.3056, - "step": 4844 
- }, - { - "epoch": 0.37266364125836476, - "learning_rate": 0.002084181352475981, - "loss": 1.1036, - "step": 4845 - }, - { - "epoch": 0.37274055841858317, - "learning_rate": 0.002083847489860859, - "loss": 1.0138, - "step": 4846 - }, - { - "epoch": 0.3728174755788016, - "learning_rate": 0.0020835135931542726, - "loss": 1.2371, - "step": 4847 - }, - { - "epoch": 0.3728943927390201, - "learning_rate": 0.00208317966237572, - "loss": 1.3178, - "step": 4848 - }, - { - "epoch": 0.37297130989923855, - "learning_rate": 0.0020828456975446985, - "loss": 1.6001, - "step": 4849 - }, - { - "epoch": 0.37304822705945695, - "learning_rate": 0.0020825116986807096, - "loss": 0.8634, - "step": 4850 - }, - { - "epoch": 0.3731251442196754, - "learning_rate": 0.002082177665803256, - "loss": 0.7771, - "step": 4851 - }, - { - "epoch": 0.37320206137989387, - "learning_rate": 0.0020818435989318406, - "loss": 1.1008, - "step": 4852 - }, - { - "epoch": 0.37327897854011227, - "learning_rate": 0.0020815094980859723, - "loss": 1.4486, - "step": 4853 - }, - { - "epoch": 0.37335589570033073, - "learning_rate": 0.002081175363285158, - "loss": 1.0082, - "step": 4854 - }, - { - "epoch": 0.3734328128605492, - "learning_rate": 0.0020808411945489086, - "loss": 1.3112, - "step": 4855 - }, - { - "epoch": 0.37350973002076765, - "learning_rate": 0.002080506991896736, - "loss": 0.7854, - "step": 4856 - }, - { - "epoch": 0.37358664718098605, - "learning_rate": 0.002080172755348156, - "loss": 1.2396, - "step": 4857 - }, - { - "epoch": 0.3736635643412045, - "learning_rate": 0.0020798384849226833, - "loss": 0.8316, - "step": 4858 - }, - { - "epoch": 0.37374048150142297, - "learning_rate": 0.0020795041806398375, - "loss": 0.9986, - "step": 4859 - }, - { - "epoch": 0.37381739866164143, - "learning_rate": 0.0020791698425191383, - "loss": 1.3407, - "step": 4860 - }, - { - "epoch": 0.37389431582185984, - "learning_rate": 0.002078835470580109, - "loss": 1.4374, - "step": 4861 - }, - { - "epoch": 0.3739712329820783, - "learning_rate": 0.0020785010648422734, - "loss": 1.0916, - "step": 4862 - }, - { - "epoch": 0.37404815014229675, - "learning_rate": 0.0020781666253251573, - "loss": 1.3573, - "step": 4863 - }, - { - "epoch": 0.3741250673025152, - "learning_rate": 0.0020778321520482893, - "loss": 1.072, - "step": 4864 - }, - { - "epoch": 0.3742019844627336, - "learning_rate": 0.0020774976450312, - "loss": 1.1448, - "step": 4865 - }, - { - "epoch": 0.3742789016229521, - "learning_rate": 0.0020771631042934215, - "loss": 1.396, - "step": 4866 - }, - { - "epoch": 0.37435581878317054, - "learning_rate": 0.0020768285298544873, - "loss": 1.3147, - "step": 4867 - }, - { - "epoch": 0.374432735943389, - "learning_rate": 0.002076493921733935, - "loss": 1.1093, - "step": 4868 - }, - { - "epoch": 0.3745096531036074, - "learning_rate": 0.002076159279951301, - "loss": 1.1783, - "step": 4869 - }, - { - "epoch": 0.37458657026382586, - "learning_rate": 0.0020758246045261266, - "loss": 0.8997, - "step": 4870 - }, - { - "epoch": 0.3746634874240443, - "learning_rate": 0.002075489895477953, - "loss": 1.4723, - "step": 4871 - }, - { - "epoch": 0.3747404045842628, - "learning_rate": 0.002075155152826325, - "loss": 1.376, - "step": 4872 - }, - { - "epoch": 0.3748173217444812, - "learning_rate": 0.002074820376590788, - "loss": 1.0331, - "step": 4873 - }, - { - "epoch": 0.37489423890469964, - "learning_rate": 0.0020744855667908903, - "loss": 0.9642, - "step": 4874 - }, - { - "epoch": 0.3749711560649181, - "learning_rate": 0.0020741507234461813, - "loss": 0.8382, - "step": 
4875 - }, - { - "epoch": 0.3750480732251365, - "learning_rate": 0.002073815846576214, - "loss": 1.3105, - "step": 4876 - }, - { - "epoch": 0.37512499038535496, - "learning_rate": 0.0020734809362005406, - "loss": 1.418, - "step": 4877 - }, - { - "epoch": 0.3752019075455734, - "learning_rate": 0.002073145992338718, - "loss": 1.347, - "step": 4878 - }, - { - "epoch": 0.3752788247057919, - "learning_rate": 0.0020728110150103033, - "loss": 1.0774, - "step": 4879 - }, - { - "epoch": 0.3753557418660103, - "learning_rate": 0.002072476004234857, - "loss": 1.0817, - "step": 4880 - }, - { - "epoch": 0.37543265902622874, - "learning_rate": 0.0020721409600319403, - "loss": 0.6908, - "step": 4881 - }, - { - "epoch": 0.3755095761864472, - "learning_rate": 0.0020718058824211157, - "loss": 1.1856, - "step": 4882 - }, - { - "epoch": 0.37558649334666566, - "learning_rate": 0.0020714707714219504, - "loss": 1.329, - "step": 4883 - }, - { - "epoch": 0.37566341050688407, - "learning_rate": 0.0020711356270540116, - "loss": 1.0417, - "step": 4884 - }, - { - "epoch": 0.3757403276671025, - "learning_rate": 0.002070800449336867, - "loss": 1.2722, - "step": 4885 - }, - { - "epoch": 0.375817244827321, - "learning_rate": 0.0020704652382900904, - "loss": 1.1854, - "step": 4886 - }, - { - "epoch": 0.37589416198753944, - "learning_rate": 0.002070129993933254, - "loss": 1.0445, - "step": 4887 - }, - { - "epoch": 0.37597107914775785, - "learning_rate": 0.0020697947162859327, - "loss": 1.4291, - "step": 4888 - }, - { - "epoch": 0.3760479963079763, - "learning_rate": 0.002069459405367704, - "loss": 1.4924, - "step": 4889 - }, - { - "epoch": 0.37612491346819477, - "learning_rate": 0.002069124061198147, - "loss": 1.0078, - "step": 4890 - }, - { - "epoch": 0.3762018306284132, - "learning_rate": 0.0020687886837968436, - "loss": 1.2222, - "step": 4891 - }, - { - "epoch": 0.37627874778863163, - "learning_rate": 0.0020684532731833753, - "loss": 1.2222, - "step": 4892 - }, - { - "epoch": 0.3763556649488501, - "learning_rate": 0.002068117829377329, - "loss": 1.1769, - "step": 4893 - }, - { - "epoch": 0.37643258210906855, - "learning_rate": 0.00206778235239829, - "loss": 1.2531, - "step": 4894 - }, - { - "epoch": 0.37650949926928695, - "learning_rate": 0.0020674468422658475, - "loss": 1.5151, - "step": 4895 - }, - { - "epoch": 0.3765864164295054, - "learning_rate": 0.002067111298999593, - "loss": 1.4172, - "step": 4896 - }, - { - "epoch": 0.37666333358972387, - "learning_rate": 0.0020667757226191185, - "loss": 1.1472, - "step": 4897 - }, - { - "epoch": 0.37674025074994233, - "learning_rate": 0.0020664401131440186, - "loss": 1.3908, - "step": 4898 - }, - { - "epoch": 0.37681716791016073, - "learning_rate": 0.0020661044705938907, - "loss": 0.8493, - "step": 4899 - }, - { - "epoch": 0.3768940850703792, - "learning_rate": 0.0020657687949883325, - "loss": 1.1832, - "step": 4900 - }, - { - "epoch": 0.37697100223059765, - "learning_rate": 0.0020654330863469446, - "loss": 1.0361, - "step": 4901 - }, - { - "epoch": 0.3770479193908161, - "learning_rate": 0.0020650973446893293, - "loss": 1.26, - "step": 4902 - }, - { - "epoch": 0.3771248365510345, - "learning_rate": 0.0020647615700350916, - "loss": 0.8732, - "step": 4903 - }, - { - "epoch": 0.377201753711253, - "learning_rate": 0.002064425762403837, - "loss": 0.8881, - "step": 4904 - }, - { - "epoch": 0.37727867087147143, - "learning_rate": 0.002064089921815174, - "loss": 1.1377, - "step": 4905 - }, - { - "epoch": 0.3773555880316899, - "learning_rate": 0.0020637540482887125, - "loss": 1.2488, - 
"step": 4906 - }, - { - "epoch": 0.3774325051919083, - "learning_rate": 0.002063418141844065, - "loss": 1.2511, - "step": 4907 - }, - { - "epoch": 0.37750942235212676, - "learning_rate": 0.0020630822025008446, - "loss": 1.0231, - "step": 4908 - }, - { - "epoch": 0.3775863395123452, - "learning_rate": 0.002062746230278667, - "loss": 0.8397, - "step": 4909 - }, - { - "epoch": 0.3776632566725637, - "learning_rate": 0.002062410225197152, - "loss": 1.0901, - "step": 4910 - }, - { - "epoch": 0.3777401738327821, - "learning_rate": 0.0020620741872759167, - "loss": 1.2004, - "step": 4911 - }, - { - "epoch": 0.37781709099300054, - "learning_rate": 0.0020617381165345845, - "loss": 1.1119, - "step": 4912 - }, - { - "epoch": 0.377894008153219, - "learning_rate": 0.0020614020129927774, - "loss": 1.1782, - "step": 4913 - }, - { - "epoch": 0.3779709253134374, - "learning_rate": 0.0020610658766701223, - "loss": 1.2043, - "step": 4914 - }, - { - "epoch": 0.37804784247365586, - "learning_rate": 0.002060729707586246, - "loss": 1.001, - "step": 4915 - }, - { - "epoch": 0.3781247596338743, - "learning_rate": 0.002060393505760777, - "loss": 1.0096, - "step": 4916 - }, - { - "epoch": 0.3782016767940928, - "learning_rate": 0.002060057271213348, - "loss": 1.0906, - "step": 4917 - }, - { - "epoch": 0.3782785939543112, - "learning_rate": 0.0020597210039635908, - "loss": 1.4346, - "step": 4918 - }, - { - "epoch": 0.37835551111452964, - "learning_rate": 0.002059384704031141, - "loss": 1.2484, - "step": 4919 - }, - { - "epoch": 0.3784324282747481, - "learning_rate": 0.0020590483714356352, - "loss": 0.989, - "step": 4920 - }, - { - "epoch": 0.37850934543496656, - "learning_rate": 0.0020587120061967127, - "loss": 1.3396, - "step": 4921 - }, - { - "epoch": 0.37858626259518496, - "learning_rate": 0.0020583756083340137, - "loss": 1.2484, - "step": 4922 - }, - { - "epoch": 0.3786631797554034, - "learning_rate": 0.002058039177867181, - "loss": 1.3944, - "step": 4923 - }, - { - "epoch": 0.3787400969156219, - "learning_rate": 0.002057702714815859, - "loss": 1.0276, - "step": 4924 - }, - { - "epoch": 0.37881701407584034, - "learning_rate": 0.002057366219199694, - "loss": 1.53, - "step": 4925 - }, - { - "epoch": 0.37889393123605875, - "learning_rate": 0.002057029691038335, - "loss": 0.8265, - "step": 4926 - }, - { - "epoch": 0.3789708483962772, - "learning_rate": 0.002056693130351432, - "loss": 1.056, - "step": 4927 - }, - { - "epoch": 0.37904776555649566, - "learning_rate": 0.0020563565371586367, - "loss": 1.2299, - "step": 4928 - }, - { - "epoch": 0.3791246827167141, - "learning_rate": 0.0020560199114796032, - "loss": 0.9996, - "step": 4929 - }, - { - "epoch": 0.3792015998769325, - "learning_rate": 0.0020556832533339883, - "loss": 1.2088, - "step": 4930 - }, - { - "epoch": 0.379278517037151, - "learning_rate": 0.002055346562741448, - "loss": 0.9918, - "step": 4931 - }, - { - "epoch": 0.37935543419736945, - "learning_rate": 0.0020550098397216442, - "loss": 1.0546, - "step": 4932 - }, - { - "epoch": 0.3794323513575879, - "learning_rate": 0.002054673084294237, - "loss": 1.1932, - "step": 4933 - }, - { - "epoch": 0.3795092685178063, - "learning_rate": 0.0020543362964788904, - "loss": 1.0308, - "step": 4934 - }, - { - "epoch": 0.37958618567802477, - "learning_rate": 0.002053999476295269, - "loss": 1.1372, - "step": 4935 - }, - { - "epoch": 0.3796631028382432, - "learning_rate": 0.002053662623763042, - "loss": 1.0867, - "step": 4936 - }, - { - "epoch": 0.37974001999846163, - "learning_rate": 0.0020533257389018767, - "loss": 0.9925, - 
"step": 4937 - }, - { - "epoch": 0.3798169371586801, - "learning_rate": 0.002052988821731445, - "loss": 1.2389, - "step": 4938 - }, - { - "epoch": 0.37989385431889855, - "learning_rate": 0.0020526518722714195, - "loss": 1.3574, - "step": 4939 - }, - { - "epoch": 0.379970771479117, - "learning_rate": 0.002052314890541475, - "loss": 1.0758, - "step": 4940 - }, - { - "epoch": 0.3800476886393354, - "learning_rate": 0.0020519778765612897, - "loss": 1.0655, - "step": 4941 - }, - { - "epoch": 0.38012460579955387, - "learning_rate": 0.00205164083035054, - "loss": 1.2907, - "step": 4942 - }, - { - "epoch": 0.38020152295977233, - "learning_rate": 0.002051303751928907, - "loss": 1.4228, - "step": 4943 - }, - { - "epoch": 0.3802784401199908, - "learning_rate": 0.0020509666413160736, - "loss": 1.1147, - "step": 4944 - }, - { - "epoch": 0.3803553572802092, - "learning_rate": 0.0020506294985317246, - "loss": 1.4504, - "step": 4945 - }, - { - "epoch": 0.38043227444042765, - "learning_rate": 0.0020502923235955445, - "loss": 1.242, - "step": 4946 - }, - { - "epoch": 0.3805091916006461, - "learning_rate": 0.0020499551165272223, - "loss": 0.7567, - "step": 4947 - }, - { - "epoch": 0.38058610876086457, - "learning_rate": 0.0020496178773464473, - "loss": 0.9129, - "step": 4948 - }, - { - "epoch": 0.380663025921083, - "learning_rate": 0.002049280606072912, - "loss": 1.1387, - "step": 4949 - }, - { - "epoch": 0.38073994308130144, - "learning_rate": 0.00204894330272631, - "loss": 1.184, - "step": 4950 - }, - { - "epoch": 0.3808168602415199, - "learning_rate": 0.002048605967326336, - "loss": 1.222, - "step": 4951 - }, - { - "epoch": 0.38089377740173835, - "learning_rate": 0.002048268599892688, - "loss": 0.8492, - "step": 4952 - }, - { - "epoch": 0.38097069456195676, - "learning_rate": 0.0020479312004450653, - "loss": 1.3435, - "step": 4953 - }, - { - "epoch": 0.3810476117221752, - "learning_rate": 0.0020475937690031683, - "loss": 1.1365, - "step": 4954 - }, - { - "epoch": 0.3811245288823937, - "learning_rate": 0.002047256305586701, - "loss": 1.2877, - "step": 4955 - }, - { - "epoch": 0.3812014460426121, - "learning_rate": 0.0020469188102153675, - "loss": 0.9223, - "step": 4956 - }, - { - "epoch": 0.38127836320283054, - "learning_rate": 0.0020465812829088743, - "loss": 1.0167, - "step": 4957 - }, - { - "epoch": 0.381355280363049, - "learning_rate": 0.0020462437236869304, - "loss": 1.1759, - "step": 4958 - }, - { - "epoch": 0.38143219752326746, - "learning_rate": 0.0020459061325692465, - "loss": 1.2252, - "step": 4959 - }, - { - "epoch": 0.38150911468348586, - "learning_rate": 0.0020455685095755343, - "loss": 0.9734, - "step": 4960 - }, - { - "epoch": 0.3815860318437043, - "learning_rate": 0.0020452308547255086, - "loss": 1.2894, - "step": 4961 - }, - { - "epoch": 0.3816629490039228, - "learning_rate": 0.0020448931680388843, - "loss": 1.0756, - "step": 4962 - }, - { - "epoch": 0.38173986616414124, - "learning_rate": 0.002044555449535381, - "loss": 0.8025, - "step": 4963 - }, - { - "epoch": 0.38181678332435964, - "learning_rate": 0.002044217699234717, - "loss": 1.1754, - "step": 4964 - }, - { - "epoch": 0.3818937004845781, - "learning_rate": 0.0020438799171566147, - "loss": 1.1561, - "step": 4965 - }, - { - "epoch": 0.38197061764479656, - "learning_rate": 0.002043542103320797, - "loss": 1.0023, - "step": 4966 - }, - { - "epoch": 0.382047534805015, - "learning_rate": 0.002043204257746989, - "loss": 1.3553, - "step": 4967 - }, - { - "epoch": 0.3821244519652334, - "learning_rate": 0.002042866380454919, - "loss": 1.31, - 
"step": 4968 - }, - { - "epoch": 0.3822013691254519, - "learning_rate": 0.002042528471464315, - "loss": 1.6954, - "step": 4969 - }, - { - "epoch": 0.38227828628567034, - "learning_rate": 0.002042190530794908, - "loss": 1.036, - "step": 4970 - }, - { - "epoch": 0.3823552034458888, - "learning_rate": 0.002041852558466431, - "loss": 1.1154, - "step": 4971 - }, - { - "epoch": 0.3824321206061072, - "learning_rate": 0.002041514554498619, - "loss": 1.462, - "step": 4972 - }, - { - "epoch": 0.38250903776632567, - "learning_rate": 0.0020411765189112066, - "loss": 1.0796, - "step": 4973 - }, - { - "epoch": 0.3825859549265441, - "learning_rate": 0.002040838451723934, - "loss": 1.0786, - "step": 4974 - }, - { - "epoch": 0.3826628720867626, - "learning_rate": 0.00204050035295654, - "loss": 1.1372, - "step": 4975 - }, - { - "epoch": 0.382739789246981, - "learning_rate": 0.0020401622226287677, - "loss": 0.897, - "step": 4976 - }, - { - "epoch": 0.38281670640719945, - "learning_rate": 0.00203982406076036, - "loss": 1.1475, - "step": 4977 - }, - { - "epoch": 0.3828936235674179, - "learning_rate": 0.002039485867371063, - "loss": 0.601, - "step": 4978 - }, - { - "epoch": 0.3829705407276363, - "learning_rate": 0.002039147642480624, - "loss": 0.9801, - "step": 4979 - }, - { - "epoch": 0.38304745788785477, - "learning_rate": 0.002038809386108792, - "loss": 1.0897, - "step": 4980 - }, - { - "epoch": 0.38312437504807323, - "learning_rate": 0.0020384710982753185, - "loss": 0.9211, - "step": 4981 - }, - { - "epoch": 0.3832012922082917, - "learning_rate": 0.002038132778999956, - "loss": 1.1034, - "step": 4982 - }, - { - "epoch": 0.3832782093685101, - "learning_rate": 0.0020377944283024607, - "loss": 0.9202, - "step": 4983 - }, - { - "epoch": 0.38335512652872855, - "learning_rate": 0.0020374560462025873, - "loss": 1.1294, - "step": 4984 - }, - { - "epoch": 0.383432043688947, - "learning_rate": 0.0020371176327200957, - "loss": 1.0268, - "step": 4985 - }, - { - "epoch": 0.38350896084916547, - "learning_rate": 0.0020367791878747456, - "loss": 1.0809, - "step": 4986 - }, - { - "epoch": 0.3835858780093839, - "learning_rate": 0.0020364407116862998, - "loss": 1.0792, - "step": 4987 - }, - { - "epoch": 0.38366279516960233, - "learning_rate": 0.002036102204174521, - "loss": 1.3723, - "step": 4988 - }, - { - "epoch": 0.3837397123298208, - "learning_rate": 0.002035763665359176, - "loss": 1.2951, - "step": 4989 - }, - { - "epoch": 0.38381662949003925, - "learning_rate": 0.0020354250952600325, - "loss": 1.113, - "step": 4990 - }, - { - "epoch": 0.38389354665025766, - "learning_rate": 0.0020350864938968594, - "loss": 1.2646, - "step": 4991 - }, - { - "epoch": 0.3839704638104761, - "learning_rate": 0.0020347478612894286, - "loss": 1.4198, - "step": 4992 - }, - { - "epoch": 0.3840473809706946, - "learning_rate": 0.002034409197457512, - "loss": 1.0156, - "step": 4993 - }, - { - "epoch": 0.38412429813091303, - "learning_rate": 0.0020340705024208864, - "loss": 0.9134, - "step": 4994 - }, - { - "epoch": 0.38420121529113144, - "learning_rate": 0.002033731776199328, - "loss": 1.1205, - "step": 4995 - }, - { - "epoch": 0.3842781324513499, - "learning_rate": 0.002033393018812614, - "loss": 1.2747, - "step": 4996 - }, - { - "epoch": 0.38435504961156836, - "learning_rate": 0.0020330542302805254, - "loss": 1.4218, - "step": 4997 - }, - { - "epoch": 0.38443196677178676, - "learning_rate": 0.0020327154106228457, - "loss": 1.263, - "step": 4998 - }, - { - "epoch": 0.3845088839320052, - "learning_rate": 0.0020323765598593575, - "loss": 1.347, - 
"step": 4999 - }, - { - "epoch": 0.3845858010922237, - "learning_rate": 0.0020320376780098475, - "loss": 1.3136, - "step": 5000 - }, - { - "epoch": 0.38466271825244214, - "learning_rate": 0.0020316987650941024, - "loss": 1.0214, - "step": 5001 - }, - { - "epoch": 0.38473963541266054, - "learning_rate": 0.002031359821131913, - "loss": 1.0561, - "step": 5002 - }, - { - "epoch": 0.384816552572879, - "learning_rate": 0.0020310208461430694, - "loss": 1.1358, - "step": 5003 - }, - { - "epoch": 0.38489346973309746, - "learning_rate": 0.0020306818401473648, - "loss": 0.9896, - "step": 5004 - }, - { - "epoch": 0.3849703868933159, - "learning_rate": 0.0020303428031645952, - "loss": 0.966, - "step": 5005 - }, - { - "epoch": 0.3850473040535343, - "learning_rate": 0.0020300037352145566, - "loss": 1.275, - "step": 5006 - }, - { - "epoch": 0.3851242212137528, - "learning_rate": 0.0020296646363170473, - "loss": 1.2206, - "step": 5007 - }, - { - "epoch": 0.38520113837397124, - "learning_rate": 0.0020293255064918678, - "loss": 1.3341, - "step": 5008 - }, - { - "epoch": 0.3852780555341897, - "learning_rate": 0.0020289863457588204, - "loss": 1.0249, - "step": 5009 - }, - { - "epoch": 0.3853549726944081, - "learning_rate": 0.002028647154137709, - "loss": 0.8251, - "step": 5010 - }, - { - "epoch": 0.38543188985462656, - "learning_rate": 0.0020283079316483392, - "loss": 1.453, - "step": 5011 - }, - { - "epoch": 0.385508807014845, - "learning_rate": 0.0020279686783105186, - "loss": 1.4294, - "step": 5012 - }, - { - "epoch": 0.3855857241750635, - "learning_rate": 0.002027629394144057, - "loss": 1.3009, - "step": 5013 - }, - { - "epoch": 0.3856626413352819, - "learning_rate": 0.0020272900791687655, - "loss": 1.0109, - "step": 5014 - }, - { - "epoch": 0.38573955849550035, - "learning_rate": 0.0020269507334044557, - "loss": 1.0238, - "step": 5015 - }, - { - "epoch": 0.3858164756557188, - "learning_rate": 0.0020266113568709445, - "loss": 1.1444, - "step": 5016 - }, - { - "epoch": 0.3858933928159372, - "learning_rate": 0.0020262719495880467, - "loss": 1.1523, - "step": 5017 - }, - { - "epoch": 0.38597030997615567, - "learning_rate": 0.0020259325115755816, - "loss": 1.1241, - "step": 5018 - }, - { - "epoch": 0.3860472271363741, - "learning_rate": 0.0020255930428533684, - "loss": 1.1907, - "step": 5019 - }, - { - "epoch": 0.3861241442965926, - "learning_rate": 0.0020252535434412302, - "loss": 1.1154, - "step": 5020 - }, - { - "epoch": 0.386201061456811, - "learning_rate": 0.00202491401335899, - "loss": 1.5667, - "step": 5021 - }, - { - "epoch": 0.38627797861702945, - "learning_rate": 0.0020245744526264744, - "loss": 1.1757, - "step": 5022 - }, - { - "epoch": 0.3863548957772479, - "learning_rate": 0.0020242348612635086, - "loss": 1.249, - "step": 5023 - }, - { - "epoch": 0.38643181293746637, - "learning_rate": 0.0020238952392899233, - "loss": 1.3065, - "step": 5024 - }, - { - "epoch": 0.38650873009768477, - "learning_rate": 0.0020235555867255495, - "loss": 1.2946, - "step": 5025 - }, - { - "epoch": 0.38658564725790323, - "learning_rate": 0.0020232159035902184, - "loss": 1.0711, - "step": 5026 - }, - { - "epoch": 0.3866625644181217, - "learning_rate": 0.002022876189903766, - "loss": 1.0078, - "step": 5027 - }, - { - "epoch": 0.38673948157834015, - "learning_rate": 0.0020225364456860277, - "loss": 1.0949, - "step": 5028 - }, - { - "epoch": 0.38681639873855855, - "learning_rate": 0.002022196670956842, - "loss": 1.0695, - "step": 5029 - }, - { - "epoch": 0.386893315898777, - "learning_rate": 0.0020218568657360486, - "loss": 
1.1325, - "step": 5030 - }, - { - "epoch": 0.38697023305899547, - "learning_rate": 0.0020215170300434885, - "loss": 1.1046, - "step": 5031 - }, - { - "epoch": 0.38704715021921393, - "learning_rate": 0.0020211771638990058, - "loss": 1.2303, - "step": 5032 - }, - { - "epoch": 0.38712406737943234, - "learning_rate": 0.0020208372673224456, - "loss": 0.9923, - "step": 5033 - }, - { - "epoch": 0.3872009845396508, - "learning_rate": 0.002020497340333654, - "loss": 0.8864, - "step": 5034 - }, - { - "epoch": 0.38727790169986925, - "learning_rate": 0.0020201573829524803, - "loss": 1.0268, - "step": 5035 - }, - { - "epoch": 0.3873548188600877, - "learning_rate": 0.0020198173951987753, - "loss": 1.0813, - "step": 5036 - }, - { - "epoch": 0.3874317360203061, - "learning_rate": 0.0020194773770923907, - "loss": 1.0885, - "step": 5037 - }, - { - "epoch": 0.3875086531805246, - "learning_rate": 0.0020191373286531805, - "loss": 1.3219, - "step": 5038 - }, - { - "epoch": 0.38758557034074304, - "learning_rate": 0.0020187972499010006, - "loss": 0.8751, - "step": 5039 - }, - { - "epoch": 0.38766248750096144, - "learning_rate": 0.0020184571408557095, - "loss": 1.1717, - "step": 5040 - }, - { - "epoch": 0.3877394046611799, - "learning_rate": 0.0020181170015371643, - "loss": 1.1097, - "step": 5041 - }, - { - "epoch": 0.38781632182139836, - "learning_rate": 0.0020177768319652283, - "loss": 1.1087, - "step": 5042 - }, - { - "epoch": 0.3878932389816168, - "learning_rate": 0.0020174366321597625, - "loss": 0.9766, - "step": 5043 - }, - { - "epoch": 0.3879701561418352, - "learning_rate": 0.0020170964021406336, - "loss": 1.1057, - "step": 5044 - }, - { - "epoch": 0.3880470733020537, - "learning_rate": 0.0020167561419277063, - "loss": 1.1334, - "step": 5045 - }, - { - "epoch": 0.38812399046227214, - "learning_rate": 0.0020164158515408497, - "loss": 0.9277, - "step": 5046 - }, - { - "epoch": 0.3882009076224906, - "learning_rate": 0.002016075530999933, - "loss": 1.266, - "step": 5047 - }, - { - "epoch": 0.388277824782709, - "learning_rate": 0.0020157351803248283, - "loss": 0.7329, - "step": 5048 - }, - { - "epoch": 0.38835474194292746, - "learning_rate": 0.002015394799535409, - "loss": 1.0342, - "step": 5049 - }, - { - "epoch": 0.3884316591031459, - "learning_rate": 0.00201505438865155, - "loss": 1.0407, - "step": 5050 - }, - { - "epoch": 0.3885085762633644, - "learning_rate": 0.0020147139476931284, - "loss": 0.8504, - "step": 5051 - }, - { - "epoch": 0.3885854934235828, - "learning_rate": 0.0020143734766800234, - "loss": 1.3714, - "step": 5052 - }, - { - "epoch": 0.38866241058380124, - "learning_rate": 0.002014032975632115, - "loss": 1.4464, - "step": 5053 - }, - { - "epoch": 0.3887393277440197, - "learning_rate": 0.002013692444569285, - "loss": 1.2104, - "step": 5054 - }, - { - "epoch": 0.38881624490423816, - "learning_rate": 0.002013351883511418, - "loss": 1.2731, - "step": 5055 - }, - { - "epoch": 0.38889316206445657, - "learning_rate": 0.0020130112924784, - "loss": 0.982, - "step": 5056 - }, - { - "epoch": 0.388970079224675, - "learning_rate": 0.002012670671490117, - "loss": 1.4885, - "step": 5057 - }, - { - "epoch": 0.3890469963848935, - "learning_rate": 0.00201233002056646, - "loss": 1.1467, - "step": 5058 - }, - { - "epoch": 0.3891239135451119, - "learning_rate": 0.002011989339727319, - "loss": 0.7181, - "step": 5059 - }, - { - "epoch": 0.38920083070533035, - "learning_rate": 0.0020116486289925867, - "loss": 1.0285, - "step": 5060 - }, - { - "epoch": 0.3892777478655488, - "learning_rate": 0.0020113078883821577, - 
"loss": 1.1083, - "step": 5061 - }, - { - "epoch": 0.38935466502576727, - "learning_rate": 0.0020109671179159285, - "loss": 1.218, - "step": 5062 - }, - { - "epoch": 0.38943158218598567, - "learning_rate": 0.0020106263176137965, - "loss": 1.3801, - "step": 5063 - }, - { - "epoch": 0.38950849934620413, - "learning_rate": 0.002010285487495662, - "loss": 0.8797, - "step": 5064 - }, - { - "epoch": 0.3895854165064226, - "learning_rate": 0.0020099446275814257, - "loss": 1.1166, - "step": 5065 - }, - { - "epoch": 0.38966233366664105, - "learning_rate": 0.0020096037378909914, - "loss": 1.5569, - "step": 5066 - }, - { - "epoch": 0.38973925082685945, - "learning_rate": 0.0020092628184442637, - "loss": 1.3894, - "step": 5067 - }, - { - "epoch": 0.3898161679870779, - "learning_rate": 0.002008921869261149, - "loss": 1.2409, - "step": 5068 - }, - { - "epoch": 0.38989308514729637, - "learning_rate": 0.0020085808903615564, - "loss": 1.0281, - "step": 5069 - }, - { - "epoch": 0.38997000230751483, - "learning_rate": 0.002008239881765395, - "loss": 1.0496, - "step": 5070 - }, - { - "epoch": 0.39004691946773323, - "learning_rate": 0.002007898843492578, - "loss": 1.1025, - "step": 5071 - }, - { - "epoch": 0.3901238366279517, - "learning_rate": 0.0020075577755630175, - "loss": 1.0, - "step": 5072 - }, - { - "epoch": 0.39020075378817015, - "learning_rate": 0.00200721667799663, - "loss": 1.55, - "step": 5073 - }, - { - "epoch": 0.3902776709483886, - "learning_rate": 0.0020068755508133316, - "loss": 1.0485, - "step": 5074 - }, - { - "epoch": 0.390354588108607, - "learning_rate": 0.0020065343940330422, - "loss": 1.0761, - "step": 5075 - }, - { - "epoch": 0.3904315052688255, - "learning_rate": 0.002006193207675681, - "loss": 1.1752, - "step": 5076 - }, - { - "epoch": 0.39050842242904393, - "learning_rate": 0.0020058519917611714, - "loss": 1.0596, - "step": 5077 - }, - { - "epoch": 0.39058533958926234, - "learning_rate": 0.002005510746309437, - "loss": 1.1141, - "step": 5078 - }, - { - "epoch": 0.3906622567494808, - "learning_rate": 0.0020051694713404023, - "loss": 0.8887, - "step": 5079 - }, - { - "epoch": 0.39073917390969926, - "learning_rate": 0.002004828166873996, - "loss": 1.158, - "step": 5080 - }, - { - "epoch": 0.3908160910699177, - "learning_rate": 0.002004486832930147, - "loss": 1.6662, - "step": 5081 - }, - { - "epoch": 0.3908930082301361, - "learning_rate": 0.0020041454695287867, - "loss": 1.2052, - "step": 5082 - }, - { - "epoch": 0.3909699253903546, - "learning_rate": 0.0020038040766898464, - "loss": 1.0947, - "step": 5083 - }, - { - "epoch": 0.39104684255057304, - "learning_rate": 0.002003462654433261, - "loss": 1.421, - "step": 5084 - }, - { - "epoch": 0.3911237597107915, - "learning_rate": 0.0020031212027789664, - "loss": 1.3049, - "step": 5085 - }, - { - "epoch": 0.3912006768710099, - "learning_rate": 0.002002779721746901, - "loss": 0.9061, - "step": 5086 - }, - { - "epoch": 0.39127759403122836, - "learning_rate": 0.002002438211357003, - "loss": 0.6874, - "step": 5087 - }, - { - "epoch": 0.3913545111914468, - "learning_rate": 0.002002096671629215, - "loss": 1.3376, - "step": 5088 - }, - { - "epoch": 0.3914314283516653, - "learning_rate": 0.0020017551025834786, - "loss": 1.5181, - "step": 5089 - }, - { - "epoch": 0.3915083455118837, - "learning_rate": 0.0020014135042397386, - "loss": 1.1969, - "step": 5090 - }, - { - "epoch": 0.39158526267210214, - "learning_rate": 0.002001071876617942, - "loss": 1.165, - "step": 5091 - }, - { - "epoch": 0.3916621798323206, - "learning_rate": 0.002000730219738036, - 
"loss": 0.9387, - "step": 5092 - }, - { - "epoch": 0.39173909699253906, - "learning_rate": 0.0020003885336199708, - "loss": 1.0339, - "step": 5093 - }, - { - "epoch": 0.39181601415275746, - "learning_rate": 0.0020000468182836974, - "loss": 0.985, - "step": 5094 - }, - { - "epoch": 0.3918929313129759, - "learning_rate": 0.0019997050737491696, - "loss": 1.1823, - "step": 5095 - }, - { - "epoch": 0.3919698484731944, - "learning_rate": 0.0019993633000363414, - "loss": 1.2862, - "step": 5096 - }, - { - "epoch": 0.39204676563341284, - "learning_rate": 0.00199902149716517, - "loss": 0.7466, - "step": 5097 - }, - { - "epoch": 0.39212368279363125, - "learning_rate": 0.0019986796651556126, - "loss": 1.0475, - "step": 5098 - }, - { - "epoch": 0.3922005999538497, - "learning_rate": 0.0019983378040276307, - "loss": 1.203, - "step": 5099 - }, - { - "epoch": 0.39227751711406816, - "learning_rate": 0.0019979959138011847, - "loss": 1.3668, - "step": 5100 - }, - { - "epoch": 0.39235443427428657, - "learning_rate": 0.001997653994496238, - "loss": 1.1902, - "step": 5101 - }, - { - "epoch": 0.392431351434505, - "learning_rate": 0.0019973120461327567, - "loss": 1.1621, - "step": 5102 - }, - { - "epoch": 0.3925082685947235, - "learning_rate": 0.0019969700687307054, - "loss": 1.4672, - "step": 5103 - }, - { - "epoch": 0.39258518575494195, - "learning_rate": 0.001996628062310055, - "loss": 1.1216, - "step": 5104 - }, - { - "epoch": 0.39266210291516035, - "learning_rate": 0.0019962860268907742, - "loss": 1.1515, - "step": 5105 - }, - { - "epoch": 0.3927390200753788, - "learning_rate": 0.001995943962492835, - "loss": 1.3502, - "step": 5106 - }, - { - "epoch": 0.39281593723559727, - "learning_rate": 0.0019956018691362114, - "loss": 1.1658, - "step": 5107 - }, - { - "epoch": 0.3928928543958157, - "learning_rate": 0.0019952597468408774, - "loss": 1.0952, - "step": 5108 - }, - { - "epoch": 0.39296977155603413, - "learning_rate": 0.001994917595626811, - "loss": 1.0106, - "step": 5109 - }, - { - "epoch": 0.3930466887162526, - "learning_rate": 0.0019945754155139906, - "loss": 1.2692, - "step": 5110 - }, - { - "epoch": 0.39312360587647105, - "learning_rate": 0.001994233206522396, - "loss": 0.8599, - "step": 5111 - }, - { - "epoch": 0.3932005230366895, - "learning_rate": 0.0019938909686720097, - "loss": 1.0223, - "step": 5112 - }, - { - "epoch": 0.3932774401969079, - "learning_rate": 0.0019935487019828147, - "loss": 0.9486, - "step": 5113 - }, - { - "epoch": 0.39335435735712637, - "learning_rate": 0.0019932064064747965, - "loss": 1.1028, - "step": 5114 - }, - { - "epoch": 0.39343127451734483, - "learning_rate": 0.0019928640821679426, - "loss": 1.1744, - "step": 5115 - }, - { - "epoch": 0.3935081916775633, - "learning_rate": 0.0019925217290822405, - "loss": 1.1037, - "step": 5116 - }, - { - "epoch": 0.3935851088377817, - "learning_rate": 0.0019921793472376824, - "loss": 0.9918, - "step": 5117 - }, - { - "epoch": 0.39366202599800015, - "learning_rate": 0.001991836936654259, - "loss": 1.2017, - "step": 5118 - }, - { - "epoch": 0.3937389431582186, - "learning_rate": 0.0019914944973519638, - "loss": 1.2374, - "step": 5119 - }, - { - "epoch": 0.393815860318437, - "learning_rate": 0.001991152029350793, - "loss": 1.2245, - "step": 5120 - }, - { - "epoch": 0.3938927774786555, - "learning_rate": 0.0019908095326707435, - "loss": 0.9647, - "step": 5121 - }, - { - "epoch": 0.39396969463887394, - "learning_rate": 0.0019904670073318136, - "loss": 1.2698, - "step": 5122 - }, - { - "epoch": 0.3940466117990924, - "learning_rate": 
0.001990124453354004, - "loss": 0.8463, - "step": 5123 - }, - { - "epoch": 0.3941235289593108, - "learning_rate": 0.001989781870757317, - "loss": 1.1296, - "step": 5124 - }, - { - "epoch": 0.39420044611952926, - "learning_rate": 0.001989439259561756, - "loss": 1.0608, - "step": 5125 - }, - { - "epoch": 0.3942773632797477, - "learning_rate": 0.001989096619787326, - "loss": 1.0291, - "step": 5126 - }, - { - "epoch": 0.3943542804399662, - "learning_rate": 0.0019887539514540356, - "loss": 0.788, - "step": 5127 - }, - { - "epoch": 0.3944311976001846, - "learning_rate": 0.0019884112545818927, - "loss": 0.974, - "step": 5128 - }, - { - "epoch": 0.39450811476040304, - "learning_rate": 0.0019880685291909068, - "loss": 1.3288, - "step": 5129 - }, - { - "epoch": 0.3945850319206215, - "learning_rate": 0.001987725775301091, - "loss": 1.181, - "step": 5130 - }, - { - "epoch": 0.39466194908083996, - "learning_rate": 0.001987382992932459, - "loss": 1.2275, - "step": 5131 - }, - { - "epoch": 0.39473886624105836, - "learning_rate": 0.001987040182105026, - "loss": 1.5413, - "step": 5132 - }, - { - "epoch": 0.3948157834012768, - "learning_rate": 0.00198669734283881, - "loss": 1.2808, - "step": 5133 - }, - { - "epoch": 0.3948927005614953, - "learning_rate": 0.001986354475153828, - "loss": 0.9555, - "step": 5134 - }, - { - "epoch": 0.39496961772171374, - "learning_rate": 0.001986011579070101, - "loss": 0.8047, - "step": 5135 - }, - { - "epoch": 0.39504653488193214, - "learning_rate": 0.0019856686546076523, - "loss": 1.4716, - "step": 5136 - }, - { - "epoch": 0.3951234520421506, - "learning_rate": 0.001985325701786505, - "loss": 0.8409, - "step": 5137 - }, - { - "epoch": 0.39520036920236906, - "learning_rate": 0.001984982720626683, - "loss": 0.8102, - "step": 5138 - }, - { - "epoch": 0.39527728636258747, - "learning_rate": 0.0019846397111482152, - "loss": 1.1077, - "step": 5139 - }, - { - "epoch": 0.3953542035228059, - "learning_rate": 0.0019842966733711297, - "loss": 1.0789, - "step": 5140 - }, - { - "epoch": 0.3954311206830244, - "learning_rate": 0.0019839536073154563, - "loss": 0.9913, - "step": 5141 - }, - { - "epoch": 0.39550803784324284, - "learning_rate": 0.0019836105130012277, - "loss": 1.097, - "step": 5142 - }, - { - "epoch": 0.39558495500346125, - "learning_rate": 0.001983267390448477, - "loss": 1.2007, - "step": 5143 - }, - { - "epoch": 0.3956618721636797, - "learning_rate": 0.00198292423967724, - "loss": 0.8776, - "step": 5144 - }, - { - "epoch": 0.39573878932389817, - "learning_rate": 0.001982581060707553, - "loss": 1.526, - "step": 5145 - }, - { - "epoch": 0.3958157064841166, - "learning_rate": 0.0019822378535594553, - "loss": 0.8039, - "step": 5146 - }, - { - "epoch": 0.39589262364433503, - "learning_rate": 0.0019818946182529867, - "loss": 0.8459, - "step": 5147 - }, - { - "epoch": 0.3959695408045535, - "learning_rate": 0.001981551354808189, - "loss": 1.148, - "step": 5148 - }, - { - "epoch": 0.39604645796477195, - "learning_rate": 0.0019812080632451053, - "loss": 1.0183, - "step": 5149 - }, - { - "epoch": 0.3961233751249904, - "learning_rate": 0.0019808647435837824, - "loss": 1.1046, - "step": 5150 - }, - { - "epoch": 0.3962002922852088, - "learning_rate": 0.001980521395844265, - "loss": 0.8268, - "step": 5151 - }, - { - "epoch": 0.39627720944542727, - "learning_rate": 0.0019801780200466032, - "loss": 0.7725, - "step": 5152 - }, - { - "epoch": 0.39635412660564573, - "learning_rate": 0.001979834616210846, - "loss": 1.1379, - "step": 5153 - }, - { - "epoch": 0.3964310437658642, - "learning_rate": 
0.0019794911843570456, - "loss": 1.2747, - "step": 5154 - }, - { - "epoch": 0.3965079609260826, - "learning_rate": 0.0019791477245052554, - "loss": 1.1457, - "step": 5155 - }, - { - "epoch": 0.39658487808630105, - "learning_rate": 0.0019788042366755297, - "loss": 1.4812, - "step": 5156 - }, - { - "epoch": 0.3966617952465195, - "learning_rate": 0.001978460720887926, - "loss": 1.0438, - "step": 5157 - }, - { - "epoch": 0.39673871240673797, - "learning_rate": 0.001978117177162502, - "loss": 1.4135, - "step": 5158 - }, - { - "epoch": 0.3968156295669564, - "learning_rate": 0.0019777736055193182, - "loss": 1.7645, - "step": 5159 - }, - { - "epoch": 0.39689254672717483, - "learning_rate": 0.001977430005978435, - "loss": 1.4837, - "step": 5160 - }, - { - "epoch": 0.3969694638873933, - "learning_rate": 0.0019770863785599168, - "loss": 1.1554, - "step": 5161 - }, - { - "epoch": 0.3970463810476117, - "learning_rate": 0.001976742723283827, - "loss": 1.0866, - "step": 5162 - }, - { - "epoch": 0.39712329820783016, - "learning_rate": 0.0019763990401702337, - "loss": 1.3217, - "step": 5163 - }, - { - "epoch": 0.3972002153680486, - "learning_rate": 0.001976055329239203, - "loss": 1.1724, - "step": 5164 - }, - { - "epoch": 0.3972771325282671, - "learning_rate": 0.0019757115905108063, - "loss": 0.9498, - "step": 5165 - }, - { - "epoch": 0.3973540496884855, - "learning_rate": 0.0019753678240051137, - "loss": 1.0203, - "step": 5166 - }, - { - "epoch": 0.39743096684870394, - "learning_rate": 0.001975024029742198, - "loss": 1.16, - "step": 5167 - }, - { - "epoch": 0.3975078840089224, - "learning_rate": 0.0019746802077421346, - "loss": 1.2596, - "step": 5168 - }, - { - "epoch": 0.39758480116914086, - "learning_rate": 0.001974336358024999, - "loss": 1.1585, - "step": 5169 - }, - { - "epoch": 0.39766171832935926, - "learning_rate": 0.0019739924806108696, - "loss": 1.3912, - "step": 5170 - }, - { - "epoch": 0.3977386354895777, - "learning_rate": 0.0019736485755198247, - "loss": 1.162, - "step": 5171 - }, - { - "epoch": 0.3978155526497962, - "learning_rate": 0.0019733046427719463, - "loss": 0.9619, - "step": 5172 - }, - { - "epoch": 0.39789246981001464, - "learning_rate": 0.001972960682387316, - "loss": 1.134, - "step": 5173 - }, - { - "epoch": 0.39796938697023304, - "learning_rate": 0.001972616694386019, - "loss": 1.0416, - "step": 5174 - }, - { - "epoch": 0.3980463041304515, - "learning_rate": 0.0019722726787881405, - "loss": 1.3365, - "step": 5175 - }, - { - "epoch": 0.39812322129066996, - "learning_rate": 0.001971928635613768, - "loss": 1.395, - "step": 5176 - }, - { - "epoch": 0.3982001384508884, - "learning_rate": 0.001971584564882991, - "loss": 1.0601, - "step": 5177 - }, - { - "epoch": 0.3982770556111068, - "learning_rate": 0.0019712404666158995, - "loss": 0.9681, - "step": 5178 - }, - { - "epoch": 0.3983539727713253, - "learning_rate": 0.001970896340832586, - "loss": 1.0862, - "step": 5179 - }, - { - "epoch": 0.39843088993154374, - "learning_rate": 0.0019705521875531444, - "loss": 1.1627, - "step": 5180 - }, - { - "epoch": 0.39850780709176215, - "learning_rate": 0.0019702080067976703, - "loss": 1.2503, - "step": 5181 - }, - { - "epoch": 0.3985847242519806, - "learning_rate": 0.001969863798586261, - "loss": 1.2246, - "step": 5182 - }, - { - "epoch": 0.39866164141219906, - "learning_rate": 0.0019695195629390143, - "loss": 1.2009, - "step": 5183 - }, - { - "epoch": 0.3987385585724175, - "learning_rate": 0.0019691752998760308, - "loss": 1.3125, - "step": 5184 - }, - { - "epoch": 0.3988154757326359, - 
"learning_rate": 0.0019688310094174133, - "loss": 0.9761, - "step": 5185 - }, - { - "epoch": 0.3988923928928544, - "learning_rate": 0.001968486691583264, - "loss": 0.8742, - "step": 5186 - }, - { - "epoch": 0.39896931005307285, - "learning_rate": 0.001968142346393689, - "loss": 1.2871, - "step": 5187 - }, - { - "epoch": 0.3990462272132913, - "learning_rate": 0.0019677979738687936, - "loss": 1.4258, - "step": 5188 - }, - { - "epoch": 0.3991231443735097, - "learning_rate": 0.001967453574028688, - "loss": 1.2961, - "step": 5189 - }, - { - "epoch": 0.39920006153372817, - "learning_rate": 0.0019671091468934803, - "loss": 0.9601, - "step": 5190 - }, - { - "epoch": 0.3992769786939466, - "learning_rate": 0.001966764692483283, - "loss": 1.4248, - "step": 5191 - }, - { - "epoch": 0.3993538958541651, - "learning_rate": 0.001966420210818209, - "loss": 1.3774, - "step": 5192 - }, - { - "epoch": 0.3994308130143835, - "learning_rate": 0.0019660757019183727, - "loss": 1.2007, - "step": 5193 - }, - { - "epoch": 0.39950773017460195, - "learning_rate": 0.0019657311658038902, - "loss": 1.1962, - "step": 5194 - }, - { - "epoch": 0.3995846473348204, - "learning_rate": 0.00196538660249488, - "loss": 0.8821, - "step": 5195 - }, - { - "epoch": 0.39966156449503887, - "learning_rate": 0.0019650420120114606, - "loss": 1.1384, - "step": 5196 - }, - { - "epoch": 0.39973848165525727, - "learning_rate": 0.0019646973943737538, - "loss": 0.992, - "step": 5197 - }, - { - "epoch": 0.39981539881547573, - "learning_rate": 0.0019643527496018822, - "loss": 1.0595, - "step": 5198 - }, - { - "epoch": 0.3998923159756942, - "learning_rate": 0.001964008077715969, - "loss": 1.2635, - "step": 5199 - }, - { - "epoch": 0.39996923313591265, - "learning_rate": 0.00196366337873614, - "loss": 1.101, - "step": 5200 - }, - { - "epoch": 0.40004615029613105, - "learning_rate": 0.0019633186526825243, - "loss": 0.9079, - "step": 5201 - }, - { - "epoch": 0.4001230674563495, - "learning_rate": 0.0019629738995752487, - "loss": 1.1907, - "step": 5202 - }, - { - "epoch": 0.40019998461656797, - "learning_rate": 0.0019626291194344454, - "loss": 1.1628, - "step": 5203 - }, - { - "epoch": 0.4002769017767864, - "learning_rate": 0.001962284312280245, - "loss": 1.7953, - "step": 5204 - }, - { - "epoch": 0.40035381893700484, - "learning_rate": 0.0019619394781327827, - "loss": 1.1781, - "step": 5205 - }, - { - "epoch": 0.4004307360972233, - "learning_rate": 0.001961594617012192, - "loss": 1.2836, - "step": 5206 - }, - { - "epoch": 0.40050765325744175, - "learning_rate": 0.001961249728938611, - "loss": 1.0594, - "step": 5207 - }, - { - "epoch": 0.40058457041766016, - "learning_rate": 0.0019609048139321776, - "loss": 0.2582, - "step": 5208 - }, - { - "epoch": 0.4006614875778786, - "learning_rate": 0.001960559872013031, - "loss": 1.0238, - "step": 5209 - }, - { - "epoch": 0.4007384047380971, - "learning_rate": 0.001960214903201314, - "loss": 1.1124, - "step": 5210 - }, - { - "epoch": 0.40081532189831554, - "learning_rate": 0.0019598699075171694, - "loss": 1.0666, - "step": 5211 - }, - { - "epoch": 0.40089223905853394, - "learning_rate": 0.0019595248849807417, - "loss": 1.2561, - "step": 5212 - }, - { - "epoch": 0.4009691562187524, - "learning_rate": 0.001959179835612177, - "loss": 1.0237, - "step": 5213 - }, - { - "epoch": 0.40104607337897086, - "learning_rate": 0.001958834759431623, - "loss": 1.1575, - "step": 5214 - }, - { - "epoch": 0.4011229905391893, - "learning_rate": 0.001958489656459229, - "loss": 1.2128, - "step": 5215 - }, - { - "epoch": 
0.4011999076994077, - "learning_rate": 0.001958144526715147, - "loss": 1.1695, - "step": 5216 - }, - { - "epoch": 0.4012768248596262, - "learning_rate": 0.0019577993702195274, - "loss": 1.2952, - "step": 5217 - }, - { - "epoch": 0.40135374201984464, - "learning_rate": 0.0019574541869925265, - "loss": 1.0493, - "step": 5218 - }, - { - "epoch": 0.4014306591800631, - "learning_rate": 0.001957108977054298, - "loss": 1.0179, - "step": 5219 - }, - { - "epoch": 0.4015075763402815, - "learning_rate": 0.0019567637404250006, - "loss": 1.3156, - "step": 5220 - }, - { - "epoch": 0.40158449350049996, - "learning_rate": 0.001956418477124792, - "loss": 1.2617, - "step": 5221 - }, - { - "epoch": 0.4016614106607184, - "learning_rate": 0.0019560731871738333, - "loss": 1.7767, - "step": 5222 - }, - { - "epoch": 0.4017383278209368, - "learning_rate": 0.0019557278705922853, - "loss": 1.0942, - "step": 5223 - }, - { - "epoch": 0.4018152449811553, - "learning_rate": 0.0019553825274003125, - "loss": 1.2732, - "step": 5224 - }, - { - "epoch": 0.40189216214137374, - "learning_rate": 0.001955037157618079, - "loss": 1.2963, - "step": 5225 - }, - { - "epoch": 0.4019690793015922, - "learning_rate": 0.001954691761265752, - "loss": 1.0306, - "step": 5226 - }, - { - "epoch": 0.4020459964618106, - "learning_rate": 0.001954346338363499, - "loss": 1.1381, - "step": 5227 - }, - { - "epoch": 0.40212291362202907, - "learning_rate": 0.0019540008889314896, - "loss": 1.1768, - "step": 5228 - }, - { - "epoch": 0.4021998307822475, - "learning_rate": 0.001953655412989895, - "loss": 0.9864, - "step": 5229 - }, - { - "epoch": 0.402276747942466, - "learning_rate": 0.001953309910558888, - "loss": 1.0862, - "step": 5230 - }, - { - "epoch": 0.4023536651026844, - "learning_rate": 0.0019529643816586437, - "loss": 1.0612, - "step": 5231 - }, - { - "epoch": 0.40243058226290285, - "learning_rate": 0.0019526188263093366, - "loss": 1.0074, - "step": 5232 - }, - { - "epoch": 0.4025074994231213, - "learning_rate": 0.0019522732445311445, - "loss": 1.1565, - "step": 5233 - }, - { - "epoch": 0.40258441658333977, - "learning_rate": 0.0019519276363442464, - "loss": 0.9626, - "step": 5234 - }, - { - "epoch": 0.40266133374355817, - "learning_rate": 0.0019515820017688226, - "loss": 1.3013, - "step": 5235 - }, - { - "epoch": 0.40273825090377663, - "learning_rate": 0.001951236340825055, - "loss": 1.0675, - "step": 5236 - }, - { - "epoch": 0.4028151680639951, - "learning_rate": 0.0019508906535331272, - "loss": 0.8322, - "step": 5237 - }, - { - "epoch": 0.40289208522421355, - "learning_rate": 0.0019505449399132245, - "loss": 1.354, - "step": 5238 - }, - { - "epoch": 0.40296900238443195, - "learning_rate": 0.001950199199985533, - "loss": 0.9497, - "step": 5239 - }, - { - "epoch": 0.4030459195446504, - "learning_rate": 0.001949853433770241, - "loss": 1.0087, - "step": 5240 - }, - { - "epoch": 0.40312283670486887, - "learning_rate": 0.001949507641287538, - "loss": 1.4448, - "step": 5241 - }, - { - "epoch": 0.4031997538650873, - "learning_rate": 0.0019491618225576158, - "loss": 1.2243, - "step": 5242 - }, - { - "epoch": 0.40327667102530573, - "learning_rate": 0.0019488159776006664, - "loss": 1.516, - "step": 5243 - }, - { - "epoch": 0.4033535881855242, - "learning_rate": 0.0019484701064368842, - "loss": 1.4581, - "step": 5244 - }, - { - "epoch": 0.40343050534574265, - "learning_rate": 0.0019481242090864655, - "loss": 1.1713, - "step": 5245 - }, - { - "epoch": 0.40350742250596106, - "learning_rate": 0.0019477782855696069, - "loss": 1.663, - "step": 5246 - }, - { - 
"epoch": 0.4035843396661795, - "learning_rate": 0.0019474323359065077, - "loss": 1.2683, - "step": 5247 - }, - { - "epoch": 0.403661256826398, - "learning_rate": 0.0019470863601173678, - "loss": 1.7624, - "step": 5248 - }, - { - "epoch": 0.40373817398661643, - "learning_rate": 0.0019467403582223894, - "loss": 0.7888, - "step": 5249 - }, - { - "epoch": 0.40381509114683484, - "learning_rate": 0.0019463943302417766, - "loss": 1.2357, - "step": 5250 - }, - { - "epoch": 0.4038920083070533, - "learning_rate": 0.0019460482761957333, - "loss": 1.1859, - "step": 5251 - }, - { - "epoch": 0.40396892546727176, - "learning_rate": 0.0019457021961044659, - "loss": 0.9087, - "step": 5252 - }, - { - "epoch": 0.4040458426274902, - "learning_rate": 0.001945356089988183, - "loss": 0.9273, - "step": 5253 - }, - { - "epoch": 0.4041227597877086, - "learning_rate": 0.001945009957867094, - "loss": 1.2712, - "step": 5254 - }, - { - "epoch": 0.4041996769479271, - "learning_rate": 0.0019446637997614093, - "loss": 1.2907, - "step": 5255 - }, - { - "epoch": 0.40427659410814554, - "learning_rate": 0.0019443176156913424, - "loss": 1.1121, - "step": 5256 - }, - { - "epoch": 0.404353511268364, - "learning_rate": 0.0019439714056771063, - "loss": 1.2598, - "step": 5257 - }, - { - "epoch": 0.4044304284285824, - "learning_rate": 0.0019436251697389183, - "loss": 1.3612, - "step": 5258 - }, - { - "epoch": 0.40450734558880086, - "learning_rate": 0.001943278907896993, - "loss": 1.1818, - "step": 5259 - }, - { - "epoch": 0.4045842627490193, - "learning_rate": 0.0019429326201715511, - "loss": 0.9665, - "step": 5260 - }, - { - "epoch": 0.4046611799092378, - "learning_rate": 0.0019425863065828115, - "loss": 1.2597, - "step": 5261 - }, - { - "epoch": 0.4047380970694562, - "learning_rate": 0.0019422399671509968, - "loss": 1.4157, - "step": 5262 - }, - { - "epoch": 0.40481501422967464, - "learning_rate": 0.0019418936018963289, - "loss": 1.0955, - "step": 5263 - }, - { - "epoch": 0.4048919313898931, - "learning_rate": 0.0019415472108390331, - "loss": 1.1623, - "step": 5264 - }, - { - "epoch": 0.4049688485501115, - "learning_rate": 0.0019412007939993356, - "loss": 1.0951, - "step": 5265 - }, - { - "epoch": 0.40504576571032996, - "learning_rate": 0.0019408543513974637, - "loss": 0.9792, - "step": 5266 - }, - { - "epoch": 0.4051226828705484, - "learning_rate": 0.0019405078830536473, - "loss": 1.039, - "step": 5267 - }, - { - "epoch": 0.4051996000307669, - "learning_rate": 0.0019401613889881162, - "loss": 1.0961, - "step": 5268 - }, - { - "epoch": 0.4052765171909853, - "learning_rate": 0.0019398148692211033, - "loss": 1.0801, - "step": 5269 - }, - { - "epoch": 0.40535343435120375, - "learning_rate": 0.001939468323772841, - "loss": 0.7251, - "step": 5270 - }, - { - "epoch": 0.4054303515114222, - "learning_rate": 0.001939121752663566, - "loss": 1.0864, - "step": 5271 - }, - { - "epoch": 0.40550726867164066, - "learning_rate": 0.0019387751559135137, - "loss": 1.3339, - "step": 5272 - }, - { - "epoch": 0.40558418583185907, - "learning_rate": 0.0019384285335429228, - "loss": 0.8261, - "step": 5273 - }, - { - "epoch": 0.4056611029920775, - "learning_rate": 0.0019380818855720334, - "loss": 0.9781, - "step": 5274 - }, - { - "epoch": 0.405738020152296, - "learning_rate": 0.0019377352120210861, - "loss": 1.5219, - "step": 5275 - }, - { - "epoch": 0.40581493731251445, - "learning_rate": 0.0019373885129103231, - "loss": 1.1983, - "step": 5276 - }, - { - "epoch": 0.40589185447273285, - "learning_rate": 0.0019370417882599892, - "loss": 1.3282, - "step": 
5277 - }, - { - "epoch": 0.4059687716329513, - "learning_rate": 0.0019366950380903297, - "loss": 1.3461, - "step": 5278 - }, - { - "epoch": 0.40604568879316977, - "learning_rate": 0.0019363482624215917, - "loss": 1.0908, - "step": 5279 - }, - { - "epoch": 0.4061226059533882, - "learning_rate": 0.0019360014612740245, - "loss": 1.3082, - "step": 5280 - }, - { - "epoch": 0.40619952311360663, - "learning_rate": 0.0019356546346678773, - "loss": 1.2681, - "step": 5281 - }, - { - "epoch": 0.4062764402738251, - "learning_rate": 0.001935307782623402, - "loss": 0.9953, - "step": 5282 - }, - { - "epoch": 0.40635335743404355, - "learning_rate": 0.0019349609051608511, - "loss": 1.1353, - "step": 5283 - }, - { - "epoch": 0.40643027459426195, - "learning_rate": 0.0019346140023004802, - "loss": 1.6298, - "step": 5284 - }, - { - "epoch": 0.4065071917544804, - "learning_rate": 0.0019342670740625447, - "loss": 1.1867, - "step": 5285 - }, - { - "epoch": 0.40658410891469887, - "learning_rate": 0.0019339201204673017, - "loss": 0.8309, - "step": 5286 - }, - { - "epoch": 0.40666102607491733, - "learning_rate": 0.0019335731415350112, - "loss": 1.4024, - "step": 5287 - }, - { - "epoch": 0.40673794323513573, - "learning_rate": 0.0019332261372859332, - "loss": 1.2121, - "step": 5288 - }, - { - "epoch": 0.4068148603953542, - "learning_rate": 0.0019328791077403294, - "loss": 1.2537, - "step": 5289 - }, - { - "epoch": 0.40689177755557265, - "learning_rate": 0.0019325320529184628, - "loss": 1.2006, - "step": 5290 - }, - { - "epoch": 0.4069686947157911, - "learning_rate": 0.0019321849728405995, - "loss": 1.4749, - "step": 5291 - }, - { - "epoch": 0.4070456118760095, - "learning_rate": 0.0019318378675270051, - "loss": 1.3887, - "step": 5292 - }, - { - "epoch": 0.407122529036228, - "learning_rate": 0.0019314907369979474, - "loss": 0.9718, - "step": 5293 - }, - { - "epoch": 0.40719944619644644, - "learning_rate": 0.0019311435812736958, - "loss": 1.0322, - "step": 5294 - }, - { - "epoch": 0.4072763633566649, - "learning_rate": 0.0019307964003745215, - "loss": 1.1825, - "step": 5295 - }, - { - "epoch": 0.4073532805168833, - "learning_rate": 0.0019304491943206963, - "loss": 0.9931, - "step": 5296 - }, - { - "epoch": 0.40743019767710176, - "learning_rate": 0.001930101963132494, - "loss": 1.2207, - "step": 5297 - }, - { - "epoch": 0.4075071148373202, - "learning_rate": 0.0019297547068301895, - "loss": 1.8569, - "step": 5298 - }, - { - "epoch": 0.4075840319975387, - "learning_rate": 0.00192940742543406, - "loss": 1.2515, - "step": 5299 - }, - { - "epoch": 0.4076609491577571, - "learning_rate": 0.0019290601189643837, - "loss": 1.0231, - "step": 5300 - }, - { - "epoch": 0.40773786631797554, - "learning_rate": 0.0019287127874414395, - "loss": 1.1391, - "step": 5301 - }, - { - "epoch": 0.407814783478194, - "learning_rate": 0.0019283654308855092, - "loss": 1.1344, - "step": 5302 - }, - { - "epoch": 0.4078917006384124, - "learning_rate": 0.001928018049316875, - "loss": 1.3173, - "step": 5303 - }, - { - "epoch": 0.40796861779863086, - "learning_rate": 0.0019276706427558206, - "loss": 1.2478, - "step": 5304 - }, - { - "epoch": 0.4080455349588493, - "learning_rate": 0.0019273232112226315, - "loss": 0.9447, - "step": 5305 - }, - { - "epoch": 0.4081224521190678, - "learning_rate": 0.0019269757547375952, - "loss": 1.4481, - "step": 5306 - }, - { - "epoch": 0.4081993692792862, - "learning_rate": 0.0019266282733209994, - "loss": 1.2329, - "step": 5307 - }, - { - "epoch": 0.40827628643950464, - "learning_rate": 0.001926280766993134, - "loss": 
1.2065, - "step": 5308 - }, - { - "epoch": 0.4083532035997231, - "learning_rate": 0.0019259332357742902, - "loss": 1.0585, - "step": 5309 - }, - { - "epoch": 0.40843012075994156, - "learning_rate": 0.0019255856796847608, - "loss": 0.7993, - "step": 5310 - }, - { - "epoch": 0.40850703792015997, - "learning_rate": 0.0019252380987448408, - "loss": 1.4017, - "step": 5311 - }, - { - "epoch": 0.4085839550803784, - "learning_rate": 0.001924890492974824, - "loss": 1.3804, - "step": 5312 - }, - { - "epoch": 0.4086608722405969, - "learning_rate": 0.0019245428623950091, - "loss": 0.8605, - "step": 5313 - }, - { - "epoch": 0.40873778940081534, - "learning_rate": 0.0019241952070256938, - "loss": 1.347, - "step": 5314 - }, - { - "epoch": 0.40881470656103375, - "learning_rate": 0.001923847526887179, - "loss": 1.0736, - "step": 5315 - }, - { - "epoch": 0.4088916237212522, - "learning_rate": 0.0019234998219997649, - "loss": 1.2824, - "step": 5316 - }, - { - "epoch": 0.40896854088147067, - "learning_rate": 0.0019231520923837547, - "loss": 0.9933, - "step": 5317 - }, - { - "epoch": 0.4090454580416891, - "learning_rate": 0.0019228043380594524, - "loss": 1.2015, - "step": 5318 - }, - { - "epoch": 0.40912237520190753, - "learning_rate": 0.0019224565590471648, - "loss": 0.7587, - "step": 5319 - }, - { - "epoch": 0.409199292362126, - "learning_rate": 0.0019221087553671983, - "loss": 1.2312, - "step": 5320 - }, - { - "epoch": 0.40927620952234445, - "learning_rate": 0.0019217609270398615, - "loss": 1.2008, - "step": 5321 - }, - { - "epoch": 0.4093531266825629, - "learning_rate": 0.0019214130740854648, - "loss": 1.3925, - "step": 5322 - }, - { - "epoch": 0.4094300438427813, - "learning_rate": 0.0019210651965243196, - "loss": 1.0074, - "step": 5323 - }, - { - "epoch": 0.40950696100299977, - "learning_rate": 0.001920717294376739, - "loss": 1.5791, - "step": 5324 - }, - { - "epoch": 0.40958387816321823, - "learning_rate": 0.0019203693676630364, - "loss": 1.162, - "step": 5325 - }, - { - "epoch": 0.40966079532343663, - "learning_rate": 0.0019200214164035291, - "loss": 1.3833, - "step": 5326 - }, - { - "epoch": 0.4097377124836551, - "learning_rate": 0.001919673440618533, - "loss": 1.287, - "step": 5327 - }, - { - "epoch": 0.40981462964387355, - "learning_rate": 0.0019193254403283675, - "loss": 1.3652, - "step": 5328 - }, - { - "epoch": 0.409891546804092, - "learning_rate": 0.0019189774155533525, - "loss": 0.8447, - "step": 5329 - }, - { - "epoch": 0.4099684639643104, - "learning_rate": 0.0019186293663138092, - "loss": 1.4372, - "step": 5330 - }, - { - "epoch": 0.4100453811245289, - "learning_rate": 0.0019182812926300614, - "loss": 1.3139, - "step": 5331 - }, - { - "epoch": 0.41012229828474733, - "learning_rate": 0.0019179331945224328, - "loss": 0.9102, - "step": 5332 - }, - { - "epoch": 0.4101992154449658, - "learning_rate": 0.0019175850720112495, - "loss": 1.0867, - "step": 5333 - }, - { - "epoch": 0.4102761326051842, - "learning_rate": 0.001917236925116839, - "loss": 1.0796, - "step": 5334 - }, - { - "epoch": 0.41035304976540266, - "learning_rate": 0.0019168887538595291, - "loss": 0.8754, - "step": 5335 - }, - { - "epoch": 0.4104299669256211, - "learning_rate": 0.0019165405582596503, - "loss": 1.4631, - "step": 5336 - }, - { - "epoch": 0.4105068840858396, - "learning_rate": 0.0019161923383375344, - "loss": 1.0335, - "step": 5337 - }, - { - "epoch": 0.410583801246058, - "learning_rate": 0.0019158440941135144, - "loss": 1.2179, - "step": 5338 - }, - { - "epoch": 0.41066071840627644, - "learning_rate": 
0.0019154958256079242, - "loss": 1.4298, - "step": 5339 - }, - { - "epoch": 0.4107376355664949, - "learning_rate": 0.0019151475328410994, - "loss": 1.6193, - "step": 5340 - }, - { - "epoch": 0.41081455272671336, - "learning_rate": 0.001914799215833378, - "loss": 1.0342, - "step": 5341 - }, - { - "epoch": 0.41089146988693176, - "learning_rate": 0.0019144508746050976, - "loss": 1.6808, - "step": 5342 - }, - { - "epoch": 0.4109683870471502, - "learning_rate": 0.0019141025091765989, - "loss": 1.2041, - "step": 5343 - }, - { - "epoch": 0.4110453042073687, - "learning_rate": 0.0019137541195682233, - "loss": 0.9751, - "step": 5344 - }, - { - "epoch": 0.4111222213675871, - "learning_rate": 0.0019134057058003133, - "loss": 1.1871, - "step": 5345 - }, - { - "epoch": 0.41119913852780554, - "learning_rate": 0.0019130572678932134, - "loss": 0.9748, - "step": 5346 - }, - { - "epoch": 0.411276055688024, - "learning_rate": 0.001912708805867269, - "loss": 1.3121, - "step": 5347 - }, - { - "epoch": 0.41135297284824246, - "learning_rate": 0.0019123603197428272, - "loss": 1.0555, - "step": 5348 - }, - { - "epoch": 0.41142989000846086, - "learning_rate": 0.001912011809540237, - "loss": 1.4181, - "step": 5349 - }, - { - "epoch": 0.4115068071686793, - "learning_rate": 0.0019116632752798474, - "loss": 0.8591, - "step": 5350 - }, - { - "epoch": 0.4115837243288978, - "learning_rate": 0.0019113147169820103, - "loss": 1.5652, - "step": 5351 - }, - { - "epoch": 0.41166064148911624, - "learning_rate": 0.0019109661346670785, - "loss": 1.2278, - "step": 5352 - }, - { - "epoch": 0.41173755864933465, - "learning_rate": 0.0019106175283554055, - "loss": 1.597, - "step": 5353 - }, - { - "epoch": 0.4118144758095531, - "learning_rate": 0.0019102688980673467, - "loss": 1.3299, - "step": 5354 - }, - { - "epoch": 0.41189139296977156, - "learning_rate": 0.0019099202438232598, - "loss": 1.1454, - "step": 5355 - }, - { - "epoch": 0.41196831012999, - "learning_rate": 0.0019095715656435028, - "loss": 1.018, - "step": 5356 - }, - { - "epoch": 0.4120452272902084, - "learning_rate": 0.0019092228635484354, - "loss": 1.1463, - "step": 5357 - }, - { - "epoch": 0.4121221444504269, - "learning_rate": 0.001908874137558418, - "loss": 1.1272, - "step": 5358 - }, - { - "epoch": 0.41219906161064535, - "learning_rate": 0.0019085253876938138, - "loss": 1.3921, - "step": 5359 - }, - { - "epoch": 0.4122759787708638, - "learning_rate": 0.0019081766139749862, - "loss": 1.11, - "step": 5360 - }, - { - "epoch": 0.4123528959310822, - "learning_rate": 0.0019078278164223013, - "loss": 1.4963, - "step": 5361 - }, - { - "epoch": 0.41242981309130067, - "learning_rate": 0.001907478995056125, - "loss": 1.184, - "step": 5362 - }, - { - "epoch": 0.4125067302515191, - "learning_rate": 0.001907130149896825, - "loss": 1.1806, - "step": 5363 - }, - { - "epoch": 0.4125836474117376, - "learning_rate": 0.0019067812809647716, - "loss": 1.0272, - "step": 5364 - }, - { - "epoch": 0.412660564571956, - "learning_rate": 0.0019064323882803352, - "loss": 1.2647, - "step": 5365 - }, - { - "epoch": 0.41273748173217445, - "learning_rate": 0.0019060834718638883, - "loss": 1.3375, - "step": 5366 - }, - { - "epoch": 0.4128143988923929, - "learning_rate": 0.001905734531735804, - "loss": 1.114, - "step": 5367 - }, - { - "epoch": 0.4128913160526113, - "learning_rate": 0.001905385567916458, - "loss": 1.443, - "step": 5368 - }, - { - "epoch": 0.41296823321282977, - "learning_rate": 0.0019050365804262255, - "loss": 1.3102, - "step": 5369 - }, - { - "epoch": 0.41304515037304823, - 
"learning_rate": 0.0019046875692854857, - "loss": 1.1855, - "step": 5370 - }, - { - "epoch": 0.4131220675332667, - "learning_rate": 0.001904338534514616, - "loss": 0.974, - "step": 5371 - }, - { - "epoch": 0.4131989846934851, - "learning_rate": 0.0019039894761339988, - "loss": 0.8566, - "step": 5372 - }, - { - "epoch": 0.41327590185370355, - "learning_rate": 0.0019036403941640146, - "loss": 0.8169, - "step": 5373 - }, - { - "epoch": 0.413352819013922, - "learning_rate": 0.0019032912886250471, - "loss": 1.17, - "step": 5374 - }, - { - "epoch": 0.41342973617414047, - "learning_rate": 0.0019029421595374814, - "loss": 0.9946, - "step": 5375 - }, - { - "epoch": 0.4135066533343589, - "learning_rate": 0.0019025930069217025, - "loss": 1.0334, - "step": 5376 - }, - { - "epoch": 0.41358357049457734, - "learning_rate": 0.001902243830798099, - "loss": 1.1297, - "step": 5377 - }, - { - "epoch": 0.4136604876547958, - "learning_rate": 0.0019018946311870582, - "loss": 0.894, - "step": 5378 - }, - { - "epoch": 0.41373740481501425, - "learning_rate": 0.0019015454081089717, - "loss": 0.9276, - "step": 5379 - }, - { - "epoch": 0.41381432197523266, - "learning_rate": 0.0019011961615842303, - "loss": 1.2543, - "step": 5380 - }, - { - "epoch": 0.4138912391354511, - "learning_rate": 0.001900846891633227, - "loss": 0.8844, - "step": 5381 - }, - { - "epoch": 0.4139681562956696, - "learning_rate": 0.0019004975982763552, - "loss": 1.4388, - "step": 5382 - }, - { - "epoch": 0.41404507345588804, - "learning_rate": 0.0019001482815340118, - "loss": 1.2338, - "step": 5383 - }, - { - "epoch": 0.41412199061610644, - "learning_rate": 0.001899798941426593, - "loss": 1.1151, - "step": 5384 - }, - { - "epoch": 0.4141989077763249, - "learning_rate": 0.0018994495779744976, - "loss": 1.1242, - "step": 5385 - }, - { - "epoch": 0.41427582493654336, - "learning_rate": 0.0018991001911981247, - "loss": 1.0195, - "step": 5386 - }, - { - "epoch": 0.41435274209676176, - "learning_rate": 0.0018987507811178762, - "loss": 1.0197, - "step": 5387 - }, - { - "epoch": 0.4144296592569802, - "learning_rate": 0.0018984013477541537, - "loss": 1.2167, - "step": 5388 - }, - { - "epoch": 0.4145065764171987, - "learning_rate": 0.0018980518911273611, - "loss": 0.9649, - "step": 5389 - }, - { - "epoch": 0.41458349357741714, - "learning_rate": 0.0018977024112579043, - "loss": 0.9731, - "step": 5390 - }, - { - "epoch": 0.41466041073763554, - "learning_rate": 0.001897352908166189, - "loss": 1.101, - "step": 5391 - }, - { - "epoch": 0.414737327897854, - "learning_rate": 0.0018970033818726231, - "loss": 1.3306, - "step": 5392 - }, - { - "epoch": 0.41481424505807246, - "learning_rate": 0.001896653832397616, - "loss": 1.1095, - "step": 5393 - }, - { - "epoch": 0.4148911622182909, - "learning_rate": 0.0018963042597615785, - "loss": 1.1044, - "step": 5394 - }, - { - "epoch": 0.4149680793785093, - "learning_rate": 0.0018959546639849224, - "loss": 0.8383, - "step": 5395 - }, - { - "epoch": 0.4150449965387278, - "learning_rate": 0.00189560504508806, - "loss": 1.2859, - "step": 5396 - }, - { - "epoch": 0.41512191369894624, - "learning_rate": 0.0018952554030914075, - "loss": 1.0922, - "step": 5397 - }, - { - "epoch": 0.4151988308591647, - "learning_rate": 0.0018949057380153794, - "loss": 1.3724, - "step": 5398 - }, - { - "epoch": 0.4152757480193831, - "learning_rate": 0.0018945560498803945, - "loss": 0.913, - "step": 5399 - }, - { - "epoch": 0.41535266517960157, - "learning_rate": 0.00189420633870687, - "loss": 1.3185, - "step": 5400 - }, - { - "epoch": 
0.41542958233982, - "learning_rate": 0.001893856604515227, - "loss": 1.3982, - "step": 5401 - }, - { - "epoch": 0.4155064995000385, - "learning_rate": 0.001893506847325886, - "loss": 1.3029, - "step": 5402 - }, - { - "epoch": 0.4155834166602569, - "learning_rate": 0.001893157067159271, - "loss": 1.2488, - "step": 5403 - }, - { - "epoch": 0.41566033382047535, - "learning_rate": 0.001892807264035804, - "loss": 0.968, - "step": 5404 - }, - { - "epoch": 0.4157372509806938, - "learning_rate": 0.0018924574379759116, - "loss": 0.9755, - "step": 5405 - }, - { - "epoch": 0.4158141681409122, - "learning_rate": 0.001892107589000021, - "loss": 1.2083, - "step": 5406 - }, - { - "epoch": 0.41589108530113067, - "learning_rate": 0.001891757717128559, - "loss": 1.126, - "step": 5407 - }, - { - "epoch": 0.41596800246134913, - "learning_rate": 0.001891407822381956, - "loss": 1.2708, - "step": 5408 - }, - { - "epoch": 0.4160449196215676, - "learning_rate": 0.001891057904780642, - "loss": 1.1473, - "step": 5409 - }, - { - "epoch": 0.416121836781786, - "learning_rate": 0.0018907079643450501, - "loss": 1.0215, - "step": 5410 - }, - { - "epoch": 0.41619875394200445, - "learning_rate": 0.0018903580010956125, - "loss": 1.1171, - "step": 5411 - }, - { - "epoch": 0.4162756711022229, - "learning_rate": 0.0018900080150527645, - "loss": 1.1427, - "step": 5412 - }, - { - "epoch": 0.41635258826244137, - "learning_rate": 0.0018896580062369415, - "loss": 1.4094, - "step": 5413 - }, - { - "epoch": 0.4164295054226598, - "learning_rate": 0.0018893079746685822, - "loss": 1.1212, - "step": 5414 - }, - { - "epoch": 0.41650642258287823, - "learning_rate": 0.0018889579203681243, - "loss": 1.337, - "step": 5415 - }, - { - "epoch": 0.4165833397430967, - "learning_rate": 0.001888607843356008, - "loss": 1.1354, - "step": 5416 - }, - { - "epoch": 0.41666025690331515, - "learning_rate": 0.0018882577436526743, - "loss": 1.4084, - "step": 5417 - }, - { - "epoch": 0.41673717406353356, - "learning_rate": 0.0018879076212785669, - "loss": 0.8814, - "step": 5418 - }, - { - "epoch": 0.416814091223752, - "learning_rate": 0.001887557476254129, - "loss": 0.5657, - "step": 5419 - }, - { - "epoch": 0.4168910083839705, - "learning_rate": 0.001887207308599806, - "loss": 1.0559, - "step": 5420 - }, - { - "epoch": 0.41696792554418893, - "learning_rate": 0.001886857118336045, - "loss": 0.9819, - "step": 5421 - }, - { - "epoch": 0.41704484270440734, - "learning_rate": 0.001886506905483294, - "loss": 1.298, - "step": 5422 - }, - { - "epoch": 0.4171217598646258, - "learning_rate": 0.0018861566700620015, - "loss": 1.2829, - "step": 5423 - }, - { - "epoch": 0.41719867702484426, - "learning_rate": 0.0018858064120926183, - "loss": 0.927, - "step": 5424 - }, - { - "epoch": 0.4172755941850627, - "learning_rate": 0.0018854561315955975, - "loss": 0.9657, - "step": 5425 - }, - { - "epoch": 0.4173525113452811, - "learning_rate": 0.0018851058285913912, - "loss": 0.9917, - "step": 5426 - }, - { - "epoch": 0.4174294285054996, - "learning_rate": 0.001884755503100454, - "loss": 1.196, - "step": 5427 - }, - { - "epoch": 0.41750634566571804, - "learning_rate": 0.0018844051551432416, - "loss": 1.2799, - "step": 5428 - }, - { - "epoch": 0.41758326282593644, - "learning_rate": 0.001884054784740212, - "loss": 0.8154, - "step": 5429 - }, - { - "epoch": 0.4176601799861549, - "learning_rate": 0.0018837043919118232, - "loss": 1.239, - "step": 5430 - }, - { - "epoch": 0.41773709714637336, - "learning_rate": 0.0018833539766785349, - "loss": 0.971, - "step": 5431 - }, - { - "epoch": 
0.4178140143065918, - "learning_rate": 0.0018830035390608089, - "loss": 0.7035, - "step": 5432 - }, - { - "epoch": 0.4178909314668102, - "learning_rate": 0.0018826530790791065, - "loss": 1.2166, - "step": 5433 - }, - { - "epoch": 0.4179678486270287, - "learning_rate": 0.0018823025967538926, - "loss": 1.1455, - "step": 5434 - }, - { - "epoch": 0.41804476578724714, - "learning_rate": 0.001881952092105631, - "loss": 1.0731, - "step": 5435 - }, - { - "epoch": 0.4181216829474656, - "learning_rate": 0.0018816015651547892, - "loss": 0.9191, - "step": 5436 - }, - { - "epoch": 0.418198600107684, - "learning_rate": 0.0018812510159218344, - "loss": 1.2045, - "step": 5437 - }, - { - "epoch": 0.41827551726790246, - "learning_rate": 0.0018809004444272348, - "loss": 1.2937, - "step": 5438 - }, - { - "epoch": 0.4183524344281209, - "learning_rate": 0.0018805498506914615, - "loss": 1.1727, - "step": 5439 - }, - { - "epoch": 0.4184293515883394, - "learning_rate": 0.0018801992347349862, - "loss": 0.9741, - "step": 5440 - }, - { - "epoch": 0.4185062687485578, - "learning_rate": 0.0018798485965782813, - "loss": 1.2351, - "step": 5441 - }, - { - "epoch": 0.41858318590877625, - "learning_rate": 0.0018794979362418206, - "loss": 0.9889, - "step": 5442 - }, - { - "epoch": 0.4186601030689947, - "learning_rate": 0.0018791472537460802, - "loss": 1.1544, - "step": 5443 - }, - { - "epoch": 0.41873702022921316, - "learning_rate": 0.0018787965491115363, - "loss": 1.1413, - "step": 5444 - }, - { - "epoch": 0.41881393738943157, - "learning_rate": 0.0018784458223586678, - "loss": 1.21, - "step": 5445 - }, - { - "epoch": 0.41889085454965, - "learning_rate": 0.0018780950735079523, - "loss": 0.8953, - "step": 5446 - }, - { - "epoch": 0.4189677717098685, - "learning_rate": 0.001877744302579872, - "loss": 0.7567, - "step": 5447 - }, - { - "epoch": 0.4190446888700869, - "learning_rate": 0.0018773935095949084, - "loss": 1.1003, - "step": 5448 - }, - { - "epoch": 0.41912160603030535, - "learning_rate": 0.0018770426945735436, - "loss": 0.9116, - "step": 5449 - }, - { - "epoch": 0.4191985231905238, - "learning_rate": 0.001876691857536264, - "loss": 1.0287, - "step": 5450 - }, - { - "epoch": 0.41927544035074227, - "learning_rate": 0.0018763409985035533, - "loss": 1.4729, - "step": 5451 - }, - { - "epoch": 0.41935235751096067, - "learning_rate": 0.0018759901174959003, - "loss": 1.2076, - "step": 5452 - }, - { - "epoch": 0.41942927467117913, - "learning_rate": 0.001875639214533792, - "loss": 1.1054, - "step": 5453 - }, - { - "epoch": 0.4195061918313976, - "learning_rate": 0.0018752882896377189, - "loss": 1.0048, - "step": 5454 - }, - { - "epoch": 0.41958310899161605, - "learning_rate": 0.0018749373428281707, - "loss": 1.3677, - "step": 5455 - }, - { - "epoch": 0.41966002615183445, - "learning_rate": 0.0018745863741256412, - "loss": 1.2008, - "step": 5456 - }, - { - "epoch": 0.4197369433120529, - "learning_rate": 0.0018742353835506224, - "loss": 0.9679, - "step": 5457 - }, - { - "epoch": 0.41981386047227137, - "learning_rate": 0.0018738843711236094, - "loss": 1.0629, - "step": 5458 - }, - { - "epoch": 0.41989077763248983, - "learning_rate": 0.0018735333368650982, - "loss": 1.288, - "step": 5459 - }, - { - "epoch": 0.41996769479270823, - "learning_rate": 0.0018731822807955868, - "loss": 1.1632, - "step": 5460 - }, - { - "epoch": 0.4200446119529267, - "learning_rate": 0.001872831202935573, - "loss": 1.4204, - "step": 5461 - }, - { - "epoch": 0.42012152911314515, - "learning_rate": 0.0018724801033055563, - "loss": 1.1945, - "step": 5462 - }, 
- { - "epoch": 0.4201984462733636, - "learning_rate": 0.0018721289819260383, - "loss": 1.0537, - "step": 5463 - }, - { - "epoch": 0.420275363433582, - "learning_rate": 0.0018717778388175213, - "loss": 0.9874, - "step": 5464 - }, - { - "epoch": 0.4203522805938005, - "learning_rate": 0.001871426674000509, - "loss": 0.6534, - "step": 5465 - }, - { - "epoch": 0.42042919775401894, - "learning_rate": 0.001871075487495506, - "loss": 1.3472, - "step": 5466 - }, - { - "epoch": 0.42050611491423734, - "learning_rate": 0.0018707242793230185, - "loss": 0.9003, - "step": 5467 - }, - { - "epoch": 0.4205830320744558, - "learning_rate": 0.001870373049503554, - "loss": 1.5472, - "step": 5468 - }, - { - "epoch": 0.42065994923467426, - "learning_rate": 0.0018700217980576212, - "loss": 1.2338, - "step": 5469 - }, - { - "epoch": 0.4207368663948927, - "learning_rate": 0.0018696705250057296, - "loss": 1.6406, - "step": 5470 - }, - { - "epoch": 0.4208137835551111, - "learning_rate": 0.0018693192303683914, - "loss": 0.986, - "step": 5471 - }, - { - "epoch": 0.4208907007153296, - "learning_rate": 0.0018689679141661183, - "loss": 1.5575, - "step": 5472 - }, - { - "epoch": 0.42096761787554804, - "learning_rate": 0.001868616576419424, - "loss": 1.0817, - "step": 5473 - }, - { - "epoch": 0.4210445350357665, - "learning_rate": 0.0018682652171488235, - "loss": 1.5008, - "step": 5474 - }, - { - "epoch": 0.4211214521959849, - "learning_rate": 0.0018679138363748334, - "loss": 1.1259, - "step": 5475 - }, - { - "epoch": 0.42119836935620336, - "learning_rate": 0.001867562434117971, - "loss": 1.2873, - "step": 5476 - }, - { - "epoch": 0.4212752865164218, - "learning_rate": 0.0018672110103987547, - "loss": 1.1144, - "step": 5477 - }, - { - "epoch": 0.4213522036766403, - "learning_rate": 0.0018668595652377052, - "loss": 1.278, - "step": 5478 - }, - { - "epoch": 0.4214291208368587, - "learning_rate": 0.001866508098655343, - "loss": 0.724, - "step": 5479 - }, - { - "epoch": 0.42150603799707714, - "learning_rate": 0.0018661566106721913, - "loss": 1.1591, - "step": 5480 - }, - { - "epoch": 0.4215829551572956, - "learning_rate": 0.0018658051013087731, - "loss": 1.2845, - "step": 5481 - }, - { - "epoch": 0.42165987231751406, - "learning_rate": 0.001865453570585614, - "loss": 0.8868, - "step": 5482 - }, - { - "epoch": 0.42173678947773247, - "learning_rate": 0.0018651020185232404, - "loss": 1.2903, - "step": 5483 - }, - { - "epoch": 0.4218137066379509, - "learning_rate": 0.0018647504451421786, - "loss": 0.896, - "step": 5484 - }, - { - "epoch": 0.4218906237981694, - "learning_rate": 0.001864398850462959, - "loss": 0.9226, - "step": 5485 - }, - { - "epoch": 0.42196754095838784, - "learning_rate": 0.00186404723450611, - "loss": 1.099, - "step": 5486 - }, - { - "epoch": 0.42204445811860625, - "learning_rate": 0.0018636955972921644, - "loss": 1.0727, - "step": 5487 - }, - { - "epoch": 0.4221213752788247, - "learning_rate": 0.001863343938841653, - "loss": 1.1478, - "step": 5488 - }, - { - "epoch": 0.42219829243904317, - "learning_rate": 0.0018629922591751112, - "loss": 1.372, - "step": 5489 - }, - { - "epoch": 0.42227520959926157, - "learning_rate": 0.0018626405583130727, - "loss": 1.057, - "step": 5490 - }, - { - "epoch": 0.42235212675948003, - "learning_rate": 0.0018622888362760741, - "loss": 1.0178, - "step": 5491 - }, - { - "epoch": 0.4224290439196985, - "learning_rate": 0.0018619370930846525, - "loss": 1.108, - "step": 5492 - }, - { - "epoch": 0.42250596107991695, - "learning_rate": 0.0018615853287593472, - "loss": 1.1004, - "step": 5493 
- }, - { - "epoch": 0.42258287824013535, - "learning_rate": 0.0018612335433206976, - "loss": 1.305, - "step": 5494 - }, - { - "epoch": 0.4226597954003538, - "learning_rate": 0.0018608817367892446, - "loss": 1.0859, - "step": 5495 - }, - { - "epoch": 0.42273671256057227, - "learning_rate": 0.0018605299091855316, - "loss": 1.0684, - "step": 5496 - }, - { - "epoch": 0.42281362972079073, - "learning_rate": 0.0018601780605301006, - "loss": 1.174, - "step": 5497 - }, - { - "epoch": 0.42289054688100913, - "learning_rate": 0.0018598261908434983, - "loss": 0.8731, - "step": 5498 - }, - { - "epoch": 0.4229674640412276, - "learning_rate": 0.001859474300146269, - "loss": 1.2816, - "step": 5499 - }, - { - "epoch": 0.42304438120144605, - "learning_rate": 0.001859122388458961, - "loss": 1.2088, - "step": 5500 - }, - { - "epoch": 0.4231212983616645, - "learning_rate": 0.0018587704558021218, - "loss": 1.0698, - "step": 5501 - }, - { - "epoch": 0.4231982155218829, - "learning_rate": 0.0018584185021963023, - "loss": 0.8157, - "step": 5502 - }, - { - "epoch": 0.4232751326821014, - "learning_rate": 0.0018580665276620532, - "loss": 0.8452, - "step": 5503 - }, - { - "epoch": 0.42335204984231983, - "learning_rate": 0.001857714532219926, - "loss": 1.0249, - "step": 5504 - }, - { - "epoch": 0.4234289670025383, - "learning_rate": 0.0018573625158904744, - "loss": 1.104, - "step": 5505 - }, - { - "epoch": 0.4235058841627567, - "learning_rate": 0.0018570104786942534, - "loss": 1.1015, - "step": 5506 - }, - { - "epoch": 0.42358280132297516, - "learning_rate": 0.0018566584206518182, - "loss": 1.105, - "step": 5507 - }, - { - "epoch": 0.4236597184831936, - "learning_rate": 0.0018563063417837262, - "loss": 1.0311, - "step": 5508 - }, - { - "epoch": 0.423736635643412, - "learning_rate": 0.001855954242110536, - "loss": 1.395, - "step": 5509 - }, - { - "epoch": 0.4238135528036305, - "learning_rate": 0.001855602121652806, - "loss": 1.0516, - "step": 5510 - }, - { - "epoch": 0.42389046996384894, - "learning_rate": 0.0018552499804310976, - "loss": 1.8601, - "step": 5511 - }, - { - "epoch": 0.4239673871240674, - "learning_rate": 0.0018548978184659728, - "loss": 1.1041, - "step": 5512 - }, - { - "epoch": 0.4240443042842858, - "learning_rate": 0.0018545456357779945, - "loss": 1.058, - "step": 5513 - }, - { - "epoch": 0.42412122144450426, - "learning_rate": 0.001854193432387727, - "loss": 0.8967, - "step": 5514 - }, - { - "epoch": 0.4241981386047227, - "learning_rate": 0.0018538412083157355, - "loss": 1.004, - "step": 5515 - }, - { - "epoch": 0.4242750557649412, - "learning_rate": 0.0018534889635825878, - "loss": 1.0781, - "step": 5516 - }, - { - "epoch": 0.4243519729251596, - "learning_rate": 0.0018531366982088506, - "loss": 0.9593, - "step": 5517 - }, - { - "epoch": 0.42442889008537804, - "learning_rate": 0.001852784412215094, - "loss": 1.1843, - "step": 5518 - }, - { - "epoch": 0.4245058072455965, - "learning_rate": 0.0018524321056218873, - "loss": 1.1723, - "step": 5519 - }, - { - "epoch": 0.42458272440581496, - "learning_rate": 0.0018520797784498028, - "loss": 1.0247, - "step": 5520 - }, - { - "epoch": 0.42465964156603336, - "learning_rate": 0.0018517274307194135, - "loss": 1.1903, - "step": 5521 - }, - { - "epoch": 0.4247365587262518, - "learning_rate": 0.001851375062451293, - "loss": 1.2003, - "step": 5522 - }, - { - "epoch": 0.4248134758864703, - "learning_rate": 0.0018510226736660157, - "loss": 1.0411, - "step": 5523 - }, - { - "epoch": 0.42489039304668874, - "learning_rate": 0.0018506702643841593, - "loss": 1.2441, - 
"step": 5524 - }, - { - "epoch": 0.42496731020690715, - "learning_rate": 0.0018503178346263006, - "loss": 1.2971, - "step": 5525 - }, - { - "epoch": 0.4250442273671256, - "learning_rate": 0.001849965384413018, - "loss": 0.9883, - "step": 5526 - }, - { - "epoch": 0.42512114452734406, - "learning_rate": 0.0018496129137648926, - "loss": 0.9447, - "step": 5527 - }, - { - "epoch": 0.42519806168756247, - "learning_rate": 0.0018492604227025042, - "loss": 1.2593, - "step": 5528 - }, - { - "epoch": 0.4252749788477809, - "learning_rate": 0.0018489079112464363, - "loss": 1.2315, - "step": 5529 - }, - { - "epoch": 0.4253518960079994, - "learning_rate": 0.0018485553794172712, - "loss": 1.2355, - "step": 5530 - }, - { - "epoch": 0.42542881316821785, - "learning_rate": 0.0018482028272355947, - "loss": 1.0028, - "step": 5531 - }, - { - "epoch": 0.42550573032843625, - "learning_rate": 0.0018478502547219923, - "loss": 1.4217, - "step": 5532 - }, - { - "epoch": 0.4255826474886547, - "learning_rate": 0.0018474976618970509, - "loss": 1.092, - "step": 5533 - }, - { - "epoch": 0.42565956464887317, - "learning_rate": 0.0018471450487813584, - "loss": 1.2022, - "step": 5534 - }, - { - "epoch": 0.4257364818090916, - "learning_rate": 0.001846792415395505, - "loss": 1.1645, - "step": 5535 - }, - { - "epoch": 0.42581339896931003, - "learning_rate": 0.0018464397617600813, - "loss": 1.3899, - "step": 5536 - }, - { - "epoch": 0.4258903161295285, - "learning_rate": 0.0018460870878956785, - "loss": 1.3774, - "step": 5537 - }, - { - "epoch": 0.42596723328974695, - "learning_rate": 0.0018457343938228902, - "loss": 1.3572, - "step": 5538 - }, - { - "epoch": 0.4260441504499654, - "learning_rate": 0.0018453816795623102, - "loss": 1.3027, - "step": 5539 - }, - { - "epoch": 0.4261210676101838, - "learning_rate": 0.0018450289451345343, - "loss": 1.1324, - "step": 5540 - }, - { - "epoch": 0.42619798477040227, - "learning_rate": 0.001844676190560158, - "loss": 1.3033, - "step": 5541 - }, - { - "epoch": 0.42627490193062073, - "learning_rate": 0.0018443234158597806, - "loss": 1.0375, - "step": 5542 - }, - { - "epoch": 0.4263518190908392, - "learning_rate": 0.0018439706210539991, - "loss": 1.0948, - "step": 5543 - }, - { - "epoch": 0.4264287362510576, - "learning_rate": 0.0018436178061634156, - "loss": 1.4625, - "step": 5544 - }, - { - "epoch": 0.42650565341127605, - "learning_rate": 0.0018432649712086296, - "loss": 0.951, - "step": 5545 - }, - { - "epoch": 0.4265825705714945, - "learning_rate": 0.0018429121162102447, - "loss": 1.0098, - "step": 5546 - }, - { - "epoch": 0.42665948773171297, - "learning_rate": 0.0018425592411888635, - "loss": 0.9701, - "step": 5547 - }, - { - "epoch": 0.4267364048919314, - "learning_rate": 0.0018422063461650915, - "loss": 1.4429, - "step": 5548 - }, - { - "epoch": 0.42681332205214983, - "learning_rate": 0.0018418534311595341, - "loss": 1.1941, - "step": 5549 - }, - { - "epoch": 0.4268902392123683, - "learning_rate": 0.0018415004961927983, - "loss": 1.0569, - "step": 5550 - }, - { - "epoch": 0.4269671563725867, - "learning_rate": 0.0018411475412854934, - "loss": 1.1631, - "step": 5551 - }, - { - "epoch": 0.42704407353280516, - "learning_rate": 0.0018407945664582273, - "loss": 1.0821, - "step": 5552 - }, - { - "epoch": 0.4271209906930236, - "learning_rate": 0.0018404415717316119, - "loss": 1.229, - "step": 5553 - }, - { - "epoch": 0.4271979078532421, - "learning_rate": 0.0018400885571262576, - "loss": 1.1604, - "step": 5554 - }, - { - "epoch": 0.4272748250134605, - "learning_rate": 0.0018397355226627793, - 
"loss": 1.2475, - "step": 5555 - }, - { - "epoch": 0.42735174217367894, - "learning_rate": 0.0018393824683617885, - "loss": 0.8418, - "step": 5556 - }, - { - "epoch": 0.4274286593338974, - "learning_rate": 0.0018390293942439023, - "loss": 0.9409, - "step": 5557 - }, - { - "epoch": 0.42750557649411586, - "learning_rate": 0.0018386763003297357, - "loss": 0.9576, - "step": 5558 - }, - { - "epoch": 0.42758249365433426, - "learning_rate": 0.0018383231866399077, - "loss": 1.0819, - "step": 5559 - }, - { - "epoch": 0.4276594108145527, - "learning_rate": 0.0018379700531950362, - "loss": 1.0788, - "step": 5560 - }, - { - "epoch": 0.4277363279747712, - "learning_rate": 0.0018376169000157406, - "loss": 1.0406, - "step": 5561 - }, - { - "epoch": 0.42781324513498964, - "learning_rate": 0.0018372637271226427, - "loss": 1.2434, - "step": 5562 - }, - { - "epoch": 0.42789016229520804, - "learning_rate": 0.0018369105345363646, - "loss": 1.2091, - "step": 5563 - }, - { - "epoch": 0.4279670794554265, - "learning_rate": 0.0018365573222775288, - "loss": 1.5499, - "step": 5564 - }, - { - "epoch": 0.42804399661564496, - "learning_rate": 0.0018362040903667602, - "loss": 1.2836, - "step": 5565 - }, - { - "epoch": 0.4281209137758634, - "learning_rate": 0.0018358508388246844, - "loss": 1.3395, - "step": 5566 - }, - { - "epoch": 0.4281978309360818, - "learning_rate": 0.0018354975676719285, - "loss": 1.2794, - "step": 5567 - }, - { - "epoch": 0.4282747480963003, - "learning_rate": 0.0018351442769291195, - "loss": 0.9135, - "step": 5568 - }, - { - "epoch": 0.42835166525651874, - "learning_rate": 0.001834790966616887, - "loss": 0.9336, - "step": 5569 - }, - { - "epoch": 0.42842858241673715, - "learning_rate": 0.0018344376367558613, - "loss": 1.334, - "step": 5570 - }, - { - "epoch": 0.4285054995769556, - "learning_rate": 0.0018340842873666734, - "loss": 0.8759, - "step": 5571 - }, - { - "epoch": 0.42858241673717407, - "learning_rate": 0.0018337309184699555, - "loss": 1.2565, - "step": 5572 - }, - { - "epoch": 0.4286593338973925, - "learning_rate": 0.0018333775300863415, - "loss": 1.0318, - "step": 5573 - }, - { - "epoch": 0.42873625105761093, - "learning_rate": 0.0018330241222364663, - "loss": 1.0225, - "step": 5574 - }, - { - "epoch": 0.4288131682178294, - "learning_rate": 0.0018326706949409657, - "loss": 1.1291, - "step": 5575 - }, - { - "epoch": 0.42889008537804785, - "learning_rate": 0.001832317248220476, - "loss": 1.0597, - "step": 5576 - }, - { - "epoch": 0.4289670025382663, - "learning_rate": 0.0018319637820956361, - "loss": 1.4029, - "step": 5577 - }, - { - "epoch": 0.4290439196984847, - "learning_rate": 0.0018316102965870852, - "loss": 1.3606, - "step": 5578 - }, - { - "epoch": 0.42912083685870317, - "learning_rate": 0.0018312567917154632, - "loss": 0.8228, - "step": 5579 - }, - { - "epoch": 0.42919775401892163, - "learning_rate": 0.001830903267501412, - "loss": 0.9363, - "step": 5580 - }, - { - "epoch": 0.4292746711791401, - "learning_rate": 0.001830549723965574, - "loss": 1.0098, - "step": 5581 - }, - { - "epoch": 0.4293515883393585, - "learning_rate": 0.0018301961611285936, - "loss": 1.3514, - "step": 5582 - }, - { - "epoch": 0.42942850549957695, - "learning_rate": 0.0018298425790111146, - "loss": 1.3572, - "step": 5583 - }, - { - "epoch": 0.4295054226597954, - "learning_rate": 0.001829488977633784, - "loss": 0.7179, - "step": 5584 - }, - { - "epoch": 0.42958233982001387, - "learning_rate": 0.0018291353570172484, - "loss": 1.2272, - "step": 5585 - }, - { - "epoch": 0.4296592569802323, - "learning_rate": 
0.0018287817171821568, - "loss": 1.2496, - "step": 5586 - }, - { - "epoch": 0.42973617414045073, - "learning_rate": 0.0018284280581491576, - "loss": 1.0349, - "step": 5587 - }, - { - "epoch": 0.4298130913006692, - "learning_rate": 0.001828074379938902, - "loss": 1.124, - "step": 5588 - }, - { - "epoch": 0.42989000846088765, - "learning_rate": 0.001827720682572041, - "loss": 0.9621, - "step": 5589 - }, - { - "epoch": 0.42996692562110606, - "learning_rate": 0.0018273669660692285, - "loss": 0.7076, - "step": 5590 - }, - { - "epoch": 0.4300438427813245, - "learning_rate": 0.0018270132304511173, - "loss": 1.2028, - "step": 5591 - }, - { - "epoch": 0.430120759941543, - "learning_rate": 0.0018266594757383626, - "loss": 1.197, - "step": 5592 - }, - { - "epoch": 0.4301976771017614, - "learning_rate": 0.0018263057019516213, - "loss": 1.3546, - "step": 5593 - }, - { - "epoch": 0.43027459426197984, - "learning_rate": 0.0018259519091115492, - "loss": 1.001, - "step": 5594 - }, - { - "epoch": 0.4303515114221983, - "learning_rate": 0.0018255980972388063, - "loss": 0.8959, - "step": 5595 - }, - { - "epoch": 0.43042842858241676, - "learning_rate": 0.0018252442663540505, - "loss": 1.3395, - "step": 5596 - }, - { - "epoch": 0.43050534574263516, - "learning_rate": 0.0018248904164779437, - "loss": 1.051, - "step": 5597 - }, - { - "epoch": 0.4305822629028536, - "learning_rate": 0.001824536547631146, - "loss": 1.0275, - "step": 5598 - }, - { - "epoch": 0.4306591800630721, - "learning_rate": 0.0018241826598343217, - "loss": 1.1403, - "step": 5599 - }, - { - "epoch": 0.43073609722329054, - "learning_rate": 0.0018238287531081334, - "loss": 1.1042, - "step": 5600 - }, - { - "epoch": 0.43081301438350894, - "learning_rate": 0.0018234748274732473, - "loss": 1.3162, - "step": 5601 - }, - { - "epoch": 0.4308899315437274, - "learning_rate": 0.0018231208829503286, - "loss": 1.0446, - "step": 5602 - }, - { - "epoch": 0.43096684870394586, - "learning_rate": 0.0018227669195600448, - "loss": 1.2103, - "step": 5603 - }, - { - "epoch": 0.4310437658641643, - "learning_rate": 0.0018224129373230647, - "loss": 1.2208, - "step": 5604 - }, - { - "epoch": 0.4311206830243827, - "learning_rate": 0.0018220589362600568, - "loss": 1.1432, - "step": 5605 - }, - { - "epoch": 0.4311976001846012, - "learning_rate": 0.001821704916391692, - "loss": 1.3191, - "step": 5606 - }, - { - "epoch": 0.43127451734481964, - "learning_rate": 0.0018213508777386418, - "loss": 1.0648, - "step": 5607 - }, - { - "epoch": 0.4313514345050381, - "learning_rate": 0.0018209968203215788, - "loss": 1.197, - "step": 5608 - }, - { - "epoch": 0.4314283516652565, - "learning_rate": 0.0018206427441611773, - "loss": 1.1027, - "step": 5609 - }, - { - "epoch": 0.43150526882547496, - "learning_rate": 0.0018202886492781117, - "loss": 1.0762, - "step": 5610 - }, - { - "epoch": 0.4315821859856934, - "learning_rate": 0.0018199345356930579, - "loss": 1.2543, - "step": 5611 - }, - { - "epoch": 0.4316591031459118, - "learning_rate": 0.001819580403426693, - "loss": 1.2031, - "step": 5612 - }, - { - "epoch": 0.4317360203061303, - "learning_rate": 0.0018192262524996956, - "loss": 1.2362, - "step": 5613 - }, - { - "epoch": 0.43181293746634875, - "learning_rate": 0.0018188720829327447, - "loss": 1.3068, - "step": 5614 - }, - { - "epoch": 0.4318898546265672, - "learning_rate": 0.0018185178947465201, - "loss": 1.1597, - "step": 5615 - }, - { - "epoch": 0.4319667717867856, - "learning_rate": 0.0018181636879617041, - "loss": 1.1482, - "step": 5616 - }, - { - "epoch": 0.43204368894700407, - 
"learning_rate": 0.0018178094625989787, - "loss": 1.3016, - "step": 5617 - }, - { - "epoch": 0.4321206061072225, - "learning_rate": 0.0018174552186790273, - "loss": 1.0175, - "step": 5618 - }, - { - "epoch": 0.432197523267441, - "learning_rate": 0.0018171009562225353, - "loss": 1.2541, - "step": 5619 - }, - { - "epoch": 0.4322744404276594, - "learning_rate": 0.0018167466752501877, - "loss": 1.147, - "step": 5620 - }, - { - "epoch": 0.43235135758787785, - "learning_rate": 0.0018163923757826717, - "loss": 0.9687, - "step": 5621 - }, - { - "epoch": 0.4324282747480963, - "learning_rate": 0.001816038057840675, - "loss": 1.0988, - "step": 5622 - }, - { - "epoch": 0.43250519190831477, - "learning_rate": 0.0018156837214448871, - "loss": 1.2579, - "step": 5623 - }, - { - "epoch": 0.43258210906853317, - "learning_rate": 0.0018153293666159976, - "loss": 1.4583, - "step": 5624 - }, - { - "epoch": 0.43265902622875163, - "learning_rate": 0.0018149749933746972, - "loss": 1.2231, - "step": 5625 - }, - { - "epoch": 0.4327359433889701, - "learning_rate": 0.0018146206017416793, - "loss": 1.2069, - "step": 5626 - }, - { - "epoch": 0.43281286054918855, - "learning_rate": 0.0018142661917376361, - "loss": 1.0469, - "step": 5627 - }, - { - "epoch": 0.43288977770940695, - "learning_rate": 0.001813911763383263, - "loss": 1.0712, - "step": 5628 - }, - { - "epoch": 0.4329666948696254, - "learning_rate": 0.0018135573166992542, - "loss": 0.8022, - "step": 5629 - }, - { - "epoch": 0.43304361202984387, - "learning_rate": 0.0018132028517063076, - "loss": 1.0543, - "step": 5630 - }, - { - "epoch": 0.4331205291900623, - "learning_rate": 0.0018128483684251198, - "loss": 1.0692, - "step": 5631 - }, - { - "epoch": 0.43319744635028073, - "learning_rate": 0.0018124938668763896, - "loss": 1.1745, - "step": 5632 - }, - { - "epoch": 0.4332743635104992, - "learning_rate": 0.001812139347080817, - "loss": 1.2819, - "step": 5633 - }, - { - "epoch": 0.43335128067071765, - "learning_rate": 0.0018117848090591026, - "loss": 1.1315, - "step": 5634 - }, - { - "epoch": 0.43342819783093606, - "learning_rate": 0.001811430252831948, - "loss": 0.8297, - "step": 5635 - }, - { - "epoch": 0.4335051149911545, - "learning_rate": 0.0018110756784200563, - "loss": 1.1009, - "step": 5636 - }, - { - "epoch": 0.433582032151373, - "learning_rate": 0.001810721085844132, - "loss": 1.7179, - "step": 5637 - }, - { - "epoch": 0.43365894931159144, - "learning_rate": 0.0018103664751248792, - "loss": 1.2815, - "step": 5638 - }, - { - "epoch": 0.43373586647180984, - "learning_rate": 0.0018100118462830055, - "loss": 0.7268, - "step": 5639 - }, - { - "epoch": 0.4338127836320283, - "learning_rate": 0.0018096571993392156, - "loss": 1.2658, - "step": 5640 - }, - { - "epoch": 0.43388970079224676, - "learning_rate": 0.00180930253431422, - "loss": 1.2305, - "step": 5641 - }, - { - "epoch": 0.4339666179524652, - "learning_rate": 0.0018089478512287268, - "loss": 1.2178, - "step": 5642 - }, - { - "epoch": 0.4340435351126836, - "learning_rate": 0.001808593150103447, - "loss": 1.0977, - "step": 5643 - }, - { - "epoch": 0.4341204522729021, - "learning_rate": 0.0018082384309590909, - "loss": 0.9142, - "step": 5644 - }, - { - "epoch": 0.43419736943312054, - "learning_rate": 0.0018078836938163721, - "loss": 1.1176, - "step": 5645 - }, - { - "epoch": 0.434274286593339, - "learning_rate": 0.001807528938696003, - "loss": 1.1686, - "step": 5646 - }, - { - "epoch": 0.4343512037535574, - "learning_rate": 0.001807174165618699, - "loss": 1.4266, - "step": 5647 - }, - { - "epoch": 
0.43442812091377586, - "learning_rate": 0.0018068193746051755, - "loss": 0.9128, - "step": 5648 - }, - { - "epoch": 0.4345050380739943, - "learning_rate": 0.0018064645656761485, - "loss": 1.3591, - "step": 5649 - }, - { - "epoch": 0.4345819552342128, - "learning_rate": 0.0018061097388523365, - "loss": 0.9289, - "step": 5650 - }, - { - "epoch": 0.4346588723944312, - "learning_rate": 0.0018057548941544576, - "loss": 1.333, - "step": 5651 - }, - { - "epoch": 0.43473578955464964, - "learning_rate": 0.0018054000316032324, - "loss": 1.2954, - "step": 5652 - }, - { - "epoch": 0.4348127067148681, - "learning_rate": 0.0018050451512193805, - "loss": 1.2906, - "step": 5653 - }, - { - "epoch": 0.4348896238750865, - "learning_rate": 0.0018046902530236245, - "loss": 1.2782, - "step": 5654 - }, - { - "epoch": 0.43496654103530497, - "learning_rate": 0.001804335337036687, - "loss": 1.3585, - "step": 5655 - }, - { - "epoch": 0.4350434581955234, - "learning_rate": 0.001803980403279292, - "loss": 1.176, - "step": 5656 - }, - { - "epoch": 0.4351203753557419, - "learning_rate": 0.001803625451772164, - "loss": 1.3991, - "step": 5657 - }, - { - "epoch": 0.4351972925159603, - "learning_rate": 0.0018032704825360303, - "loss": 1.5181, - "step": 5658 - }, - { - "epoch": 0.43527420967617875, - "learning_rate": 0.0018029154955916166, - "loss": 1.3791, - "step": 5659 - }, - { - "epoch": 0.4353511268363972, - "learning_rate": 0.0018025604909596513, - "loss": 1.3091, - "step": 5660 - }, - { - "epoch": 0.43542804399661567, - "learning_rate": 0.0018022054686608639, - "loss": 0.866, - "step": 5661 - }, - { - "epoch": 0.43550496115683407, - "learning_rate": 0.0018018504287159842, - "loss": 1.2028, - "step": 5662 - }, - { - "epoch": 0.43558187831705253, - "learning_rate": 0.0018014953711457432, - "loss": 1.0731, - "step": 5663 - }, - { - "epoch": 0.435658795477271, - "learning_rate": 0.0018011402959708734, - "loss": 1.0611, - "step": 5664 - }, - { - "epoch": 0.43573571263748945, - "learning_rate": 0.0018007852032121076, - "loss": 1.2305, - "step": 5665 - }, - { - "epoch": 0.43581262979770785, - "learning_rate": 0.0018004300928901806, - "loss": 0.7995, - "step": 5666 - }, - { - "epoch": 0.4358895469579263, - "learning_rate": 0.0018000749650258273, - "loss": 1.0112, - "step": 5667 - }, - { - "epoch": 0.43596646411814477, - "learning_rate": 0.001799719819639784, - "loss": 1.1825, - "step": 5668 - }, - { - "epoch": 0.43604338127836323, - "learning_rate": 0.0017993646567527886, - "loss": 1.0716, - "step": 5669 - }, - { - "epoch": 0.43612029843858163, - "learning_rate": 0.0017990094763855784, - "loss": 1.0991, - "step": 5670 - }, - { - "epoch": 0.4361972155988001, - "learning_rate": 0.001798654278558893, - "loss": 1.1066, - "step": 5671 - }, - { - "epoch": 0.43627413275901855, - "learning_rate": 0.0017982990632934735, - "loss": 1.0699, - "step": 5672 - }, - { - "epoch": 0.43635104991923696, - "learning_rate": 0.0017979438306100605, - "loss": 1.3438, - "step": 5673 - }, - { - "epoch": 0.4364279670794554, - "learning_rate": 0.001797588580529397, - "loss": 1.0997, - "step": 5674 - }, - { - "epoch": 0.4365048842396739, - "learning_rate": 0.001797233313072226, - "loss": 1.1237, - "step": 5675 - }, - { - "epoch": 0.43658180139989233, - "learning_rate": 0.001796878028259292, - "loss": 1.2894, - "step": 5676 - }, - { - "epoch": 0.43665871856011074, - "learning_rate": 0.0017965227261113408, - "loss": 1.348, - "step": 5677 - }, - { - "epoch": 0.4367356357203292, - "learning_rate": 0.0017961674066491184, - "loss": 1.0409, - "step": 5678 - }, - 
{ - "epoch": 0.43681255288054766, - "learning_rate": 0.0017958120698933725, - "loss": 1.4251, - "step": 5679 - }, - { - "epoch": 0.4368894700407661, - "learning_rate": 0.0017954567158648513, - "loss": 1.2653, - "step": 5680 - }, - { - "epoch": 0.4369663872009845, - "learning_rate": 0.0017951013445843054, - "loss": 0.9678, - "step": 5681 - }, - { - "epoch": 0.437043304361203, - "learning_rate": 0.0017947459560724833, - "loss": 1.2467, - "step": 5682 - }, - { - "epoch": 0.43712022152142144, - "learning_rate": 0.0017943905503501384, - "loss": 1.444, - "step": 5683 - }, - { - "epoch": 0.4371971386816399, - "learning_rate": 0.001794035127438022, - "loss": 1.026, - "step": 5684 - }, - { - "epoch": 0.4372740558418583, - "learning_rate": 0.0017936796873568887, - "loss": 1.3701, - "step": 5685 - }, - { - "epoch": 0.43735097300207676, - "learning_rate": 0.0017933242301274917, - "loss": 1.5726, - "step": 5686 - }, - { - "epoch": 0.4374278901622952, - "learning_rate": 0.0017929687557705876, - "loss": 1.0995, - "step": 5687 - }, - { - "epoch": 0.4375048073225137, - "learning_rate": 0.0017926132643069322, - "loss": 1.2614, - "step": 5688 - }, - { - "epoch": 0.4375817244827321, - "learning_rate": 0.0017922577557572838, - "loss": 1.1429, - "step": 5689 - }, - { - "epoch": 0.43765864164295054, - "learning_rate": 0.0017919022301424003, - "loss": 0.8692, - "step": 5690 - }, - { - "epoch": 0.437735558803169, - "learning_rate": 0.0017915466874830412, - "loss": 1.3014, - "step": 5691 - }, - { - "epoch": 0.4378124759633874, - "learning_rate": 0.0017911911277999676, - "loss": 1.3442, - "step": 5692 - }, - { - "epoch": 0.43788939312360586, - "learning_rate": 0.00179083555111394, - "loss": 1.2153, - "step": 5693 - }, - { - "epoch": 0.4379663102838243, - "learning_rate": 0.0017904799574457219, - "loss": 0.8997, - "step": 5694 - }, - { - "epoch": 0.4380432274440428, - "learning_rate": 0.001790124346816076, - "loss": 1.1885, - "step": 5695 - }, - { - "epoch": 0.4381201446042612, - "learning_rate": 0.0017897687192457675, - "loss": 1.3203, - "step": 5696 - }, - { - "epoch": 0.43819706176447965, - "learning_rate": 0.001789413074755561, - "loss": 1.3783, - "step": 5697 - }, - { - "epoch": 0.4382739789246981, - "learning_rate": 0.001789057413366224, - "loss": 1.1416, - "step": 5698 - }, - { - "epoch": 0.43835089608491656, - "learning_rate": 0.001788701735098523, - "loss": 1.0234, - "step": 5699 - }, - { - "epoch": 0.43842781324513497, - "learning_rate": 0.0017883460399732267, - "loss": 1.4129, - "step": 5700 - }, - { - "epoch": 0.4385047304053534, - "learning_rate": 0.0017879903280111052, - "loss": 0.9845, - "step": 5701 - }, - { - "epoch": 0.4385816475655719, - "learning_rate": 0.0017876345992329278, - "loss": 1.2713, - "step": 5702 - }, - { - "epoch": 0.43865856472579035, - "learning_rate": 0.0017872788536594665, - "loss": 1.2226, - "step": 5703 - }, - { - "epoch": 0.43873548188600875, - "learning_rate": 0.0017869230913114937, - "loss": 1.0973, - "step": 5704 - }, - { - "epoch": 0.4388123990462272, - "learning_rate": 0.0017865673122097825, - "loss": 0.9595, - "step": 5705 - }, - { - "epoch": 0.43888931620644567, - "learning_rate": 0.001786211516375107, - "loss": 1.3162, - "step": 5706 - }, - { - "epoch": 0.4389662333666641, - "learning_rate": 0.0017858557038282433, - "loss": 1.8682, - "step": 5707 - }, - { - "epoch": 0.43904315052688253, - "learning_rate": 0.001785499874589967, - "loss": 1.0754, - "step": 5708 - }, - { - "epoch": 0.439120067687101, - "learning_rate": 0.0017851440286810554, - "loss": 0.9753, - "step": 
5709 - }, - { - "epoch": 0.43919698484731945, - "learning_rate": 0.0017847881661222865, - "loss": 0.9731, - "step": 5710 - }, - { - "epoch": 0.4392739020075379, - "learning_rate": 0.00178443228693444, - "loss": 1.3326, - "step": 5711 - }, - { - "epoch": 0.4393508191677563, - "learning_rate": 0.0017840763911382961, - "loss": 1.1624, - "step": 5712 - }, - { - "epoch": 0.43942773632797477, - "learning_rate": 0.0017837204787546353, - "loss": 1.0467, - "step": 5713 - }, - { - "epoch": 0.43950465348819323, - "learning_rate": 0.0017833645498042403, - "loss": 1.3491, - "step": 5714 - }, - { - "epoch": 0.43958157064841163, - "learning_rate": 0.0017830086043078936, - "loss": 1.1612, - "step": 5715 - }, - { - "epoch": 0.4396584878086301, - "learning_rate": 0.0017826526422863801, - "loss": 1.4372, - "step": 5716 - }, - { - "epoch": 0.43973540496884855, - "learning_rate": 0.0017822966637604837, - "loss": 1.0515, - "step": 5717 - }, - { - "epoch": 0.439812322129067, - "learning_rate": 0.001781940668750991, - "loss": 1.3669, - "step": 5718 - }, - { - "epoch": 0.4398892392892854, - "learning_rate": 0.0017815846572786893, - "loss": 0.7621, - "step": 5719 - }, - { - "epoch": 0.4399661564495039, - "learning_rate": 0.0017812286293643657, - "loss": 1.3997, - "step": 5720 - }, - { - "epoch": 0.44004307360972233, - "learning_rate": 0.0017808725850288088, - "loss": 1.1387, - "step": 5721 - }, - { - "epoch": 0.4401199907699408, - "learning_rate": 0.0017805165242928095, - "loss": 0.9814, - "step": 5722 - }, - { - "epoch": 0.4401969079301592, - "learning_rate": 0.0017801604471771582, - "loss": 1.1478, - "step": 5723 - }, - { - "epoch": 0.44027382509037766, - "learning_rate": 0.001779804353702646, - "loss": 1.3558, - "step": 5724 - }, - { - "epoch": 0.4403507422505961, - "learning_rate": 0.001779448243890066, - "loss": 1.2087, - "step": 5725 - }, - { - "epoch": 0.4404276594108146, - "learning_rate": 0.0017790921177602117, - "loss": 0.8501, - "step": 5726 - }, - { - "epoch": 0.440504576571033, - "learning_rate": 0.0017787359753338786, - "loss": 1.232, - "step": 5727 - }, - { - "epoch": 0.44058149373125144, - "learning_rate": 0.0017783798166318605, - "loss": 0.9937, - "step": 5728 - }, - { - "epoch": 0.4406584108914699, - "learning_rate": 0.001778023641674955, - "loss": 1.2035, - "step": 5729 - }, - { - "epoch": 0.44073532805168836, - "learning_rate": 0.0017776674504839593, - "loss": 0.9493, - "step": 5730 - }, - { - "epoch": 0.44081224521190676, - "learning_rate": 0.001777311243079672, - "loss": 1.2342, - "step": 5731 - }, - { - "epoch": 0.4408891623721252, - "learning_rate": 0.0017769550194828923, - "loss": 1.0352, - "step": 5732 - }, - { - "epoch": 0.4409660795323437, - "learning_rate": 0.0017765987797144203, - "loss": 0.9839, - "step": 5733 - }, - { - "epoch": 0.4410429966925621, - "learning_rate": 0.0017762425237950572, - "loss": 1.2379, - "step": 5734 - }, - { - "epoch": 0.44111991385278054, - "learning_rate": 0.0017758862517456048, - "loss": 1.3134, - "step": 5735 - }, - { - "epoch": 0.441196831012999, - "learning_rate": 0.0017755299635868673, - "loss": 1.2512, - "step": 5736 - }, - { - "epoch": 0.44127374817321746, - "learning_rate": 0.0017751736593396474, - "loss": 1.0343, - "step": 5737 - }, - { - "epoch": 0.44135066533343587, - "learning_rate": 0.0017748173390247518, - "loss": 1.2389, - "step": 5738 - }, - { - "epoch": 0.4414275824936543, - "learning_rate": 0.0017744610026629846, - "loss": 1.0723, - "step": 5739 - }, - { - "epoch": 0.4415044996538728, - "learning_rate": 0.0017741046502751535, - "loss": 
0.9816, - "step": 5740 - }, - { - "epoch": 0.44158141681409124, - "learning_rate": 0.001773748281882066, - "loss": 1.0717, - "step": 5741 - }, - { - "epoch": 0.44165833397430965, - "learning_rate": 0.0017733918975045314, - "loss": 1.069, - "step": 5742 - }, - { - "epoch": 0.4417352511345281, - "learning_rate": 0.001773035497163359, - "loss": 0.9084, - "step": 5743 - }, - { - "epoch": 0.44181216829474657, - "learning_rate": 0.0017726790808793589, - "loss": 1.2703, - "step": 5744 - }, - { - "epoch": 0.441889085454965, - "learning_rate": 0.0017723226486733436, - "loss": 1.0079, - "step": 5745 - }, - { - "epoch": 0.44196600261518343, - "learning_rate": 0.001771966200566125, - "loss": 1.2455, - "step": 5746 - }, - { - "epoch": 0.4420429197754019, - "learning_rate": 0.0017716097365785164, - "loss": 1.0852, - "step": 5747 - }, - { - "epoch": 0.44211983693562035, - "learning_rate": 0.0017712532567313321, - "loss": 1.359, - "step": 5748 - }, - { - "epoch": 0.4421967540958388, - "learning_rate": 0.0017708967610453876, - "loss": 0.7557, - "step": 5749 - }, - { - "epoch": 0.4422736712560572, - "learning_rate": 0.001770540249541499, - "loss": 1.3303, - "step": 5750 - }, - { - "epoch": 0.44235058841627567, - "learning_rate": 0.0017701837222404834, - "loss": 1.401, - "step": 5751 - }, - { - "epoch": 0.44242750557649413, - "learning_rate": 0.0017698271791631577, - "loss": 1.2947, - "step": 5752 - }, - { - "epoch": 0.4425044227367126, - "learning_rate": 0.0017694706203303428, - "loss": 0.7679, - "step": 5753 - }, - { - "epoch": 0.442581339896931, - "learning_rate": 0.0017691140457628573, - "loss": 1.3605, - "step": 5754 - }, - { - "epoch": 0.44265825705714945, - "learning_rate": 0.001768757455481522, - "loss": 1.3312, - "step": 5755 - }, - { - "epoch": 0.4427351742173679, - "learning_rate": 0.0017684008495071591, - "loss": 1.252, - "step": 5756 - }, - { - "epoch": 0.4428120913775863, - "learning_rate": 0.0017680442278605907, - "loss": 1.215, - "step": 5757 - }, - { - "epoch": 0.4428890085378048, - "learning_rate": 0.0017676875905626404, - "loss": 0.9961, - "step": 5758 - }, - { - "epoch": 0.44296592569802323, - "learning_rate": 0.0017673309376341328, - "loss": 0.9695, - "step": 5759 - }, - { - "epoch": 0.4430428428582417, - "learning_rate": 0.0017669742690958933, - "loss": 1.3749, - "step": 5760 - }, - { - "epoch": 0.4431197600184601, - "learning_rate": 0.0017666175849687478, - "loss": 1.0788, - "step": 5761 - }, - { - "epoch": 0.44319667717867856, - "learning_rate": 0.001766260885273524, - "loss": 1.4633, - "step": 5762 - }, - { - "epoch": 0.443273594338897, - "learning_rate": 0.001765904170031049, - "loss": 1.0799, - "step": 5763 - }, - { - "epoch": 0.4433505114991155, - "learning_rate": 0.0017655474392621526, - "loss": 1.1455, - "step": 5764 - }, - { - "epoch": 0.4434274286593339, - "learning_rate": 0.0017651906929876647, - "loss": 1.2592, - "step": 5765 - }, - { - "epoch": 0.44350434581955234, - "learning_rate": 0.0017648339312284155, - "loss": 1.2114, - "step": 5766 - }, - { - "epoch": 0.4435812629797708, - "learning_rate": 0.0017644771540052376, - "loss": 1.0834, - "step": 5767 - }, - { - "epoch": 0.44365818013998926, - "learning_rate": 0.0017641203613389625, - "loss": 1.2354, - "step": 5768 - }, - { - "epoch": 0.44373509730020766, - "learning_rate": 0.0017637635532504249, - "loss": 1.2626, - "step": 5769 - }, - { - "epoch": 0.4438120144604261, - "learning_rate": 0.001763406729760458, - "loss": 1.2834, - "step": 5770 - }, - { - "epoch": 0.4438889316206446, - "learning_rate": 0.0017630498908898977, - 
"loss": 1.4358, - "step": 5771 - }, - { - "epoch": 0.44396584878086304, - "learning_rate": 0.0017626930366595797, - "loss": 1.4712, - "step": 5772 - }, - { - "epoch": 0.44404276594108144, - "learning_rate": 0.0017623361670903426, - "loss": 1.3903, - "step": 5773 - }, - { - "epoch": 0.4441196831012999, - "learning_rate": 0.0017619792822030226, - "loss": 1.1489, - "step": 5774 - }, - { - "epoch": 0.44419660026151836, - "learning_rate": 0.0017616223820184594, - "loss": 1.3875, - "step": 5775 - }, - { - "epoch": 0.44427351742173676, - "learning_rate": 0.0017612654665574926, - "loss": 0.8662, - "step": 5776 - }, - { - "epoch": 0.4443504345819552, - "learning_rate": 0.001760908535840963, - "loss": 1.3263, - "step": 5777 - }, - { - "epoch": 0.4444273517421737, - "learning_rate": 0.0017605515898897121, - "loss": 1.2111, - "step": 5778 - }, - { - "epoch": 0.44450426890239214, - "learning_rate": 0.0017601946287245818, - "loss": 1.7218, - "step": 5779 - }, - { - "epoch": 0.44458118606261054, - "learning_rate": 0.0017598376523664172, - "loss": 1.4569, - "step": 5780 - }, - { - "epoch": 0.444658103222829, - "learning_rate": 0.0017594806608360603, - "loss": 1.1493, - "step": 5781 - }, - { - "epoch": 0.44473502038304746, - "learning_rate": 0.0017591236541543574, - "loss": 1.0143, - "step": 5782 - }, - { - "epoch": 0.4448119375432659, - "learning_rate": 0.001758766632342154, - "loss": 1.2816, - "step": 5783 - }, - { - "epoch": 0.4448888547034843, - "learning_rate": 0.001758409595420298, - "loss": 1.386, - "step": 5784 - }, - { - "epoch": 0.4449657718637028, - "learning_rate": 0.0017580525434096354, - "loss": 1.3543, - "step": 5785 - }, - { - "epoch": 0.44504268902392125, - "learning_rate": 0.0017576954763310168, - "loss": 1.3586, - "step": 5786 - }, - { - "epoch": 0.4451196061841397, - "learning_rate": 0.0017573383942052902, - "loss": 1.0857, - "step": 5787 - }, - { - "epoch": 0.4451965233443581, - "learning_rate": 0.0017569812970533068, - "loss": 1.2279, - "step": 5788 - }, - { - "epoch": 0.44527344050457657, - "learning_rate": 0.0017566241848959176, - "loss": 1.2447, - "step": 5789 - }, - { - "epoch": 0.445350357664795, - "learning_rate": 0.0017562670577539747, - "loss": 0.8943, - "step": 5790 - }, - { - "epoch": 0.4454272748250135, - "learning_rate": 0.0017559099156483312, - "loss": 1.0126, - "step": 5791 - }, - { - "epoch": 0.4455041919852319, - "learning_rate": 0.0017555527585998414, - "loss": 1.0517, - "step": 5792 - }, - { - "epoch": 0.44558110914545035, - "learning_rate": 0.0017551955866293596, - "loss": 1.0309, - "step": 5793 - }, - { - "epoch": 0.4456580263056688, - "learning_rate": 0.0017548383997577411, - "loss": 0.8542, - "step": 5794 - }, - { - "epoch": 0.4457349434658872, - "learning_rate": 0.0017544811980058437, - "loss": 1.2182, - "step": 5795 - }, - { - "epoch": 0.44581186062610567, - "learning_rate": 0.001754123981394523, - "loss": 1.0964, - "step": 5796 - }, - { - "epoch": 0.44588877778632413, - "learning_rate": 0.0017537667499446386, - "loss": 1.03, - "step": 5797 - }, - { - "epoch": 0.4459656949465426, - "learning_rate": 0.0017534095036770491, - "loss": 1.2094, - "step": 5798 - }, - { - "epoch": 0.446042612106761, - "learning_rate": 0.0017530522426126148, - "loss": 1.1494, - "step": 5799 - }, - { - "epoch": 0.44611952926697945, - "learning_rate": 0.0017526949667721959, - "loss": 1.0827, - "step": 5800 - }, - { - "epoch": 0.4461964464271979, - "learning_rate": 0.0017523376761766547, - "loss": 1.0839, - "step": 5801 - }, - { - "epoch": 0.44627336358741637, - "learning_rate": 
0.0017519803708468536, - "loss": 1.3063, - "step": 5802 - }, - { - "epoch": 0.4463502807476348, - "learning_rate": 0.0017516230508036565, - "loss": 1.3156, - "step": 5803 - }, - { - "epoch": 0.44642719790785323, - "learning_rate": 0.001751265716067927, - "loss": 1.3592, - "step": 5804 - }, - { - "epoch": 0.4465041150680717, - "learning_rate": 0.00175090836666053, - "loss": 0.8599, - "step": 5805 - }, - { - "epoch": 0.44658103222829015, - "learning_rate": 0.0017505510026023326, - "loss": 0.965, - "step": 5806 - }, - { - "epoch": 0.44665794938850856, - "learning_rate": 0.0017501936239142008, - "loss": 1.2277, - "step": 5807 - }, - { - "epoch": 0.446734866548727, - "learning_rate": 0.0017498362306170028, - "loss": 0.9069, - "step": 5808 - }, - { - "epoch": 0.4468117837089455, - "learning_rate": 0.0017494788227316062, - "loss": 1.3905, - "step": 5809 - }, - { - "epoch": 0.44688870086916394, - "learning_rate": 0.0017491214002788816, - "loss": 1.2333, - "step": 5810 - }, - { - "epoch": 0.44696561802938234, - "learning_rate": 0.001748763963279699, - "loss": 1.1398, - "step": 5811 - }, - { - "epoch": 0.4470425351896008, - "learning_rate": 0.0017484065117549289, - "loss": 1.2049, - "step": 5812 - }, - { - "epoch": 0.44711945234981926, - "learning_rate": 0.001748049045725444, - "loss": 1.0972, - "step": 5813 - }, - { - "epoch": 0.4471963695100377, - "learning_rate": 0.0017476915652121165, - "loss": 0.8411, - "step": 5814 - }, - { - "epoch": 0.4472732866702561, - "learning_rate": 0.001747334070235821, - "loss": 1.3048, - "step": 5815 - }, - { - "epoch": 0.4473502038304746, - "learning_rate": 0.0017469765608174306, - "loss": 0.9927, - "step": 5816 - }, - { - "epoch": 0.44742712099069304, - "learning_rate": 0.0017466190369778219, - "loss": 1.1908, - "step": 5817 - }, - { - "epoch": 0.44750403815091144, - "learning_rate": 0.0017462614987378707, - "loss": 1.0945, - "step": 5818 - }, - { - "epoch": 0.4475809553111299, - "learning_rate": 0.0017459039461184536, - "loss": 1.183, - "step": 5819 - }, - { - "epoch": 0.44765787247134836, - "learning_rate": 0.001745546379140449, - "loss": 0.9948, - "step": 5820 - }, - { - "epoch": 0.4477347896315668, - "learning_rate": 0.0017451887978247353, - "loss": 1.2216, - "step": 5821 - }, - { - "epoch": 0.4478117067917852, - "learning_rate": 0.0017448312021921923, - "loss": 1.1367, - "step": 5822 - }, - { - "epoch": 0.4478886239520037, - "learning_rate": 0.0017444735922637, - "loss": 0.8306, - "step": 5823 - }, - { - "epoch": 0.44796554111222214, - "learning_rate": 0.0017441159680601404, - "loss": 1.0624, - "step": 5824 - }, - { - "epoch": 0.4480424582724406, - "learning_rate": 0.0017437583296023944, - "loss": 1.1724, - "step": 5825 - }, - { - "epoch": 0.448119375432659, - "learning_rate": 0.0017434006769113462, - "loss": 1.2228, - "step": 5826 - }, - { - "epoch": 0.44819629259287747, - "learning_rate": 0.001743043010007878, - "loss": 1.3137, - "step": 5827 - }, - { - "epoch": 0.4482732097530959, - "learning_rate": 0.0017426853289128758, - "loss": 1.1691, - "step": 5828 - }, - { - "epoch": 0.4483501269133144, - "learning_rate": 0.0017423276336472238, - "loss": 1.2528, - "step": 5829 - }, - { - "epoch": 0.4484270440735328, - "learning_rate": 0.001741969924231809, - "loss": 1.4442, - "step": 5830 - }, - { - "epoch": 0.44850396123375125, - "learning_rate": 0.0017416122006875183, - "loss": 1.3941, - "step": 5831 - }, - { - "epoch": 0.4485808783939697, - "learning_rate": 0.0017412544630352389, - "loss": 1.0074, - "step": 5832 - }, - { - "epoch": 0.44865779555418817, - 
"learning_rate": 0.0017408967112958606, - "loss": 0.8635, - "step": 5833 - }, - { - "epoch": 0.44873471271440657, - "learning_rate": 0.0017405389454902718, - "loss": 1.213, - "step": 5834 - }, - { - "epoch": 0.44881162987462503, - "learning_rate": 0.0017401811656393637, - "loss": 1.4124, - "step": 5835 - }, - { - "epoch": 0.4488885470348435, - "learning_rate": 0.0017398233717640264, - "loss": 1.3143, - "step": 5836 - }, - { - "epoch": 0.4489654641950619, - "learning_rate": 0.0017394655638851533, - "loss": 0.8716, - "step": 5837 - }, - { - "epoch": 0.44904238135528035, - "learning_rate": 0.0017391077420236358, - "loss": 1.4332, - "step": 5838 - }, - { - "epoch": 0.4491192985154988, - "learning_rate": 0.0017387499062003684, - "loss": 1.2571, - "step": 5839 - }, - { - "epoch": 0.44919621567571727, - "learning_rate": 0.0017383920564362446, - "loss": 1.1221, - "step": 5840 - }, - { - "epoch": 0.4492731328359357, - "learning_rate": 0.0017380341927521604, - "loss": 0.9311, - "step": 5841 - }, - { - "epoch": 0.44935004999615413, - "learning_rate": 0.001737676315169012, - "loss": 1.265, - "step": 5842 - }, - { - "epoch": 0.4494269671563726, - "learning_rate": 0.0017373184237076953, - "loss": 1.0609, - "step": 5843 - }, - { - "epoch": 0.44950388431659105, - "learning_rate": 0.001736960518389109, - "loss": 1.4313, - "step": 5844 - }, - { - "epoch": 0.44958080147680946, - "learning_rate": 0.0017366025992341509, - "loss": 1.187, - "step": 5845 - }, - { - "epoch": 0.4496577186370279, - "learning_rate": 0.0017362446662637203, - "loss": 0.9655, - "step": 5846 - }, - { - "epoch": 0.4497346357972464, - "learning_rate": 0.0017358867194987176, - "loss": 1.354, - "step": 5847 - }, - { - "epoch": 0.44981155295746483, - "learning_rate": 0.0017355287589600435, - "loss": 1.2368, - "step": 5848 - }, - { - "epoch": 0.44988847011768324, - "learning_rate": 0.0017351707846685997, - "loss": 1.2649, - "step": 5849 - }, - { - "epoch": 0.4499653872779017, - "learning_rate": 0.0017348127966452889, - "loss": 0.961, - "step": 5850 - }, - { - "epoch": 0.45004230443812016, - "learning_rate": 0.0017344547949110138, - "loss": 1.3079, - "step": 5851 - }, - { - "epoch": 0.4501192215983386, - "learning_rate": 0.0017340967794866788, - "loss": 0.9241, - "step": 5852 - }, - { - "epoch": 0.450196138758557, - "learning_rate": 0.0017337387503931895, - "loss": 1.0215, - "step": 5853 - }, - { - "epoch": 0.4502730559187755, - "learning_rate": 0.0017333807076514503, - "loss": 1.0799, - "step": 5854 - }, - { - "epoch": 0.45034997307899394, - "learning_rate": 0.0017330226512823688, - "loss": 1.0383, - "step": 5855 - }, - { - "epoch": 0.45042689023921234, - "learning_rate": 0.0017326645813068517, - "loss": 1.2903, - "step": 5856 - }, - { - "epoch": 0.4505038073994308, - "learning_rate": 0.001732306497745807, - "loss": 1.0836, - "step": 5857 - }, - { - "epoch": 0.45058072455964926, - "learning_rate": 0.001731948400620144, - "loss": 0.9747, - "step": 5858 - }, - { - "epoch": 0.4506576417198677, - "learning_rate": 0.0017315902899507722, - "loss": 1.3402, - "step": 5859 - }, - { - "epoch": 0.4507345588800861, - "learning_rate": 0.0017312321657586018, - "loss": 1.0289, - "step": 5860 - }, - { - "epoch": 0.4508114760403046, - "learning_rate": 0.0017308740280645447, - "loss": 1.0714, - "step": 5861 - }, - { - "epoch": 0.45088839320052304, - "learning_rate": 0.0017305158768895121, - "loss": 1.3094, - "step": 5862 - }, - { - "epoch": 0.4509653103607415, - "learning_rate": 0.0017301577122544175, - "loss": 1.3609, - "step": 5863 - }, - { - "epoch": 
0.4510422275209599, - "learning_rate": 0.001729799534180174, - "loss": 1.097, - "step": 5864 - }, - { - "epoch": 0.45111914468117836, - "learning_rate": 0.0017294413426876961, - "loss": 1.1695, - "step": 5865 - }, - { - "epoch": 0.4511960618413968, - "learning_rate": 0.0017290831377978991, - "loss": 1.1512, - "step": 5866 - }, - { - "epoch": 0.4512729790016153, - "learning_rate": 0.0017287249195316989, - "loss": 1.2946, - "step": 5867 - }, - { - "epoch": 0.4513498961618337, - "learning_rate": 0.0017283666879100127, - "loss": 1.1683, - "step": 5868 - }, - { - "epoch": 0.45142681332205215, - "learning_rate": 0.0017280084429537573, - "loss": 1.0327, - "step": 5869 - }, - { - "epoch": 0.4515037304822706, - "learning_rate": 0.0017276501846838514, - "loss": 1.0762, - "step": 5870 - }, - { - "epoch": 0.45158064764248906, - "learning_rate": 0.0017272919131212138, - "loss": 1.0974, - "step": 5871 - }, - { - "epoch": 0.45165756480270747, - "learning_rate": 0.001726933628286765, - "loss": 1.1356, - "step": 5872 - }, - { - "epoch": 0.4517344819629259, - "learning_rate": 0.0017265753302014245, - "loss": 0.9996, - "step": 5873 - }, - { - "epoch": 0.4518113991231444, - "learning_rate": 0.0017262170188861144, - "loss": 1.367, - "step": 5874 - }, - { - "epoch": 0.45188831628336285, - "learning_rate": 0.001725858694361757, - "loss": 1.3565, - "step": 5875 - }, - { - "epoch": 0.45196523344358125, - "learning_rate": 0.001725500356649275, - "loss": 1.0324, - "step": 5876 - }, - { - "epoch": 0.4520421506037997, - "learning_rate": 0.0017251420057695923, - "loss": 1.1557, - "step": 5877 - }, - { - "epoch": 0.45211906776401817, - "learning_rate": 0.0017247836417436332, - "loss": 1.046, - "step": 5878 - }, - { - "epoch": 0.45219598492423657, - "learning_rate": 0.0017244252645923232, - "loss": 1.2456, - "step": 5879 - }, - { - "epoch": 0.45227290208445503, - "learning_rate": 0.0017240668743365877, - "loss": 0.9816, - "step": 5880 - }, - { - "epoch": 0.4523498192446735, - "learning_rate": 0.001723708470997354, - "loss": 0.8587, - "step": 5881 - }, - { - "epoch": 0.45242673640489195, - "learning_rate": 0.0017233500545955492, - "loss": 1.2587, - "step": 5882 - }, - { - "epoch": 0.45250365356511035, - "learning_rate": 0.0017229916251521027, - "loss": 1.4925, - "step": 5883 - }, - { - "epoch": 0.4525805707253288, - "learning_rate": 0.0017226331826879421, - "loss": 0.8827, - "step": 5884 - }, - { - "epoch": 0.45265748788554727, - "learning_rate": 0.0017222747272239987, - "loss": 1.0074, - "step": 5885 - }, - { - "epoch": 0.45273440504576573, - "learning_rate": 0.0017219162587812015, - "loss": 0.9539, - "step": 5886 - }, - { - "epoch": 0.45281132220598413, - "learning_rate": 0.0017215577773804832, - "loss": 1.323, - "step": 5887 - }, - { - "epoch": 0.4528882393662026, - "learning_rate": 0.0017211992830427755, - "loss": 1.3439, - "step": 5888 - }, - { - "epoch": 0.45296515652642105, - "learning_rate": 0.0017208407757890107, - "loss": 1.5041, - "step": 5889 - }, - { - "epoch": 0.4530420736866395, - "learning_rate": 0.001720482255640123, - "loss": 0.9133, - "step": 5890 - }, - { - "epoch": 0.4531189908468579, - "learning_rate": 0.0017201237226170468, - "loss": 1.3905, - "step": 5891 - }, - { - "epoch": 0.4531959080070764, - "learning_rate": 0.001719765176740717, - "loss": 1.0082, - "step": 5892 - }, - { - "epoch": 0.45327282516729483, - "learning_rate": 0.0017194066180320692, - "loss": 0.8672, - "step": 5893 - }, - { - "epoch": 0.4533497423275133, - "learning_rate": 0.0017190480465120405, - "loss": 1.1731, - "step": 5894 - }, 
- { - "epoch": 0.4534266594877317, - "learning_rate": 0.001718689462201568, - "loss": 1.1501, - "step": 5895 - }, - { - "epoch": 0.45350357664795016, - "learning_rate": 0.00171833086512159, - "loss": 1.1021, - "step": 5896 - }, - { - "epoch": 0.4535804938081686, - "learning_rate": 0.001717972255293045, - "loss": 1.3561, - "step": 5897 - }, - { - "epoch": 0.453657410968387, - "learning_rate": 0.0017176136327368735, - "loss": 1.106, - "step": 5898 - }, - { - "epoch": 0.4537343281286055, - "learning_rate": 0.0017172549974740147, - "loss": 1.1652, - "step": 5899 - }, - { - "epoch": 0.45381124528882394, - "learning_rate": 0.0017168963495254098, - "loss": 0.9884, - "step": 5900 - }, - { - "epoch": 0.4538881624490424, - "learning_rate": 0.0017165376889120018, - "loss": 1.6408, - "step": 5901 - }, - { - "epoch": 0.4539650796092608, - "learning_rate": 0.001716179015654732, - "loss": 1.4123, - "step": 5902 - }, - { - "epoch": 0.45404199676947926, - "learning_rate": 0.0017158203297745443, - "loss": 1.0873, - "step": 5903 - }, - { - "epoch": 0.4541189139296977, - "learning_rate": 0.0017154616312923825, - "loss": 0.9099, - "step": 5904 - }, - { - "epoch": 0.4541958310899162, - "learning_rate": 0.0017151029202291917, - "loss": 0.9838, - "step": 5905 - }, - { - "epoch": 0.4542727482501346, - "learning_rate": 0.0017147441966059173, - "loss": 0.8756, - "step": 5906 - }, - { - "epoch": 0.45434966541035304, - "learning_rate": 0.001714385460443505, - "loss": 0.9054, - "step": 5907 - }, - { - "epoch": 0.4544265825705715, - "learning_rate": 0.0017140267117629027, - "loss": 1.375, - "step": 5908 - }, - { - "epoch": 0.45450349973078996, - "learning_rate": 0.0017136679505850572, - "loss": 1.2602, - "step": 5909 - }, - { - "epoch": 0.45458041689100837, - "learning_rate": 0.001713309176930918, - "loss": 1.0451, - "step": 5910 - }, - { - "epoch": 0.4546573340512268, - "learning_rate": 0.0017129503908214331, - "loss": 1.2153, - "step": 5911 - }, - { - "epoch": 0.4547342512114453, - "learning_rate": 0.001712591592277553, - "loss": 1.3271, - "step": 5912 - }, - { - "epoch": 0.45481116837166374, - "learning_rate": 0.0017122327813202283, - "loss": 1.3862, - "step": 5913 - }, - { - "epoch": 0.45488808553188215, - "learning_rate": 0.001711873957970411, - "loss": 1.17, - "step": 5914 - }, - { - "epoch": 0.4549650026921006, - "learning_rate": 0.0017115151222490516, - "loss": 1.4342, - "step": 5915 - }, - { - "epoch": 0.45504191985231907, - "learning_rate": 0.0017111562741771043, - "loss": 1.3069, - "step": 5916 - }, - { - "epoch": 0.45511883701253747, - "learning_rate": 0.0017107974137755219, - "loss": 1.0684, - "step": 5917 - }, - { - "epoch": 0.45519575417275593, - "learning_rate": 0.0017104385410652585, - "loss": 1.1121, - "step": 5918 - }, - { - "epoch": 0.4552726713329744, - "learning_rate": 0.0017100796560672697, - "loss": 1.2236, - "step": 5919 - }, - { - "epoch": 0.45534958849319285, - "learning_rate": 0.0017097207588025105, - "loss": 1.1302, - "step": 5920 - }, - { - "epoch": 0.45542650565341125, - "learning_rate": 0.0017093618492919386, - "loss": 1.1827, - "step": 5921 - }, - { - "epoch": 0.4555034228136297, - "learning_rate": 0.0017090029275565093, - "loss": 1.0719, - "step": 5922 - }, - { - "epoch": 0.45558033997384817, - "learning_rate": 0.0017086439936171815, - "loss": 1.2071, - "step": 5923 - }, - { - "epoch": 0.45565725713406663, - "learning_rate": 0.0017082850474949129, - "loss": 1.2975, - "step": 5924 - }, - { - "epoch": 0.45573417429428503, - "learning_rate": 0.0017079260892106643, - "loss": 1.4163, - 
"step": 5925 - }, - { - "epoch": 0.4558110914545035, - "learning_rate": 0.001707567118785394, - "loss": 1.0864, - "step": 5926 - }, - { - "epoch": 0.45588800861472195, - "learning_rate": 0.0017072081362400635, - "loss": 1.3652, - "step": 5927 - }, - { - "epoch": 0.4559649257749404, - "learning_rate": 0.001706849141595634, - "loss": 1.2553, - "step": 5928 - }, - { - "epoch": 0.4560418429351588, - "learning_rate": 0.0017064901348730673, - "loss": 1.3937, - "step": 5929 - }, - { - "epoch": 0.4561187600953773, - "learning_rate": 0.0017061311160933268, - "loss": 0.9029, - "step": 5930 - }, - { - "epoch": 0.45619567725559573, - "learning_rate": 0.0017057720852773754, - "loss": 1.3697, - "step": 5931 - }, - { - "epoch": 0.4562725944158142, - "learning_rate": 0.0017054130424461775, - "loss": 0.9505, - "step": 5932 - }, - { - "epoch": 0.4563495115760326, - "learning_rate": 0.001705053987620698, - "loss": 0.9906, - "step": 5933 - }, - { - "epoch": 0.45642642873625106, - "learning_rate": 0.0017046949208219028, - "loss": 1.4199, - "step": 5934 - }, - { - "epoch": 0.4565033458964695, - "learning_rate": 0.0017043358420707568, - "loss": 1.0986, - "step": 5935 - }, - { - "epoch": 0.456580263056688, - "learning_rate": 0.0017039767513882293, - "loss": 1.0399, - "step": 5936 - }, - { - "epoch": 0.4566571802169064, - "learning_rate": 0.0017036176487952859, - "loss": 1.2513, - "step": 5937 - }, - { - "epoch": 0.45673409737712484, - "learning_rate": 0.001703258534312896, - "loss": 0.9261, - "step": 5938 - }, - { - "epoch": 0.4568110145373433, - "learning_rate": 0.0017028994079620284, - "loss": 1.0229, - "step": 5939 - }, - { - "epoch": 0.4568879316975617, - "learning_rate": 0.001702540269763653, - "loss": 0.8337, - "step": 5940 - }, - { - "epoch": 0.45696484885778016, - "learning_rate": 0.0017021811197387403, - "loss": 1.044, - "step": 5941 - }, - { - "epoch": 0.4570417660179986, - "learning_rate": 0.0017018219579082608, - "loss": 1.3212, - "step": 5942 - }, - { - "epoch": 0.4571186831782171, - "learning_rate": 0.0017014627842931872, - "loss": 1.3879, - "step": 5943 - }, - { - "epoch": 0.4571956003384355, - "learning_rate": 0.001701103598914492, - "loss": 0.8131, - "step": 5944 - }, - { - "epoch": 0.45727251749865394, - "learning_rate": 0.001700744401793148, - "loss": 1.0003, - "step": 5945 - }, - { - "epoch": 0.4573494346588724, - "learning_rate": 0.0017003851929501283, - "loss": 0.9824, - "step": 5946 - }, - { - "epoch": 0.45742635181909086, - "learning_rate": 0.0017000259724064093, - "loss": 1.1699, - "step": 5947 - }, - { - "epoch": 0.45750326897930926, - "learning_rate": 0.001699666740182965, - "loss": 1.2166, - "step": 5948 - }, - { - "epoch": 0.4575801861395277, - "learning_rate": 0.0016993074963007715, - "loss": 1.1913, - "step": 5949 - }, - { - "epoch": 0.4576571032997462, - "learning_rate": 0.0016989482407808055, - "loss": 1.3081, - "step": 5950 - }, - { - "epoch": 0.45773402045996464, - "learning_rate": 0.001698588973644045, - "loss": 1.4225, - "step": 5951 - }, - { - "epoch": 0.45781093762018304, - "learning_rate": 0.0016982296949114666, - "loss": 0.9996, - "step": 5952 - }, - { - "epoch": 0.4578878547804015, - "learning_rate": 0.00169787040460405, - "loss": 1.4416, - "step": 5953 - }, - { - "epoch": 0.45796477194061996, - "learning_rate": 0.0016975111027427744, - "loss": 1.1461, - "step": 5954 - }, - { - "epoch": 0.4580416891008384, - "learning_rate": 0.0016971517893486193, - "loss": 1.0895, - "step": 5955 - }, - { - "epoch": 0.4581186062610568, - "learning_rate": 0.0016967924644425666, - "loss": 
1.5107, - "step": 5956 - }, - { - "epoch": 0.4581955234212753, - "learning_rate": 0.001696433128045596, - "loss": 1.0817, - "step": 5957 - }, - { - "epoch": 0.45827244058149375, - "learning_rate": 0.0016960737801786907, - "loss": 1.1311, - "step": 5958 - }, - { - "epoch": 0.45834935774171215, - "learning_rate": 0.001695714420862833, - "loss": 1.1138, - "step": 5959 - }, - { - "epoch": 0.4584262749019306, - "learning_rate": 0.0016953550501190066, - "loss": 1.1194, - "step": 5960 - }, - { - "epoch": 0.45850319206214907, - "learning_rate": 0.0016949956679681944, - "loss": 1.5385, - "step": 5961 - }, - { - "epoch": 0.4585801092223675, - "learning_rate": 0.0016946362744313821, - "loss": 1.1169, - "step": 5962 - }, - { - "epoch": 0.45865702638258593, - "learning_rate": 0.0016942768695295555, - "loss": 0.9635, - "step": 5963 - }, - { - "epoch": 0.4587339435428044, - "learning_rate": 0.0016939174532836997, - "loss": 0.9801, - "step": 5964 - }, - { - "epoch": 0.45881086070302285, - "learning_rate": 0.0016935580257148013, - "loss": 1.3425, - "step": 5965 - }, - { - "epoch": 0.4588877778632413, - "learning_rate": 0.0016931985868438486, - "loss": 1.2531, - "step": 5966 - }, - { - "epoch": 0.4589646950234597, - "learning_rate": 0.001692839136691829, - "loss": 0.8637, - "step": 5967 - }, - { - "epoch": 0.45904161218367817, - "learning_rate": 0.001692479675279731, - "loss": 1.1803, - "step": 5968 - }, - { - "epoch": 0.45911852934389663, - "learning_rate": 0.0016921202026285447, - "loss": 1.106, - "step": 5969 - }, - { - "epoch": 0.4591954465041151, - "learning_rate": 0.0016917607187592588, - "loss": 1.7642, - "step": 5970 - }, - { - "epoch": 0.4592723636643335, - "learning_rate": 0.001691401223692865, - "loss": 0.9494, - "step": 5971 - }, - { - "epoch": 0.45934928082455195, - "learning_rate": 0.0016910417174503544, - "loss": 1.2692, - "step": 5972 - }, - { - "epoch": 0.4594261979847704, - "learning_rate": 0.0016906822000527185, - "loss": 1.1, - "step": 5973 - }, - { - "epoch": 0.45950311514498887, - "learning_rate": 0.0016903226715209506, - "loss": 1.1767, - "step": 5974 - }, - { - "epoch": 0.4595800323052073, - "learning_rate": 0.0016899631318760436, - "loss": 1.1886, - "step": 5975 - }, - { - "epoch": 0.45965694946542573, - "learning_rate": 0.0016896035811389911, - "loss": 1.0692, - "step": 5976 - }, - { - "epoch": 0.4597338666256442, - "learning_rate": 0.001689244019330788, - "loss": 1.1346, - "step": 5977 - }, - { - "epoch": 0.45981078378586265, - "learning_rate": 0.0016888844464724294, - "loss": 1.1183, - "step": 5978 - }, - { - "epoch": 0.45988770094608106, - "learning_rate": 0.0016885248625849111, - "loss": 1.3074, - "step": 5979 - }, - { - "epoch": 0.4599646181062995, - "learning_rate": 0.0016881652676892294, - "loss": 1.518, - "step": 5980 - }, - { - "epoch": 0.460041535266518, - "learning_rate": 0.0016878056618063817, - "loss": 1.3334, - "step": 5981 - }, - { - "epoch": 0.4601184524267364, - "learning_rate": 0.001687446044957366, - "loss": 1.162, - "step": 5982 - }, - { - "epoch": 0.46019536958695484, - "learning_rate": 0.00168708641716318, - "loss": 1.3031, - "step": 5983 - }, - { - "epoch": 0.4602722867471733, - "learning_rate": 0.0016867267784448233, - "loss": 1.6319, - "step": 5984 - }, - { - "epoch": 0.46034920390739176, - "learning_rate": 0.0016863671288232956, - "loss": 1.1516, - "step": 5985 - }, - { - "epoch": 0.46042612106761016, - "learning_rate": 0.0016860074683195966, - "loss": 1.4387, - "step": 5986 - }, - { - "epoch": 0.4605030382278286, - "learning_rate": 0.001685647796954728, - 
"loss": 0.7825, - "step": 5987 - }, - { - "epoch": 0.4605799553880471, - "learning_rate": 0.0016852881147496907, - "loss": 1.2067, - "step": 5988 - }, - { - "epoch": 0.46065687254826554, - "learning_rate": 0.0016849284217254877, - "loss": 1.3889, - "step": 5989 - }, - { - "epoch": 0.46073378970848394, - "learning_rate": 0.0016845687179031215, - "loss": 1.0775, - "step": 5990 - }, - { - "epoch": 0.4608107068687024, - "learning_rate": 0.0016842090033035951, - "loss": 1.0139, - "step": 5991 - }, - { - "epoch": 0.46088762402892086, - "learning_rate": 0.001683849277947913, - "loss": 1.4182, - "step": 5992 - }, - { - "epoch": 0.4609645411891393, - "learning_rate": 0.00168348954185708, - "loss": 1.1822, - "step": 5993 - }, - { - "epoch": 0.4610414583493577, - "learning_rate": 0.0016831297950521016, - "loss": 1.0535, - "step": 5994 - }, - { - "epoch": 0.4611183755095762, - "learning_rate": 0.0016827700375539837, - "loss": 1.0001, - "step": 5995 - }, - { - "epoch": 0.46119529266979464, - "learning_rate": 0.0016824102693837326, - "loss": 1.5293, - "step": 5996 - }, - { - "epoch": 0.4612722098300131, - "learning_rate": 0.0016820504905623557, - "loss": 1.057, - "step": 5997 - }, - { - "epoch": 0.4613491269902315, - "learning_rate": 0.0016816907011108612, - "loss": 0.9821, - "step": 5998 - }, - { - "epoch": 0.46142604415044997, - "learning_rate": 0.0016813309010502574, - "loss": 1.3119, - "step": 5999 - }, - { - "epoch": 0.4615029613106684, - "learning_rate": 0.0016809710904015529, - "loss": 1.0554, - "step": 6000 - }, - { - "epoch": 0.46157987847088683, - "learning_rate": 0.0016806112691857584, - "loss": 1.0643, - "step": 6001 - }, - { - "epoch": 0.4616567956311053, - "learning_rate": 0.0016802514374238831, - "loss": 0.9915, - "step": 6002 - }, - { - "epoch": 0.46173371279132375, - "learning_rate": 0.0016798915951369386, - "loss": 1.2561, - "step": 6003 - }, - { - "epoch": 0.4618106299515422, - "learning_rate": 0.0016795317423459366, - "loss": 1.2189, - "step": 6004 - }, - { - "epoch": 0.4618875471117606, - "learning_rate": 0.0016791718790718892, - "loss": 1.2343, - "step": 6005 - }, - { - "epoch": 0.46196446427197907, - "learning_rate": 0.0016788120053358084, - "loss": 1.0122, - "step": 6006 - }, - { - "epoch": 0.46204138143219753, - "learning_rate": 0.0016784521211587087, - "loss": 1.3605, - "step": 6007 - }, - { - "epoch": 0.462118298592416, - "learning_rate": 0.0016780922265616034, - "loss": 1.2304, - "step": 6008 - }, - { - "epoch": 0.4621952157526344, - "learning_rate": 0.001677732321565508, - "loss": 0.8988, - "step": 6009 - }, - { - "epoch": 0.46227213291285285, - "learning_rate": 0.0016773724061914367, - "loss": 1.105, - "step": 6010 - }, - { - "epoch": 0.4623490500730713, - "learning_rate": 0.0016770124804604057, - "loss": 1.3922, - "step": 6011 - }, - { - "epoch": 0.46242596723328977, - "learning_rate": 0.0016766525443934314, - "loss": 1.0368, - "step": 6012 - }, - { - "epoch": 0.4625028843935082, - "learning_rate": 0.0016762925980115314, - "loss": 0.9141, - "step": 6013 - }, - { - "epoch": 0.46257980155372663, - "learning_rate": 0.0016759326413357224, - "loss": 1.1835, - "step": 6014 - }, - { - "epoch": 0.4626567187139451, - "learning_rate": 0.0016755726743870234, - "loss": 1.0215, - "step": 6015 - }, - { - "epoch": 0.46273363587416355, - "learning_rate": 0.0016752126971864526, - "loss": 1.1779, - "step": 6016 - }, - { - "epoch": 0.46281055303438196, - "learning_rate": 0.00167485270975503, - "loss": 1.2531, - "step": 6017 - }, - { - "epoch": 0.4628874701946004, - "learning_rate": 
0.0016744927121137756, - "loss": 1.2628, - "step": 6018 - }, - { - "epoch": 0.4629643873548189, - "learning_rate": 0.0016741327042837097, - "loss": 1.3413, - "step": 6019 - }, - { - "epoch": 0.4630413045150373, - "learning_rate": 0.0016737726862858543, - "loss": 1.256, - "step": 6020 - }, - { - "epoch": 0.46311822167525574, - "learning_rate": 0.00167341265814123, - "loss": 1.0495, - "step": 6021 - }, - { - "epoch": 0.4631951388354742, - "learning_rate": 0.0016730526198708601, - "loss": 1.2592, - "step": 6022 - }, - { - "epoch": 0.46327205599569266, - "learning_rate": 0.0016726925714957669, - "loss": 0.8827, - "step": 6023 - }, - { - "epoch": 0.46334897315591106, - "learning_rate": 0.0016723325130369754, - "loss": 0.9721, - "step": 6024 - }, - { - "epoch": 0.4634258903161295, - "learning_rate": 0.0016719724445155083, - "loss": 1.4127, - "step": 6025 - }, - { - "epoch": 0.463502807476348, - "learning_rate": 0.0016716123659523908, - "loss": 0.8854, - "step": 6026 - }, - { - "epoch": 0.46357972463656644, - "learning_rate": 0.0016712522773686485, - "loss": 1.5001, - "step": 6027 - }, - { - "epoch": 0.46365664179678484, - "learning_rate": 0.0016708921787853073, - "loss": 0.7913, - "step": 6028 - }, - { - "epoch": 0.4637335589570033, - "learning_rate": 0.001670532070223394, - "loss": 1.0606, - "step": 6029 - }, - { - "epoch": 0.46381047611722176, - "learning_rate": 0.0016701719517039349, - "loss": 1.1232, - "step": 6030 - }, - { - "epoch": 0.4638873932774402, - "learning_rate": 0.0016698118232479583, - "loss": 1.4346, - "step": 6031 - }, - { - "epoch": 0.4639643104376586, - "learning_rate": 0.0016694516848764927, - "loss": 1.0104, - "step": 6032 - }, - { - "epoch": 0.4640412275978771, - "learning_rate": 0.0016690915366105665, - "loss": 1.0202, - "step": 6033 - }, - { - "epoch": 0.46411814475809554, - "learning_rate": 0.001668731378471209, - "loss": 0.9859, - "step": 6034 - }, - { - "epoch": 0.464195061918314, - "learning_rate": 0.0016683712104794506, - "loss": 1.0598, - "step": 6035 - }, - { - "epoch": 0.4642719790785324, - "learning_rate": 0.0016680110326563217, - "loss": 1.3897, - "step": 6036 - }, - { - "epoch": 0.46434889623875086, - "learning_rate": 0.001667650845022854, - "loss": 1.1436, - "step": 6037 - }, - { - "epoch": 0.4644258133989693, - "learning_rate": 0.0016672906476000778, - "loss": 1.3955, - "step": 6038 - }, - { - "epoch": 0.4645027305591878, - "learning_rate": 0.001666930440409027, - "loss": 1.0686, - "step": 6039 - }, - { - "epoch": 0.4645796477194062, - "learning_rate": 0.0016665702234707337, - "loss": 0.8183, - "step": 6040 - }, - { - "epoch": 0.46465656487962465, - "learning_rate": 0.0016662099968062312, - "loss": 1.3284, - "step": 6041 - }, - { - "epoch": 0.4647334820398431, - "learning_rate": 0.001665849760436554, - "loss": 1.5238, - "step": 6042 - }, - { - "epoch": 0.4648103992000615, - "learning_rate": 0.0016654895143827367, - "loss": 1.0193, - "step": 6043 - }, - { - "epoch": 0.46488731636027997, - "learning_rate": 0.001665129258665814, - "loss": 1.0677, - "step": 6044 - }, - { - "epoch": 0.4649642335204984, - "learning_rate": 0.0016647689933068216, - "loss": 1.2368, - "step": 6045 - }, - { - "epoch": 0.4650411506807169, - "learning_rate": 0.001664408718326796, - "loss": 0.9672, - "step": 6046 - }, - { - "epoch": 0.4651180678409353, - "learning_rate": 0.0016640484337467742, - "loss": 1.1939, - "step": 6047 - }, - { - "epoch": 0.46519498500115375, - "learning_rate": 0.001663688139587793, - "loss": 0.9955, - "step": 6048 - }, - { - "epoch": 0.4652719021613722, - 
"learning_rate": 0.001663327835870891, - "loss": 0.8805, - "step": 6049 - }, - { - "epoch": 0.46534881932159067, - "learning_rate": 0.001662967522617106, - "loss": 1.0091, - "step": 6050 - }, - { - "epoch": 0.46542573648180907, - "learning_rate": 0.0016626071998474786, - "loss": 0.996, - "step": 6051 - }, - { - "epoch": 0.46550265364202753, - "learning_rate": 0.0016622468675830462, - "loss": 1.1809, - "step": 6052 - }, - { - "epoch": 0.465579570802246, - "learning_rate": 0.001661886525844851, - "loss": 1.4364, - "step": 6053 - }, - { - "epoch": 0.46565648796246445, - "learning_rate": 0.0016615261746539321, - "loss": 1.4154, - "step": 6054 - }, - { - "epoch": 0.46573340512268285, - "learning_rate": 0.0016611658140313324, - "loss": 1.246, - "step": 6055 - }, - { - "epoch": 0.4658103222829013, - "learning_rate": 0.001660805443998092, - "loss": 0.8958, - "step": 6056 - }, - { - "epoch": 0.46588723944311977, - "learning_rate": 0.0016604450645752544, - "loss": 1.1856, - "step": 6057 - }, - { - "epoch": 0.46596415660333823, - "learning_rate": 0.001660084675783862, - "loss": 1.2212, - "step": 6058 - }, - { - "epoch": 0.46604107376355663, - "learning_rate": 0.0016597242776449591, - "loss": 1.1431, - "step": 6059 - }, - { - "epoch": 0.4661179909237751, - "learning_rate": 0.001659363870179589, - "loss": 1.0644, - "step": 6060 - }, - { - "epoch": 0.46619490808399355, - "learning_rate": 0.001659003453408796, - "loss": 1.1554, - "step": 6061 - }, - { - "epoch": 0.46627182524421196, - "learning_rate": 0.0016586430273536265, - "loss": 0.9181, - "step": 6062 - }, - { - "epoch": 0.4663487424044304, - "learning_rate": 0.0016582825920351246, - "loss": 0.976, - "step": 6063 - }, - { - "epoch": 0.4664256595646489, - "learning_rate": 0.0016579221474743375, - "loss": 1.2245, - "step": 6064 - }, - { - "epoch": 0.46650257672486733, - "learning_rate": 0.0016575616936923114, - "loss": 1.2207, - "step": 6065 - }, - { - "epoch": 0.46657949388508574, - "learning_rate": 0.0016572012307100941, - "loss": 1.3792, - "step": 6066 - }, - { - "epoch": 0.4666564110453042, - "learning_rate": 0.0016568407585487329, - "loss": 1.3489, - "step": 6067 - }, - { - "epoch": 0.46673332820552266, - "learning_rate": 0.0016564802772292765, - "loss": 1.539, - "step": 6068 - }, - { - "epoch": 0.4668102453657411, - "learning_rate": 0.0016561197867727734, - "loss": 1.4299, - "step": 6069 - }, - { - "epoch": 0.4668871625259595, - "learning_rate": 0.0016557592872002732, - "loss": 0.9731, - "step": 6070 - }, - { - "epoch": 0.466964079686178, - "learning_rate": 0.001655398778532826, - "loss": 1.2043, - "step": 6071 - }, - { - "epoch": 0.46704099684639644, - "learning_rate": 0.0016550382607914823, - "loss": 0.9383, - "step": 6072 - }, - { - "epoch": 0.4671179140066149, - "learning_rate": 0.001654677733997293, - "loss": 1.1071, - "step": 6073 - }, - { - "epoch": 0.4671948311668333, - "learning_rate": 0.0016543171981713094, - "loss": 0.7593, - "step": 6074 - }, - { - "epoch": 0.46727174832705176, - "learning_rate": 0.001653956653334584, - "loss": 1.056, - "step": 6075 - }, - { - "epoch": 0.4673486654872702, - "learning_rate": 0.0016535960995081689, - "loss": 0.9245, - "step": 6076 - }, - { - "epoch": 0.4674255826474887, - "learning_rate": 0.0016532355367131176, - "loss": 1.3049, - "step": 6077 - }, - { - "epoch": 0.4675024998077071, - "learning_rate": 0.001652874964970484, - "loss": 1.1381, - "step": 6078 - }, - { - "epoch": 0.46757941696792554, - "learning_rate": 0.0016525143843013216, - "loss": 0.9822, - "step": 6079 - }, - { - "epoch": 
0.467656334128144, - "learning_rate": 0.0016521537947266852, - "loss": 1.0136, - "step": 6080 - }, - { - "epoch": 0.4677332512883624, - "learning_rate": 0.0016517931962676301, - "loss": 1.2798, - "step": 6081 - }, - { - "epoch": 0.46781016844858087, - "learning_rate": 0.0016514325889452125, - "loss": 1.2401, - "step": 6082 - }, - { - "epoch": 0.4678870856087993, - "learning_rate": 0.0016510719727804875, - "loss": 1.2683, - "step": 6083 - }, - { - "epoch": 0.4679640027690178, - "learning_rate": 0.0016507113477945133, - "loss": 1.091, - "step": 6084 - }, - { - "epoch": 0.4680409199292362, - "learning_rate": 0.001650350714008346, - "loss": 0.9794, - "step": 6085 - }, - { - "epoch": 0.46811783708945465, - "learning_rate": 0.001649990071443044, - "loss": 1.0378, - "step": 6086 - }, - { - "epoch": 0.4681947542496731, - "learning_rate": 0.001649629420119665, - "loss": 1.0775, - "step": 6087 - }, - { - "epoch": 0.46827167140989157, - "learning_rate": 0.0016492687600592685, - "loss": 1.3136, - "step": 6088 - }, - { - "epoch": 0.46834858857010997, - "learning_rate": 0.0016489080912829139, - "loss": 1.2109, - "step": 6089 - }, - { - "epoch": 0.46842550573032843, - "learning_rate": 0.0016485474138116605, - "loss": 1.06, - "step": 6090 - }, - { - "epoch": 0.4685024228905469, - "learning_rate": 0.0016481867276665683, - "loss": 1.2542, - "step": 6091 - }, - { - "epoch": 0.46857934005076535, - "learning_rate": 0.001647826032868699, - "loss": 1.1768, - "step": 6092 - }, - { - "epoch": 0.46865625721098375, - "learning_rate": 0.0016474653294391139, - "loss": 1.1091, - "step": 6093 - }, - { - "epoch": 0.4687331743712022, - "learning_rate": 0.0016471046173988737, - "loss": 1.4344, - "step": 6094 - }, - { - "epoch": 0.46881009153142067, - "learning_rate": 0.0016467438967690423, - "loss": 0.8547, - "step": 6095 - }, - { - "epoch": 0.46888700869163913, - "learning_rate": 0.0016463831675706815, - "loss": 1.3919, - "step": 6096 - }, - { - "epoch": 0.46896392585185753, - "learning_rate": 0.0016460224298248554, - "loss": 1.4495, - "step": 6097 - }, - { - "epoch": 0.469040843012076, - "learning_rate": 0.001645661683552627, - "loss": 1.2432, - "step": 6098 - }, - { - "epoch": 0.46911776017229445, - "learning_rate": 0.0016453009287750611, - "loss": 0.7972, - "step": 6099 - }, - { - "epoch": 0.4691946773325129, - "learning_rate": 0.0016449401655132224, - "loss": 1.3865, - "step": 6100 - }, - { - "epoch": 0.4692715944927313, - "learning_rate": 0.0016445793937881772, - "loss": 1.1304, - "step": 6101 - }, - { - "epoch": 0.4693485116529498, - "learning_rate": 0.0016442186136209898, - "loss": 0.9214, - "step": 6102 - }, - { - "epoch": 0.46942542881316823, - "learning_rate": 0.0016438578250327276, - "loss": 1.3439, - "step": 6103 - }, - { - "epoch": 0.46950234597338664, - "learning_rate": 0.0016434970280444568, - "loss": 1.0002, - "step": 6104 - }, - { - "epoch": 0.4695792631336051, - "learning_rate": 0.001643136222677245, - "loss": 1.0149, - "step": 6105 - }, - { - "epoch": 0.46965618029382356, - "learning_rate": 0.0016427754089521603, - "loss": 1.0766, - "step": 6106 - }, - { - "epoch": 0.469733097454042, - "learning_rate": 0.00164241458689027, - "loss": 0.9845, - "step": 6107 - }, - { - "epoch": 0.4698100146142604, - "learning_rate": 0.0016420537565126443, - "loss": 1.3217, - "step": 6108 - }, - { - "epoch": 0.4698869317744789, - "learning_rate": 0.0016416929178403514, - "loss": 1.1593, - "step": 6109 - }, - { - "epoch": 0.46996384893469734, - "learning_rate": 0.0016413320708944612, - "loss": 1.4989, - "step": 6110 - }, - { 
- "epoch": 0.4700407660949158, - "learning_rate": 0.001640971215696044, - "loss": 0.9828, - "step": 6111 - }, - { - "epoch": 0.4701176832551342, - "learning_rate": 0.0016406103522661711, - "loss": 1.1914, - "step": 6112 - }, - { - "epoch": 0.47019460041535266, - "learning_rate": 0.001640249480625913, - "loss": 0.7579, - "step": 6113 - }, - { - "epoch": 0.4702715175755711, - "learning_rate": 0.0016398886007963416, - "loss": 0.8694, - "step": 6114 - }, - { - "epoch": 0.4703484347357896, - "learning_rate": 0.0016395277127985285, - "loss": 0.8679, - "step": 6115 - }, - { - "epoch": 0.470425351896008, - "learning_rate": 0.0016391668166535474, - "loss": 0.9447, - "step": 6116 - }, - { - "epoch": 0.47050226905622644, - "learning_rate": 0.001638805912382471, - "loss": 1.3569, - "step": 6117 - }, - { - "epoch": 0.4705791862164449, - "learning_rate": 0.001638445000006372, - "loss": 1.097, - "step": 6118 - }, - { - "epoch": 0.47065610337666336, - "learning_rate": 0.0016380840795463258, - "loss": 1.1325, - "step": 6119 - }, - { - "epoch": 0.47073302053688176, - "learning_rate": 0.0016377231510234062, - "loss": 1.335, - "step": 6120 - }, - { - "epoch": 0.4708099376971002, - "learning_rate": 0.001637362214458688, - "loss": 1.3611, - "step": 6121 - }, - { - "epoch": 0.4708868548573187, - "learning_rate": 0.001637001269873247, - "loss": 1.2211, - "step": 6122 - }, - { - "epoch": 0.4709637720175371, - "learning_rate": 0.0016366403172881593, - "loss": 1.0244, - "step": 6123 - }, - { - "epoch": 0.47104068917775554, - "learning_rate": 0.001636279356724501, - "loss": 1.1608, - "step": 6124 - }, - { - "epoch": 0.471117606337974, - "learning_rate": 0.0016359183882033487, - "loss": 1.3197, - "step": 6125 - }, - { - "epoch": 0.47119452349819246, - "learning_rate": 0.0016355574117457802, - "loss": 1.0408, - "step": 6126 - }, - { - "epoch": 0.47127144065841087, - "learning_rate": 0.0016351964273728736, - "loss": 0.9261, - "step": 6127 - }, - { - "epoch": 0.4713483578186293, - "learning_rate": 0.0016348354351057065, - "loss": 1.2108, - "step": 6128 - }, - { - "epoch": 0.4714252749788478, - "learning_rate": 0.0016344744349653573, - "loss": 1.2655, - "step": 6129 - }, - { - "epoch": 0.47150219213906625, - "learning_rate": 0.001634113426972906, - "loss": 0.8518, - "step": 6130 - }, - { - "epoch": 0.47157910929928465, - "learning_rate": 0.0016337524111494325, - "loss": 1.2985, - "step": 6131 - }, - { - "epoch": 0.4716560264595031, - "learning_rate": 0.0016333913875160157, - "loss": 1.1613, - "step": 6132 - }, - { - "epoch": 0.47173294361972157, - "learning_rate": 0.0016330303560937367, - "loss": 0.7984, - "step": 6133 - }, - { - "epoch": 0.47180986077994, - "learning_rate": 0.0016326693169036771, - "loss": 1.2623, - "step": 6134 - }, - { - "epoch": 0.47188677794015843, - "learning_rate": 0.0016323082699669174, - "loss": 0.7766, - "step": 6135 - }, - { - "epoch": 0.4719636951003769, - "learning_rate": 0.00163194721530454, - "loss": 1.1435, - "step": 6136 - }, - { - "epoch": 0.47204061226059535, - "learning_rate": 0.0016315861529376272, - "loss": 1.1985, - "step": 6137 - }, - { - "epoch": 0.4721175294208138, - "learning_rate": 0.0016312250828872615, - "loss": 1.299, - "step": 6138 - }, - { - "epoch": 0.4721944465810322, - "learning_rate": 0.0016308640051745274, - "loss": 1.5076, - "step": 6139 - }, - { - "epoch": 0.47227136374125067, - "learning_rate": 0.0016305029198205067, - "loss": 1.1281, - "step": 6140 - }, - { - "epoch": 0.47234828090146913, - "learning_rate": 0.001630141826846285, - "loss": 1.2295, - "step": 6141 - 
}, - { - "epoch": 0.4724251980616876, - "learning_rate": 0.0016297807262729458, - "loss": 1.2271, - "step": 6142 - }, - { - "epoch": 0.472502115221906, - "learning_rate": 0.0016294196181215756, - "loss": 1.1294, - "step": 6143 - }, - { - "epoch": 0.47257903238212445, - "learning_rate": 0.0016290585024132582, - "loss": 1.0136, - "step": 6144 - }, - { - "epoch": 0.4726559495423429, - "learning_rate": 0.0016286973791690808, - "loss": 1.2991, - "step": 6145 - }, - { - "epoch": 0.4727328667025613, - "learning_rate": 0.0016283362484101295, - "loss": 1.1902, - "step": 6146 - }, - { - "epoch": 0.4728097838627798, - "learning_rate": 0.0016279751101574902, - "loss": 1.0617, - "step": 6147 - }, - { - "epoch": 0.47288670102299823, - "learning_rate": 0.0016276139644322515, - "loss": 1.0008, - "step": 6148 - }, - { - "epoch": 0.4729636181832167, - "learning_rate": 0.0016272528112555, - "loss": 0.99, - "step": 6149 - }, - { - "epoch": 0.4730405353434351, - "learning_rate": 0.0016268916506483246, - "loss": 1.2492, - "step": 6150 - }, - { - "epoch": 0.47311745250365356, - "learning_rate": 0.0016265304826318134, - "loss": 1.0925, - "step": 6151 - }, - { - "epoch": 0.473194369663872, - "learning_rate": 0.0016261693072270552, - "loss": 1.3133, - "step": 6152 - }, - { - "epoch": 0.4732712868240905, - "learning_rate": 0.0016258081244551398, - "loss": 1.3241, - "step": 6153 - }, - { - "epoch": 0.4733482039843089, - "learning_rate": 0.0016254469343371573, - "loss": 1.0899, - "step": 6154 - }, - { - "epoch": 0.47342512114452734, - "learning_rate": 0.0016250857368941968, - "loss": 1.1011, - "step": 6155 - }, - { - "epoch": 0.4735020383047458, - "learning_rate": 0.0016247245321473504, - "loss": 1.2659, - "step": 6156 - }, - { - "epoch": 0.47357895546496426, - "learning_rate": 0.0016243633201177081, - "loss": 1.2514, - "step": 6157 - }, - { - "epoch": 0.47365587262518266, - "learning_rate": 0.0016240021008263623, - "loss": 1.1454, - "step": 6158 - }, - { - "epoch": 0.4737327897854011, - "learning_rate": 0.0016236408742944046, - "loss": 0.8226, - "step": 6159 - }, - { - "epoch": 0.4738097069456196, - "learning_rate": 0.001623279640542927, - "loss": 0.9157, - "step": 6160 - }, - { - "epoch": 0.47388662410583804, - "learning_rate": 0.001622918399593023, - "loss": 0.8018, - "step": 6161 - }, - { - "epoch": 0.47396354126605644, - "learning_rate": 0.0016225571514657854, - "loss": 0.9245, - "step": 6162 - }, - { - "epoch": 0.4740404584262749, - "learning_rate": 0.0016221958961823082, - "loss": 0.8069, - "step": 6163 - }, - { - "epoch": 0.47411737558649336, - "learning_rate": 0.001621834633763685, - "loss": 1.6641, - "step": 6164 - }, - { - "epoch": 0.47419429274671177, - "learning_rate": 0.0016214733642310112, - "loss": 1.115, - "step": 6165 - }, - { - "epoch": 0.4742712099069302, - "learning_rate": 0.0016211120876053803, - "loss": 0.8321, - "step": 6166 - }, - { - "epoch": 0.4743481270671487, - "learning_rate": 0.001620750803907889, - "loss": 1.2554, - "step": 6167 - }, - { - "epoch": 0.47442504422736714, - "learning_rate": 0.0016203895131596316, - "loss": 1.6233, - "step": 6168 - }, - { - "epoch": 0.47450196138758555, - "learning_rate": 0.0016200282153817059, - "loss": 1.4526, - "step": 6169 - }, - { - "epoch": 0.474578878547804, - "learning_rate": 0.0016196669105952074, - "loss": 1.1712, - "step": 6170 - }, - { - "epoch": 0.47465579570802247, - "learning_rate": 0.0016193055988212328, - "loss": 0.8857, - "step": 6171 - }, - { - "epoch": 0.4747327128682409, - "learning_rate": 0.0016189442800808806, - "loss": 1.0663, - 
"step": 6172 - }, - { - "epoch": 0.47480963002845933, - "learning_rate": 0.0016185829543952476, - "loss": 1.2616, - "step": 6173 - }, - { - "epoch": 0.4748865471886778, - "learning_rate": 0.0016182216217854324, - "loss": 0.761, - "step": 6174 - }, - { - "epoch": 0.47496346434889625, - "learning_rate": 0.001617860282272533, - "loss": 1.0008, - "step": 6175 - }, - { - "epoch": 0.4750403815091147, - "learning_rate": 0.0016174989358776494, - "loss": 1.1151, - "step": 6176 - }, - { - "epoch": 0.4751172986693331, - "learning_rate": 0.0016171375826218807, - "loss": 1.0568, - "step": 6177 - }, - { - "epoch": 0.47519421582955157, - "learning_rate": 0.0016167762225263262, - "loss": 1.0713, - "step": 6178 - }, - { - "epoch": 0.47527113298977003, - "learning_rate": 0.0016164148556120864, - "loss": 1.195, - "step": 6179 - }, - { - "epoch": 0.4753480501499885, - "learning_rate": 0.0016160534819002618, - "loss": 1.4284, - "step": 6180 - }, - { - "epoch": 0.4754249673102069, - "learning_rate": 0.0016156921014119532, - "loss": 1.3352, - "step": 6181 - }, - { - "epoch": 0.47550188447042535, - "learning_rate": 0.0016153307141682624, - "loss": 1.26, - "step": 6182 - }, - { - "epoch": 0.4755788016306438, - "learning_rate": 0.0016149693201902913, - "loss": 1.2073, - "step": 6183 - }, - { - "epoch": 0.4756557187908622, - "learning_rate": 0.0016146079194991416, - "loss": 1.2384, - "step": 6184 - }, - { - "epoch": 0.4757326359510807, - "learning_rate": 0.0016142465121159158, - "loss": 1.0126, - "step": 6185 - }, - { - "epoch": 0.47580955311129913, - "learning_rate": 0.0016138850980617172, - "loss": 1.0965, - "step": 6186 - }, - { - "epoch": 0.4758864702715176, - "learning_rate": 0.0016135236773576493, - "loss": 1.1451, - "step": 6187 - }, - { - "epoch": 0.475963387431736, - "learning_rate": 0.0016131622500248153, - "loss": 1.4201, - "step": 6188 - }, - { - "epoch": 0.47604030459195446, - "learning_rate": 0.00161280081608432, - "loss": 1.0825, - "step": 6189 - }, - { - "epoch": 0.4761172217521729, - "learning_rate": 0.0016124393755572667, - "loss": 1.3944, - "step": 6190 - }, - { - "epoch": 0.4761941389123914, - "learning_rate": 0.0016120779284647618, - "loss": 1.0913, - "step": 6191 - }, - { - "epoch": 0.4762710560726098, - "learning_rate": 0.0016117164748279096, - "loss": 1.2932, - "step": 6192 - }, - { - "epoch": 0.47634797323282824, - "learning_rate": 0.0016113550146678157, - "loss": 1.6216, - "step": 6193 - }, - { - "epoch": 0.4764248903930467, - "learning_rate": 0.001610993548005587, - "loss": 1.0621, - "step": 6194 - }, - { - "epoch": 0.47650180755326516, - "learning_rate": 0.0016106320748623287, - "loss": 1.107, - "step": 6195 - }, - { - "epoch": 0.47657872471348356, - "learning_rate": 0.0016102705952591493, - "loss": 1.1866, - "step": 6196 - }, - { - "epoch": 0.476655641873702, - "learning_rate": 0.0016099091092171538, - "loss": 1.0801, - "step": 6197 - }, - { - "epoch": 0.4767325590339205, - "learning_rate": 0.0016095476167574516, - "loss": 1.7075, - "step": 6198 - }, - { - "epoch": 0.47680947619413894, - "learning_rate": 0.0016091861179011492, - "loss": 1.1463, - "step": 6199 - }, - { - "epoch": 0.47688639335435734, - "learning_rate": 0.0016088246126693562, - "loss": 1.0707, - "step": 6200 - }, - { - "epoch": 0.4769633105145758, - "learning_rate": 0.0016084631010831808, - "loss": 1.2336, - "step": 6201 - }, - { - "epoch": 0.47704022767479426, - "learning_rate": 0.0016081015831637313, - "loss": 1.0221, - "step": 6202 - }, - { - "epoch": 0.4771171448350127, - "learning_rate": 0.001607740058932118, - 
"loss": 1.0629, - "step": 6203 - }, - { - "epoch": 0.4771940619952311, - "learning_rate": 0.0016073785284094503, - "loss": 1.1565, - "step": 6204 - }, - { - "epoch": 0.4772709791554496, - "learning_rate": 0.0016070169916168385, - "loss": 0.7952, - "step": 6205 - }, - { - "epoch": 0.47734789631566804, - "learning_rate": 0.0016066554485753928, - "loss": 1.104, - "step": 6206 - }, - { - "epoch": 0.47742481347588644, - "learning_rate": 0.001606293899306225, - "loss": 0.8004, - "step": 6207 - }, - { - "epoch": 0.4775017306361049, - "learning_rate": 0.001605932343830445, - "loss": 1.6401, - "step": 6208 - }, - { - "epoch": 0.47757864779632336, - "learning_rate": 0.001605570782169165, - "loss": 1.1741, - "step": 6209 - }, - { - "epoch": 0.4776555649565418, - "learning_rate": 0.001605209214343497, - "loss": 1.1576, - "step": 6210 - }, - { - "epoch": 0.4777324821167602, - "learning_rate": 0.0016048476403745537, - "loss": 0.8853, - "step": 6211 - }, - { - "epoch": 0.4778093992769787, - "learning_rate": 0.001604486060283447, - "loss": 1.0119, - "step": 6212 - }, - { - "epoch": 0.47788631643719715, - "learning_rate": 0.0016041244740912905, - "loss": 1.1708, - "step": 6213 - }, - { - "epoch": 0.4779632335974156, - "learning_rate": 0.0016037628818191976, - "loss": 1.12, - "step": 6214 - }, - { - "epoch": 0.478040150757634, - "learning_rate": 0.0016034012834882819, - "loss": 1.8501, - "step": 6215 - }, - { - "epoch": 0.47811706791785247, - "learning_rate": 0.0016030396791196573, - "loss": 0.99, - "step": 6216 - }, - { - "epoch": 0.4781939850780709, - "learning_rate": 0.0016026780687344385, - "loss": 1.0905, - "step": 6217 - }, - { - "epoch": 0.4782709022382894, - "learning_rate": 0.0016023164523537405, - "loss": 0.7269, - "step": 6218 - }, - { - "epoch": 0.4783478193985078, - "learning_rate": 0.0016019548299986781, - "loss": 1.0102, - "step": 6219 - }, - { - "epoch": 0.47842473655872625, - "learning_rate": 0.001601593201690367, - "loss": 1.033, - "step": 6220 - }, - { - "epoch": 0.4785016537189447, - "learning_rate": 0.0016012315674499222, - "loss": 1.0176, - "step": 6221 - }, - { - "epoch": 0.47857857087916317, - "learning_rate": 0.0016008699272984615, - "loss": 1.1314, - "step": 6222 - }, - { - "epoch": 0.47865548803938157, - "learning_rate": 0.0016005082812571008, - "loss": 1.1402, - "step": 6223 - }, - { - "epoch": 0.47873240519960003, - "learning_rate": 0.001600146629346956, - "loss": 1.4318, - "step": 6224 - }, - { - "epoch": 0.4788093223598185, - "learning_rate": 0.0015997849715891457, - "loss": 0.8946, - "step": 6225 - }, - { - "epoch": 0.4788862395200369, - "learning_rate": 0.0015994233080047866, - "loss": 1.1351, - "step": 6226 - }, - { - "epoch": 0.47896315668025535, - "learning_rate": 0.0015990616386149972, - "loss": 1.3198, - "step": 6227 - }, - { - "epoch": 0.4790400738404738, - "learning_rate": 0.0015986999634408955, - "loss": 1.1457, - "step": 6228 - }, - { - "epoch": 0.47911699100069227, - "learning_rate": 0.0015983382825035997, - "loss": 1.274, - "step": 6229 - }, - { - "epoch": 0.4791939081609107, - "learning_rate": 0.0015979765958242294, - "loss": 1.2721, - "step": 6230 - }, - { - "epoch": 0.47927082532112913, - "learning_rate": 0.0015976149034239039, - "loss": 1.0324, - "step": 6231 - }, - { - "epoch": 0.4793477424813476, - "learning_rate": 0.0015972532053237418, - "loss": 1.294, - "step": 6232 - }, - { - "epoch": 0.47942465964156605, - "learning_rate": 0.001596891501544864, - "loss": 1.0118, - "step": 6233 - }, - { - "epoch": 0.47950157680178446, - "learning_rate": 
0.0015965297921083903, - "loss": 0.8374, - "step": 6234 - }, - { - "epoch": 0.4795784939620029, - "learning_rate": 0.0015961680770354416, - "loss": 1.227, - "step": 6235 - }, - { - "epoch": 0.4796554111222214, - "learning_rate": 0.0015958063563471384, - "loss": 1.3112, - "step": 6236 - }, - { - "epoch": 0.47973232828243983, - "learning_rate": 0.0015954446300646026, - "loss": 1.1344, - "step": 6237 - }, - { - "epoch": 0.47980924544265824, - "learning_rate": 0.0015950828982089552, - "loss": 0.9309, - "step": 6238 - }, - { - "epoch": 0.4798861626028767, - "learning_rate": 0.0015947211608013181, - "loss": 0.9173, - "step": 6239 - }, - { - "epoch": 0.47996307976309516, - "learning_rate": 0.0015943594178628143, - "loss": 1.324, - "step": 6240 - }, - { - "epoch": 0.4800399969233136, - "learning_rate": 0.0015939976694145648, - "loss": 1.3338, - "step": 6241 - }, - { - "epoch": 0.480116914083532, - "learning_rate": 0.0015936359154776942, - "loss": 1.3014, - "step": 6242 - }, - { - "epoch": 0.4801938312437505, - "learning_rate": 0.0015932741560733246, - "loss": 0.8919, - "step": 6243 - }, - { - "epoch": 0.48027074840396894, - "learning_rate": 0.00159291239122258, - "loss": 1.0611, - "step": 6244 - }, - { - "epoch": 0.48034766556418734, - "learning_rate": 0.001592550620946584, - "loss": 0.802, - "step": 6245 - }, - { - "epoch": 0.4804245827244058, - "learning_rate": 0.0015921888452664604, - "loss": 1.552, - "step": 6246 - }, - { - "epoch": 0.48050149988462426, - "learning_rate": 0.0015918270642033346, - "loss": 1.3838, - "step": 6247 - }, - { - "epoch": 0.4805784170448427, - "learning_rate": 0.0015914652777783303, - "loss": 1.3279, - "step": 6248 - }, - { - "epoch": 0.4806553342050611, - "learning_rate": 0.001591103486012574, - "loss": 0.9805, - "step": 6249 - }, - { - "epoch": 0.4807322513652796, - "learning_rate": 0.001590741688927189, - "loss": 1.1158, - "step": 6250 - }, - { - "epoch": 0.48080916852549804, - "learning_rate": 0.0015903798865433032, - "loss": 1.1117, - "step": 6251 - }, - { - "epoch": 0.4808860856857165, - "learning_rate": 0.0015900180788820408, - "loss": 1.1248, - "step": 6252 - }, - { - "epoch": 0.4809630028459349, - "learning_rate": 0.0015896562659645298, - "loss": 0.9676, - "step": 6253 - }, - { - "epoch": 0.48103992000615337, - "learning_rate": 0.0015892944478118956, - "loss": 1.0328, - "step": 6254 - }, - { - "epoch": 0.4811168371663718, - "learning_rate": 0.0015889326244452656, - "loss": 1.314, - "step": 6255 - }, - { - "epoch": 0.4811937543265903, - "learning_rate": 0.0015885707958857667, - "loss": 1.1711, - "step": 6256 - }, - { - "epoch": 0.4812706714868087, - "learning_rate": 0.0015882089621545269, - "loss": 1.5342, - "step": 6257 - }, - { - "epoch": 0.48134758864702715, - "learning_rate": 0.001587847123272674, - "loss": 1.2094, - "step": 6258 - }, - { - "epoch": 0.4814245058072456, - "learning_rate": 0.0015874852792613357, - "loss": 0.4793, - "step": 6259 - }, - { - "epoch": 0.48150142296746407, - "learning_rate": 0.001587123430141641, - "loss": 1.3869, - "step": 6260 - }, - { - "epoch": 0.48157834012768247, - "learning_rate": 0.0015867615759347187, - "loss": 1.2889, - "step": 6261 - }, - { - "epoch": 0.48165525728790093, - "learning_rate": 0.001586399716661697, - "loss": 1.0959, - "step": 6262 - }, - { - "epoch": 0.4817321744481194, - "learning_rate": 0.001586037852343706, - "loss": 1.1597, - "step": 6263 - }, - { - "epoch": 0.48180909160833785, - "learning_rate": 0.0015856759830018754, - "loss": 0.8337, - "step": 6264 - }, - { - "epoch": 0.48188600876855625, - 
"learning_rate": 0.0015853141086573346, - "loss": 1.0926, - "step": 6265 - }, - { - "epoch": 0.4819629259287747, - "learning_rate": 0.0015849522293312143, - "loss": 0.8937, - "step": 6266 - }, - { - "epoch": 0.48203984308899317, - "learning_rate": 0.0015845903450446442, - "loss": 1.1546, - "step": 6267 - }, - { - "epoch": 0.4821167602492116, - "learning_rate": 0.0015842284558187565, - "loss": 0.8617, - "step": 6268 - }, - { - "epoch": 0.48219367740943003, - "learning_rate": 0.0015838665616746811, - "loss": 1.2347, - "step": 6269 - }, - { - "epoch": 0.4822705945696485, - "learning_rate": 0.0015835046626335495, - "loss": 1.2136, - "step": 6270 - }, - { - "epoch": 0.48234751172986695, - "learning_rate": 0.001583142758716494, - "loss": 0.9565, - "step": 6271 - }, - { - "epoch": 0.48242442889008536, - "learning_rate": 0.0015827808499446461, - "loss": 1.4013, - "step": 6272 - }, - { - "epoch": 0.4825013460503038, - "learning_rate": 0.0015824189363391383, - "loss": 0.9568, - "step": 6273 - }, - { - "epoch": 0.4825782632105223, - "learning_rate": 0.0015820570179211023, - "loss": 0.9756, - "step": 6274 - }, - { - "epoch": 0.48265518037074073, - "learning_rate": 0.0015816950947116722, - "loss": 1.3315, - "step": 6275 - }, - { - "epoch": 0.48273209753095914, - "learning_rate": 0.0015813331667319799, - "loss": 1.2767, - "step": 6276 - }, - { - "epoch": 0.4828090146911776, - "learning_rate": 0.0015809712340031593, - "loss": 1.3024, - "step": 6277 - }, - { - "epoch": 0.48288593185139606, - "learning_rate": 0.0015806092965463443, - "loss": 1.3587, - "step": 6278 - }, - { - "epoch": 0.4829628490116145, - "learning_rate": 0.001580247354382668, - "loss": 1.3582, - "step": 6279 - }, - { - "epoch": 0.4830397661718329, - "learning_rate": 0.0015798854075332657, - "loss": 0.8317, - "step": 6280 - }, - { - "epoch": 0.4831166833320514, - "learning_rate": 0.00157952345601927, - "loss": 1.1435, - "step": 6281 - }, - { - "epoch": 0.48319360049226984, - "learning_rate": 0.0015791614998618179, - "loss": 1.0068, - "step": 6282 - }, - { - "epoch": 0.4832705176524883, - "learning_rate": 0.001578799539082043, - "loss": 1.4294, - "step": 6283 - }, - { - "epoch": 0.4833474348127067, - "learning_rate": 0.0015784375737010812, - "loss": 0.9777, - "step": 6284 - }, - { - "epoch": 0.48342435197292516, - "learning_rate": 0.0015780756037400672, - "loss": 1.2644, - "step": 6285 - }, - { - "epoch": 0.4835012691331436, - "learning_rate": 0.0015777136292201377, - "loss": 0.974, - "step": 6286 - }, - { - "epoch": 0.483578186293362, - "learning_rate": 0.0015773516501624283, - "loss": 0.7558, - "step": 6287 - }, - { - "epoch": 0.4836551034535805, - "learning_rate": 0.001576989666588075, - "loss": 1.4457, - "step": 6288 - }, - { - "epoch": 0.48373202061379894, - "learning_rate": 0.0015766276785182153, - "loss": 1.1475, - "step": 6289 - }, - { - "epoch": 0.4838089377740174, - "learning_rate": 0.0015762656859739854, - "loss": 1.1936, - "step": 6290 - }, - { - "epoch": 0.4838858549342358, - "learning_rate": 0.0015759036889765232, - "loss": 1.1512, - "step": 6291 - }, - { - "epoch": 0.48396277209445426, - "learning_rate": 0.0015755416875469649, - "loss": 0.9819, - "step": 6292 - }, - { - "epoch": 0.4840396892546727, - "learning_rate": 0.0015751796817064488, - "loss": 1.3392, - "step": 6293 - }, - { - "epoch": 0.4841166064148912, - "learning_rate": 0.0015748176714761126, - "loss": 1.0857, - "step": 6294 - }, - { - "epoch": 0.4841935235751096, - "learning_rate": 0.0015744556568770952, - "loss": 1.0391, - "step": 6295 - }, - { - "epoch": 
0.48427044073532804, - "learning_rate": 0.0015740936379305336, - "loss": 1.0908, - "step": 6296 - }, - { - "epoch": 0.4843473578955465, - "learning_rate": 0.0015737316146575678, - "loss": 1.1738, - "step": 6297 - }, - { - "epoch": 0.48442427505576496, - "learning_rate": 0.0015733695870793357, - "loss": 1.1246, - "step": 6298 - }, - { - "epoch": 0.48450119221598337, - "learning_rate": 0.0015730075552169774, - "loss": 1.1251, - "step": 6299 - }, - { - "epoch": 0.4845781093762018, - "learning_rate": 0.001572645519091632, - "loss": 1.2144, - "step": 6300 - }, - { - "epoch": 0.4846550265364203, - "learning_rate": 0.0015722834787244384, - "loss": 1.242, - "step": 6301 - }, - { - "epoch": 0.48473194369663875, - "learning_rate": 0.0015719214341365377, - "loss": 1.0311, - "step": 6302 - }, - { - "epoch": 0.48480886085685715, - "learning_rate": 0.0015715593853490694, - "loss": 1.3294, - "step": 6303 - }, - { - "epoch": 0.4848857780170756, - "learning_rate": 0.001571197332383174, - "loss": 1.4097, - "step": 6304 - }, - { - "epoch": 0.48496269517729407, - "learning_rate": 0.001570835275259992, - "loss": 1.1468, - "step": 6305 - }, - { - "epoch": 0.48503961233751247, - "learning_rate": 0.0015704732140006646, - "loss": 1.0481, - "step": 6306 - }, - { - "epoch": 0.48511652949773093, - "learning_rate": 0.0015701111486263327, - "loss": 1.2476, - "step": 6307 - }, - { - "epoch": 0.4851934466579494, - "learning_rate": 0.001569749079158138, - "loss": 0.8719, - "step": 6308 - }, - { - "epoch": 0.48527036381816785, - "learning_rate": 0.0015693870056172213, - "loss": 0.9322, - "step": 6309 - }, - { - "epoch": 0.48534728097838625, - "learning_rate": 0.0015690249280247255, - "loss": 1.0444, - "step": 6310 - }, - { - "epoch": 0.4854241981386047, - "learning_rate": 0.0015686628464017922, - "loss": 1.0921, - "step": 6311 - }, - { - "epoch": 0.48550111529882317, - "learning_rate": 0.0015683007607695636, - "loss": 0.7263, - "step": 6312 - }, - { - "epoch": 0.48557803245904163, - "learning_rate": 0.0015679386711491825, - "loss": 1.0986, - "step": 6313 - }, - { - "epoch": 0.48565494961926003, - "learning_rate": 0.0015675765775617916, - "loss": 0.9929, - "step": 6314 - }, - { - "epoch": 0.4857318667794785, - "learning_rate": 0.0015672144800285338, - "loss": 1.319, - "step": 6315 - }, - { - "epoch": 0.48580878393969695, - "learning_rate": 0.0015668523785705526, - "loss": 1.1637, - "step": 6316 - }, - { - "epoch": 0.4858857010999154, - "learning_rate": 0.001566490273208991, - "loss": 0.7977, - "step": 6317 - }, - { - "epoch": 0.4859626182601338, - "learning_rate": 0.0015661281639649937, - "loss": 1.0797, - "step": 6318 - }, - { - "epoch": 0.4860395354203523, - "learning_rate": 0.0015657660508597035, - "loss": 1.0003, - "step": 6319 - }, - { - "epoch": 0.48611645258057073, - "learning_rate": 0.0015654039339142649, - "loss": 1.157, - "step": 6320 - }, - { - "epoch": 0.4861933697407892, - "learning_rate": 0.0015650418131498227, - "loss": 1.1297, - "step": 6321 - }, - { - "epoch": 0.4862702869010076, - "learning_rate": 0.0015646796885875217, - "loss": 1.2896, - "step": 6322 - }, - { - "epoch": 0.48634720406122606, - "learning_rate": 0.0015643175602485053, - "loss": 1.3231, - "step": 6323 - }, - { - "epoch": 0.4864241212214445, - "learning_rate": 0.0015639554281539204, - "loss": 1.3241, - "step": 6324 - }, - { - "epoch": 0.486501038381663, - "learning_rate": 0.0015635932923249106, - "loss": 1.056, - "step": 6325 - }, - { - "epoch": 0.4865779555418814, - "learning_rate": 0.001563231152782623, - "loss": 0.8163, - "step": 6326 - }, 
- { - "epoch": 0.48665487270209984, - "learning_rate": 0.001562869009548202, - "loss": 1.0155, - "step": 6327 - }, - { - "epoch": 0.4867317898623183, - "learning_rate": 0.0015625068626427942, - "loss": 1.0447, - "step": 6328 - }, - { - "epoch": 0.4868087070225367, - "learning_rate": 0.0015621447120875455, - "loss": 1.3451, - "step": 6329 - }, - { - "epoch": 0.48688562418275516, - "learning_rate": 0.0015617825579036023, - "loss": 1.021, - "step": 6330 - }, - { - "epoch": 0.4869625413429736, - "learning_rate": 0.0015614204001121108, - "loss": 0.8143, - "step": 6331 - }, - { - "epoch": 0.4870394585031921, - "learning_rate": 0.0015610582387342185, - "loss": 1.2286, - "step": 6332 - }, - { - "epoch": 0.4871163756634105, - "learning_rate": 0.001560696073791072, - "loss": 1.0498, - "step": 6333 - }, - { - "epoch": 0.48719329282362894, - "learning_rate": 0.0015603339053038186, - "loss": 0.8564, - "step": 6334 - }, - { - "epoch": 0.4872702099838474, - "learning_rate": 0.0015599717332936051, - "loss": 1.3406, - "step": 6335 - }, - { - "epoch": 0.48734712714406586, - "learning_rate": 0.00155960955778158, - "loss": 0.9921, - "step": 6336 - }, - { - "epoch": 0.48742404430428427, - "learning_rate": 0.001559247378788891, - "loss": 1.0311, - "step": 6337 - }, - { - "epoch": 0.4875009614645027, - "learning_rate": 0.0015588851963366853, - "loss": 1.3841, - "step": 6338 - }, - { - "epoch": 0.4875778786247212, - "learning_rate": 0.0015585230104461119, - "loss": 1.2471, - "step": 6339 - }, - { - "epoch": 0.48765479578493964, - "learning_rate": 0.0015581608211383184, - "loss": 1.1149, - "step": 6340 - }, - { - "epoch": 0.48773171294515805, - "learning_rate": 0.0015577986284344544, - "loss": 1.2192, - "step": 6341 - }, - { - "epoch": 0.4878086301053765, - "learning_rate": 0.0015574364323556683, - "loss": 1.0891, - "step": 6342 - }, - { - "epoch": 0.48788554726559497, - "learning_rate": 0.0015570742329231088, - "loss": 1.2197, - "step": 6343 - }, - { - "epoch": 0.4879624644258134, - "learning_rate": 0.0015567120301579257, - "loss": 1.225, - "step": 6344 - }, - { - "epoch": 0.48803938158603183, - "learning_rate": 0.0015563498240812675, - "loss": 1.234, - "step": 6345 - }, - { - "epoch": 0.4881162987462503, - "learning_rate": 0.001555987614714285, - "loss": 0.8916, - "step": 6346 - }, - { - "epoch": 0.48819321590646875, - "learning_rate": 0.0015556254020781268, - "loss": 1.0892, - "step": 6347 - }, - { - "epoch": 0.48827013306668715, - "learning_rate": 0.001555263186193944, - "loss": 1.1146, - "step": 6348 - }, - { - "epoch": 0.4883470502269056, - "learning_rate": 0.0015549009670828855, - "loss": 1.1832, - "step": 6349 - }, - { - "epoch": 0.48842396738712407, - "learning_rate": 0.0015545387447661028, - "loss": 1.2099, - "step": 6350 - }, - { - "epoch": 0.48850088454734253, - "learning_rate": 0.0015541765192647456, - "loss": 1.2221, - "step": 6351 - }, - { - "epoch": 0.48857780170756093, - "learning_rate": 0.0015538142905999652, - "loss": 1.2262, - "step": 6352 - }, - { - "epoch": 0.4886547188677794, - "learning_rate": 0.0015534520587929122, - "loss": 1.0718, - "step": 6353 - }, - { - "epoch": 0.48873163602799785, - "learning_rate": 0.0015530898238647374, - "loss": 1.2102, - "step": 6354 - }, - { - "epoch": 0.4888085531882163, - "learning_rate": 0.0015527275858365933, - "loss": 0.7523, - "step": 6355 - }, - { - "epoch": 0.4888854703484347, - "learning_rate": 0.0015523653447296298, - "loss": 1.2573, - "step": 6356 - }, - { - "epoch": 0.4889623875086532, - "learning_rate": 0.0015520031005649996, - "loss": 1.4722, - 
"step": 6357 - }, - { - "epoch": 0.48903930466887163, - "learning_rate": 0.001551640853363854, - "loss": 1.1594, - "step": 6358 - }, - { - "epoch": 0.4891162218290901, - "learning_rate": 0.0015512786031473453, - "loss": 1.0412, - "step": 6359 - }, - { - "epoch": 0.4891931389893085, - "learning_rate": 0.0015509163499366257, - "loss": 1.0809, - "step": 6360 - }, - { - "epoch": 0.48927005614952696, - "learning_rate": 0.0015505540937528473, - "loss": 1.103, - "step": 6361 - }, - { - "epoch": 0.4893469733097454, - "learning_rate": 0.0015501918346171619, - "loss": 1.2935, - "step": 6362 - }, - { - "epoch": 0.4894238904699639, - "learning_rate": 0.0015498295725507238, - "loss": 1.3264, - "step": 6363 - }, - { - "epoch": 0.4895008076301823, - "learning_rate": 0.001549467307574685, - "loss": 1.2447, - "step": 6364 - }, - { - "epoch": 0.48957772479040074, - "learning_rate": 0.001549105039710198, - "loss": 1.3307, - "step": 6365 - }, - { - "epoch": 0.4896546419506192, - "learning_rate": 0.0015487427689784168, - "loss": 1.1443, - "step": 6366 - }, - { - "epoch": 0.48973155911083766, - "learning_rate": 0.0015483804954004945, - "loss": 1.2331, - "step": 6367 - }, - { - "epoch": 0.48980847627105606, - "learning_rate": 0.0015480182189975852, - "loss": 1.1772, - "step": 6368 - }, - { - "epoch": 0.4898853934312745, - "learning_rate": 0.0015476559397908413, - "loss": 1.0711, - "step": 6369 - }, - { - "epoch": 0.489962310591493, - "learning_rate": 0.0015472936578014179, - "loss": 0.933, - "step": 6370 - }, - { - "epoch": 0.4900392277517114, - "learning_rate": 0.0015469313730504684, - "loss": 1.4777, - "step": 6371 - }, - { - "epoch": 0.49011614491192984, - "learning_rate": 0.0015465690855591472, - "loss": 1.0311, - "step": 6372 - }, - { - "epoch": 0.4901930620721483, - "learning_rate": 0.0015462067953486084, - "loss": 1.142, - "step": 6373 - }, - { - "epoch": 0.49026997923236676, - "learning_rate": 0.0015458445024400067, - "loss": 0.9315, - "step": 6374 - }, - { - "epoch": 0.49034689639258516, - "learning_rate": 0.0015454822068544968, - "loss": 1.1105, - "step": 6375 - }, - { - "epoch": 0.4904238135528036, - "learning_rate": 0.001545119908613233, - "loss": 1.25, - "step": 6376 - }, - { - "epoch": 0.4905007307130221, - "learning_rate": 0.0015447576077373716, - "loss": 1.1408, - "step": 6377 - }, - { - "epoch": 0.49057764787324054, - "learning_rate": 0.0015443953042480658, - "loss": 1.1946, - "step": 6378 - }, - { - "epoch": 0.49065456503345894, - "learning_rate": 0.0015440329981664727, - "loss": 0.9651, - "step": 6379 - }, - { - "epoch": 0.4907314821936774, - "learning_rate": 0.0015436706895137467, - "loss": 1.1148, - "step": 6380 - }, - { - "epoch": 0.49080839935389586, - "learning_rate": 0.0015433083783110435, - "loss": 1.015, - "step": 6381 - }, - { - "epoch": 0.4908853165141143, - "learning_rate": 0.0015429460645795192, - "loss": 1.265, - "step": 6382 - }, - { - "epoch": 0.4909622336743327, - "learning_rate": 0.0015425837483403297, - "loss": 0.9602, - "step": 6383 - }, - { - "epoch": 0.4910391508345512, - "learning_rate": 0.0015422214296146298, - "loss": 1.3532, - "step": 6384 - }, - { - "epoch": 0.49111606799476965, - "learning_rate": 0.0015418591084235774, - "loss": 1.2866, - "step": 6385 - }, - { - "epoch": 0.4911929851549881, - "learning_rate": 0.0015414967847883275, - "loss": 1.1965, - "step": 6386 - }, - { - "epoch": 0.4912699023152065, - "learning_rate": 0.0015411344587300375, - "loss": 1.3679, - "step": 6387 - }, - { - "epoch": 0.49134681947542497, - "learning_rate": 0.0015407721302698638, - 
"loss": 1.3245, - "step": 6388 - }, - { - "epoch": 0.4914237366356434, - "learning_rate": 0.0015404097994289622, - "loss": 1.0462, - "step": 6389 - }, - { - "epoch": 0.49150065379586183, - "learning_rate": 0.0015400474662284913, - "loss": 1.2234, - "step": 6390 - }, - { - "epoch": 0.4915775709560803, - "learning_rate": 0.0015396851306896061, - "loss": 1.1224, - "step": 6391 - }, - { - "epoch": 0.49165448811629875, - "learning_rate": 0.0015393227928334654, - "loss": 1.2868, - "step": 6392 - }, - { - "epoch": 0.4917314052765172, - "learning_rate": 0.0015389604526812252, - "loss": 1.1485, - "step": 6393 - }, - { - "epoch": 0.4918083224367356, - "learning_rate": 0.0015385981102540444, - "loss": 1.1772, - "step": 6394 - }, - { - "epoch": 0.49188523959695407, - "learning_rate": 0.0015382357655730793, - "loss": 1.0205, - "step": 6395 - }, - { - "epoch": 0.49196215675717253, - "learning_rate": 0.001537873418659488, - "loss": 1.3706, - "step": 6396 - }, - { - "epoch": 0.492039073917391, - "learning_rate": 0.001537511069534428, - "loss": 0.9855, - "step": 6397 - }, - { - "epoch": 0.4921159910776094, - "learning_rate": 0.0015371487182190581, - "loss": 1.1971, - "step": 6398 - }, - { - "epoch": 0.49219290823782785, - "learning_rate": 0.0015367863647345356, - "loss": 1.2389, - "step": 6399 - }, - { - "epoch": 0.4922698253980463, - "learning_rate": 0.0015364240091020188, - "loss": 1.0502, - "step": 6400 - }, - { - "epoch": 0.49234674255826477, - "learning_rate": 0.0015360616513426665, - "loss": 1.397, - "step": 6401 - }, - { - "epoch": 0.4924236597184832, - "learning_rate": 0.0015356992914776367, - "loss": 1.1998, - "step": 6402 - }, - { - "epoch": 0.49250057687870163, - "learning_rate": 0.0015353369295280877, - "loss": 1.0317, - "step": 6403 - }, - { - "epoch": 0.4925774940389201, - "learning_rate": 0.0015349745655151785, - "loss": 1.1539, - "step": 6404 - }, - { - "epoch": 0.49265441119913855, - "learning_rate": 0.0015346121994600688, - "loss": 0.9895, - "step": 6405 - }, - { - "epoch": 0.49273132835935696, - "learning_rate": 0.0015342498313839163, - "loss": 1.0169, - "step": 6406 - }, - { - "epoch": 0.4928082455195754, - "learning_rate": 0.00153388746130788, - "loss": 0.7782, - "step": 6407 - }, - { - "epoch": 0.4928851626797939, - "learning_rate": 0.0015335250892531196, - "loss": 0.9776, - "step": 6408 - }, - { - "epoch": 0.4929620798400123, - "learning_rate": 0.0015331627152407946, - "loss": 0.9775, - "step": 6409 - }, - { - "epoch": 0.49303899700023074, - "learning_rate": 0.0015328003392920642, - "loss": 1.3916, - "step": 6410 - }, - { - "epoch": 0.4931159141604492, - "learning_rate": 0.0015324379614280875, - "loss": 1.29, - "step": 6411 - }, - { - "epoch": 0.49319283132066766, - "learning_rate": 0.0015320755816700246, - "loss": 1.3708, - "step": 6412 - }, - { - "epoch": 0.49326974848088606, - "learning_rate": 0.0015317132000390351, - "loss": 1.2731, - "step": 6413 - }, - { - "epoch": 0.4933466656411045, - "learning_rate": 0.0015313508165562789, - "loss": 1.2686, - "step": 6414 - }, - { - "epoch": 0.493423582801323, - "learning_rate": 0.0015309884312429153, - "loss": 0.9308, - "step": 6415 - }, - { - "epoch": 0.49350049996154144, - "learning_rate": 0.0015306260441201056, - "loss": 0.9955, - "step": 6416 - }, - { - "epoch": 0.49357741712175984, - "learning_rate": 0.0015302636552090088, - "loss": 1.1347, - "step": 6417 - }, - { - "epoch": 0.4936543342819783, - "learning_rate": 0.001529901264530786, - "loss": 1.1132, - "step": 6418 - }, - { - "epoch": 0.49373125144219676, - "learning_rate": 
0.0015295388721065968, - "loss": 0.8727, - "step": 6419 - }, - { - "epoch": 0.4938081686024152, - "learning_rate": 0.0015291764779576022, - "loss": 0.8353, - "step": 6420 - }, - { - "epoch": 0.4938850857626336, - "learning_rate": 0.0015288140821049629, - "loss": 0.89, - "step": 6421 - }, - { - "epoch": 0.4939620029228521, - "learning_rate": 0.0015284516845698389, - "loss": 1.022, - "step": 6422 - }, - { - "epoch": 0.49403892008307054, - "learning_rate": 0.0015280892853733916, - "loss": 0.8269, - "step": 6423 - }, - { - "epoch": 0.494115837243289, - "learning_rate": 0.0015277268845367814, - "loss": 1.1641, - "step": 6424 - }, - { - "epoch": 0.4941927544035074, - "learning_rate": 0.0015273644820811704, - "loss": 0.8085, - "step": 6425 - }, - { - "epoch": 0.49426967156372587, - "learning_rate": 0.0015270020780277177, - "loss": 1.4092, - "step": 6426 - }, - { - "epoch": 0.4943465887239443, - "learning_rate": 0.0015266396723975862, - "loss": 1.3734, - "step": 6427 - }, - { - "epoch": 0.4944235058841628, - "learning_rate": 0.0015262772652119357, - "loss": 1.3758, - "step": 6428 - }, - { - "epoch": 0.4945004230443812, - "learning_rate": 0.0015259148564919292, - "loss": 1.2438, - "step": 6429 - }, - { - "epoch": 0.49457734020459965, - "learning_rate": 0.0015255524462587267, - "loss": 1.1805, - "step": 6430 - }, - { - "epoch": 0.4946542573648181, - "learning_rate": 0.0015251900345334903, - "loss": 1.284, - "step": 6431 - }, - { - "epoch": 0.4947311745250365, - "learning_rate": 0.001524827621337382, - "loss": 1.5342, - "step": 6432 - }, - { - "epoch": 0.49480809168525497, - "learning_rate": 0.0015244652066915623, - "loss": 1.1638, - "step": 6433 - }, - { - "epoch": 0.49488500884547343, - "learning_rate": 0.001524102790617194, - "loss": 0.8618, - "step": 6434 - }, - { - "epoch": 0.4949619260056919, - "learning_rate": 0.0015237403731354386, - "loss": 1.1474, - "step": 6435 - }, - { - "epoch": 0.4950388431659103, - "learning_rate": 0.0015233779542674587, - "loss": 1.397, - "step": 6436 - }, - { - "epoch": 0.49511576032612875, - "learning_rate": 0.0015230155340344145, - "loss": 1.4225, - "step": 6437 - }, - { - "epoch": 0.4951926774863472, - "learning_rate": 0.0015226531124574703, - "loss": 1.0917, - "step": 6438 - }, - { - "epoch": 0.49526959464656567, - "learning_rate": 0.0015222906895577864, - "loss": 1.1323, - "step": 6439 - }, - { - "epoch": 0.4953465118067841, - "learning_rate": 0.0015219282653565265, - "loss": 1.2818, - "step": 6440 - }, - { - "epoch": 0.49542342896700253, - "learning_rate": 0.001521565839874852, - "loss": 1.0726, - "step": 6441 - }, - { - "epoch": 0.495500346127221, - "learning_rate": 0.0015212034131339258, - "loss": 1.1083, - "step": 6442 - }, - { - "epoch": 0.49557726328743945, - "learning_rate": 0.0015208409851549104, - "loss": 1.1327, - "step": 6443 - }, - { - "epoch": 0.49565418044765785, - "learning_rate": 0.001520478555958968, - "loss": 1.031, - "step": 6444 - }, - { - "epoch": 0.4957310976078763, - "learning_rate": 0.0015201161255672614, - "loss": 0.8077, - "step": 6445 - }, - { - "epoch": 0.4958080147680948, - "learning_rate": 0.001519753694000953, - "loss": 1.2604, - "step": 6446 - }, - { - "epoch": 0.49588493192831323, - "learning_rate": 0.0015193912612812062, - "loss": 1.7456, - "step": 6447 - }, - { - "epoch": 0.49596184908853164, - "learning_rate": 0.0015190288274291833, - "loss": 0.9868, - "step": 6448 - }, - { - "epoch": 0.4960387662487501, - "learning_rate": 0.0015186663924660476, - "loss": 1.3619, - "step": 6449 - }, - { - "epoch": 0.49611568340896856, - 
"learning_rate": 0.0015183039564129613, - "loss": 0.9619, - "step": 6450 - }, - { - "epoch": 0.49619260056918696, - "learning_rate": 0.0015179415192910879, - "loss": 1.1899, - "step": 6451 - }, - { - "epoch": 0.4962695177294054, - "learning_rate": 0.001517579081121591, - "loss": 1.2517, - "step": 6452 - }, - { - "epoch": 0.4963464348896239, - "learning_rate": 0.0015172166419256327, - "loss": 1.1316, - "step": 6453 - }, - { - "epoch": 0.49642335204984234, - "learning_rate": 0.001516854201724377, - "loss": 1.0495, - "step": 6454 - }, - { - "epoch": 0.49650026921006074, - "learning_rate": 0.001516491760538987, - "loss": 1.1894, - "step": 6455 - }, - { - "epoch": 0.4965771863702792, - "learning_rate": 0.0015161293183906259, - "loss": 1.2593, - "step": 6456 - }, - { - "epoch": 0.49665410353049766, - "learning_rate": 0.0015157668753004567, - "loss": 1.1953, - "step": 6457 - }, - { - "epoch": 0.4967310206907161, - "learning_rate": 0.0015154044312896435, - "loss": 1.6624, - "step": 6458 - }, - { - "epoch": 0.4968079378509345, - "learning_rate": 0.0015150419863793497, - "loss": 1.1629, - "step": 6459 - }, - { - "epoch": 0.496884855011153, - "learning_rate": 0.0015146795405907387, - "loss": 1.5651, - "step": 6460 - }, - { - "epoch": 0.49696177217137144, - "learning_rate": 0.0015143170939449734, - "loss": 1.0473, - "step": 6461 - }, - { - "epoch": 0.4970386893315899, - "learning_rate": 0.0015139546464632183, - "loss": 1.0656, - "step": 6462 - }, - { - "epoch": 0.4971156064918083, - "learning_rate": 0.0015135921981666372, - "loss": 0.934, - "step": 6463 - }, - { - "epoch": 0.49719252365202676, - "learning_rate": 0.001513229749076393, - "loss": 1.3089, - "step": 6464 - }, - { - "epoch": 0.4972694408122452, - "learning_rate": 0.0015128672992136506, - "loss": 1.0161, - "step": 6465 - }, - { - "epoch": 0.4973463579724637, - "learning_rate": 0.0015125048485995726, - "loss": 1.4793, - "step": 6466 - }, - { - "epoch": 0.4974232751326821, - "learning_rate": 0.0015121423972553239, - "loss": 1.2478, - "step": 6467 - }, - { - "epoch": 0.49750019229290054, - "learning_rate": 0.0015117799452020674, - "loss": 1.3143, - "step": 6468 - }, - { - "epoch": 0.497577109453119, - "learning_rate": 0.0015114174924609682, - "loss": 1.1109, - "step": 6469 - }, - { - "epoch": 0.4976540266133374, - "learning_rate": 0.0015110550390531896, - "loss": 0.9355, - "step": 6470 - }, - { - "epoch": 0.49773094377355587, - "learning_rate": 0.0015106925849998956, - "loss": 1.045, - "step": 6471 - }, - { - "epoch": 0.4978078609337743, - "learning_rate": 0.00151033013032225, - "loss": 1.1446, - "step": 6472 - }, - { - "epoch": 0.4978847780939928, - "learning_rate": 0.001509967675041418, - "loss": 1.1395, - "step": 6473 - }, - { - "epoch": 0.4979616952542112, - "learning_rate": 0.001509605219178563, - "loss": 1.1233, - "step": 6474 - }, - { - "epoch": 0.49803861241442965, - "learning_rate": 0.001509242762754849, - "loss": 1.1183, - "step": 6475 - }, - { - "epoch": 0.4981155295746481, - "learning_rate": 0.0015088803057914403, - "loss": 1.4301, - "step": 6476 - }, - { - "epoch": 0.49819244673486657, - "learning_rate": 0.0015085178483095014, - "loss": 1.1801, - "step": 6477 - }, - { - "epoch": 0.49826936389508497, - "learning_rate": 0.001508155390330197, - "loss": 0.963, - "step": 6478 - }, - { - "epoch": 0.49834628105530343, - "learning_rate": 0.00150779293187469, - "loss": 1.2661, - "step": 6479 - }, - { - "epoch": 0.4984231982155219, - "learning_rate": 0.0015074304729641463, - "loss": 1.1302, - "step": 6480 - }, - { - "epoch": 
0.49850011537574035, - "learning_rate": 0.0015070680136197291, - "loss": 1.0476, - "step": 6481 - }, - { - "epoch": 0.49857703253595875, - "learning_rate": 0.0015067055538626037, - "loss": 1.2589, - "step": 6482 - }, - { - "epoch": 0.4986539496961772, - "learning_rate": 0.0015063430937139336, - "loss": 0.9057, - "step": 6483 - }, - { - "epoch": 0.49873086685639567, - "learning_rate": 0.0015059806331948838, - "loss": 0.9872, - "step": 6484 - }, - { - "epoch": 0.49880778401661413, - "learning_rate": 0.0015056181723266183, - "loss": 1.2148, - "step": 6485 - }, - { - "epoch": 0.49888470117683253, - "learning_rate": 0.001505255711130302, - "loss": 1.2279, - "step": 6486 - }, - { - "epoch": 0.498961618337051, - "learning_rate": 0.0015048932496270998, - "loss": 0.8585, - "step": 6487 - }, - { - "epoch": 0.49903853549726945, - "learning_rate": 0.0015045307878381747, - "loss": 1.2618, - "step": 6488 - }, - { - "epoch": 0.4991154526574879, - "learning_rate": 0.001504168325784693, - "loss": 1.0307, - "step": 6489 - }, - { - "epoch": 0.4991923698177063, - "learning_rate": 0.0015038058634878181, - "loss": 1.0074, - "step": 6490 - }, - { - "epoch": 0.4992692869779248, - "learning_rate": 0.001503443400968715, - "loss": 1.0053, - "step": 6491 - }, - { - "epoch": 0.49934620413814323, - "learning_rate": 0.0015030809382485478, - "loss": 1.1319, - "step": 6492 - }, - { - "epoch": 0.49942312129836164, - "learning_rate": 0.0015027184753484818, - "loss": 1.2984, - "step": 6493 - }, - { - "epoch": 0.4995000384585801, - "learning_rate": 0.0015023560122896808, - "loss": 0.7327, - "step": 6494 - }, - { - "epoch": 0.49957695561879856, - "learning_rate": 0.0015019935490933104, - "loss": 1.4445, - "step": 6495 - }, - { - "epoch": 0.499653872779017, - "learning_rate": 0.001501631085780534, - "loss": 1.1119, - "step": 6496 - }, - { - "epoch": 0.4997307899392354, - "learning_rate": 0.0015012686223725169, - "loss": 1.3338, - "step": 6497 - }, - { - "epoch": 0.4998077070994539, - "learning_rate": 0.0015009061588904236, - "loss": 1.3912, - "step": 6498 - }, - { - "epoch": 0.49988462425967234, - "learning_rate": 0.0015005436953554187, - "loss": 1.2257, - "step": 6499 - }, - { - "epoch": 0.4999615414198908, - "learning_rate": 0.001500181231788667, - "loss": 1.5399, - "step": 6500 - }, - { - "epoch": 0.5000384585801092, - "learning_rate": 0.001499818768211333, - "loss": 0.8163, - "step": 6501 - }, - { - "epoch": 0.5001153757403277, - "learning_rate": 0.0014994563046445812, - "loss": 0.9792, - "step": 6502 - }, - { - "epoch": 0.5001922929005461, - "learning_rate": 0.0014990938411095765, - "loss": 1.0609, - "step": 6503 - }, - { - "epoch": 0.5002692100607645, - "learning_rate": 0.0014987313776274834, - "loss": 1.3299, - "step": 6504 - }, - { - "epoch": 0.500346127220983, - "learning_rate": 0.0014983689142194661, - "loss": 1.3401, - "step": 6505 - }, - { - "epoch": 0.5004230443812014, - "learning_rate": 0.00149800645090669, - "loss": 1.1659, - "step": 6506 - }, - { - "epoch": 0.5004999615414198, - "learning_rate": 0.0014976439877103197, - "loss": 1.1513, - "step": 6507 - }, - { - "epoch": 0.5005768787016384, - "learning_rate": 0.0014972815246515187, - "loss": 1.3099, - "step": 6508 - }, - { - "epoch": 0.5006537958618568, - "learning_rate": 0.0014969190617514525, - "loss": 1.0427, - "step": 6509 - }, - { - "epoch": 0.5007307130220753, - "learning_rate": 0.001496556599031285, - "loss": 1.3597, - "step": 6510 - }, - { - "epoch": 0.5008076301822937, - "learning_rate": 0.0014961941365121822, - "loss": 0.9758, - "step": 6511 - }, - { - 
"epoch": 0.5008845473425121, - "learning_rate": 0.0014958316742153073, - "loss": 1.3063, - "step": 6512 - }, - { - "epoch": 0.5009614645027306, - "learning_rate": 0.0014954692121618251, - "loss": 1.4286, - "step": 6513 - }, - { - "epoch": 0.501038381662949, - "learning_rate": 0.0014951067503729005, - "loss": 1.4384, - "step": 6514 - }, - { - "epoch": 0.5011152988231674, - "learning_rate": 0.001494744288869698, - "loss": 1.0694, - "step": 6515 - }, - { - "epoch": 0.5011922159833859, - "learning_rate": 0.0014943818276733816, - "loss": 1.3276, - "step": 6516 - }, - { - "epoch": 0.5012691331436043, - "learning_rate": 0.0014940193668051165, - "loss": 0.8976, - "step": 6517 - }, - { - "epoch": 0.5013460503038227, - "learning_rate": 0.001493656906286067, - "loss": 1.0597, - "step": 6518 - }, - { - "epoch": 0.5014229674640412, - "learning_rate": 0.0014932944461373964, - "loss": 0.8895, - "step": 6519 - }, - { - "epoch": 0.5014998846242597, - "learning_rate": 0.0014929319863802712, - "loss": 0.9636, - "step": 6520 - }, - { - "epoch": 0.5015768017844782, - "learning_rate": 0.0014925695270358536, - "loss": 1.2442, - "step": 6521 - }, - { - "epoch": 0.5016537189446966, - "learning_rate": 0.00149220706812531, - "loss": 1.1827, - "step": 6522 - }, - { - "epoch": 0.501730636104915, - "learning_rate": 0.0014918446096698036, - "loss": 1.2397, - "step": 6523 - }, - { - "epoch": 0.5018075532651335, - "learning_rate": 0.0014914821516904984, - "loss": 1.1812, - "step": 6524 - }, - { - "epoch": 0.5018844704253519, - "learning_rate": 0.0014911196942085597, - "loss": 1.4535, - "step": 6525 - }, - { - "epoch": 0.5019613875855703, - "learning_rate": 0.0014907572372451514, - "loss": 1.0805, - "step": 6526 - }, - { - "epoch": 0.5020383047457888, - "learning_rate": 0.0014903947808214372, - "loss": 1.2827, - "step": 6527 - }, - { - "epoch": 0.5021152219060072, - "learning_rate": 0.0014900323249585823, - "loss": 1.1194, - "step": 6528 - }, - { - "epoch": 0.5021921390662257, - "learning_rate": 0.0014896698696777496, - "loss": 1.1704, - "step": 6529 - }, - { - "epoch": 0.5022690562264441, - "learning_rate": 0.0014893074150001046, - "loss": 1.235, - "step": 6530 - }, - { - "epoch": 0.5023459733866625, - "learning_rate": 0.0014889449609468111, - "loss": 1.0981, - "step": 6531 - }, - { - "epoch": 0.502422890546881, - "learning_rate": 0.0014885825075390316, - "loss": 1.2114, - "step": 6532 - }, - { - "epoch": 0.5024998077070995, - "learning_rate": 0.0014882200547979329, - "loss": 1.6672, - "step": 6533 - }, - { - "epoch": 0.5025767248673179, - "learning_rate": 0.0014878576027446766, - "loss": 0.639, - "step": 6534 - }, - { - "epoch": 0.5026536420275364, - "learning_rate": 0.0014874951514004275, - "loss": 1.3364, - "step": 6535 - }, - { - "epoch": 0.5027305591877548, - "learning_rate": 0.00148713270078635, - "loss": 1.3282, - "step": 6536 - }, - { - "epoch": 0.5028074763479732, - "learning_rate": 0.001486770250923607, - "loss": 0.9317, - "step": 6537 - }, - { - "epoch": 0.5028843935081917, - "learning_rate": 0.001486407801833363, - "loss": 0.6212, - "step": 6538 - }, - { - "epoch": 0.5029613106684101, - "learning_rate": 0.0014860453535367817, - "loss": 1.1881, - "step": 6539 - }, - { - "epoch": 0.5030382278286286, - "learning_rate": 0.0014856829060550267, - "loss": 1.2884, - "step": 6540 - }, - { - "epoch": 0.503115144988847, - "learning_rate": 0.0014853204594092618, - "loss": 1.2122, - "step": 6541 - }, - { - "epoch": 0.5031920621490654, - "learning_rate": 0.0014849580136206508, - "loss": 1.3868, - "step": 6542 - }, - { - 
"epoch": 0.5032689793092839, - "learning_rate": 0.0014845955687103564, - "loss": 1.3568, - "step": 6543 - }, - { - "epoch": 0.5033458964695023, - "learning_rate": 0.0014842331246995436, - "loss": 0.6795, - "step": 6544 - }, - { - "epoch": 0.5034228136297207, - "learning_rate": 0.0014838706816093748, - "loss": 1.0597, - "step": 6545 - }, - { - "epoch": 0.5034997307899393, - "learning_rate": 0.0014835082394610132, - "loss": 1.2906, - "step": 6546 - }, - { - "epoch": 0.5035766479501577, - "learning_rate": 0.0014831457982756233, - "loss": 1.0363, - "step": 6547 - }, - { - "epoch": 0.5036535651103762, - "learning_rate": 0.0014827833580743671, - "loss": 1.4559, - "step": 6548 - }, - { - "epoch": 0.5037304822705946, - "learning_rate": 0.0014824209188784093, - "loss": 0.9388, - "step": 6549 - }, - { - "epoch": 0.503807399430813, - "learning_rate": 0.0014820584807089122, - "loss": 1.0233, - "step": 6550 - }, - { - "epoch": 0.5038843165910315, - "learning_rate": 0.0014816960435870387, - "loss": 0.9559, - "step": 6551 - }, - { - "epoch": 0.5039612337512499, - "learning_rate": 0.0014813336075339529, - "loss": 1.1925, - "step": 6552 - }, - { - "epoch": 0.5040381509114683, - "learning_rate": 0.0014809711725708172, - "loss": 0.8224, - "step": 6553 - }, - { - "epoch": 0.5041150680716868, - "learning_rate": 0.0014806087387187938, - "loss": 1.1409, - "step": 6554 - }, - { - "epoch": 0.5041919852319052, - "learning_rate": 0.0014802463059990472, - "loss": 0.7648, - "step": 6555 - }, - { - "epoch": 0.5042689023921236, - "learning_rate": 0.001479883874432739, - "loss": 1.1594, - "step": 6556 - }, - { - "epoch": 0.5043458195523421, - "learning_rate": 0.0014795214440410322, - "loss": 1.0776, - "step": 6557 - }, - { - "epoch": 0.5044227367125605, - "learning_rate": 0.0014791590148450899, - "loss": 1.282, - "step": 6558 - }, - { - "epoch": 0.5044996538727791, - "learning_rate": 0.001478796586866074, - "loss": 1.106, - "step": 6559 - }, - { - "epoch": 0.5045765710329975, - "learning_rate": 0.001478434160125148, - "loss": 1.316, - "step": 6560 - }, - { - "epoch": 0.5046534881932159, - "learning_rate": 0.0014780717346434737, - "loss": 0.9918, - "step": 6561 - }, - { - "epoch": 0.5047304053534344, - "learning_rate": 0.0014777093104422136, - "loss": 0.8127, - "step": 6562 - }, - { - "epoch": 0.5048073225136528, - "learning_rate": 0.0014773468875425302, - "loss": 1.1305, - "step": 6563 - }, - { - "epoch": 0.5048842396738712, - "learning_rate": 0.001476984465965586, - "loss": 1.1134, - "step": 6564 - }, - { - "epoch": 0.5049611568340897, - "learning_rate": 0.0014766220457325418, - "loss": 1.299, - "step": 6565 - }, - { - "epoch": 0.5050380739943081, - "learning_rate": 0.0014762596268645617, - "loss": 1.4228, - "step": 6566 - }, - { - "epoch": 0.5051149911545266, - "learning_rate": 0.0014758972093828057, - "loss": 1.0898, - "step": 6567 - }, - { - "epoch": 0.505191908314745, - "learning_rate": 0.001475534793308438, - "loss": 1.192, - "step": 6568 - }, - { - "epoch": 0.5052688254749634, - "learning_rate": 0.0014751723786626187, - "loss": 1.4412, - "step": 6569 - }, - { - "epoch": 0.505345742635182, - "learning_rate": 0.0014748099654665096, - "loss": 0.7492, - "step": 6570 - }, - { - "epoch": 0.5054226597954004, - "learning_rate": 0.0014744475537412734, - "loss": 0.8845, - "step": 6571 - }, - { - "epoch": 0.5054995769556188, - "learning_rate": 0.0014740851435080713, - "loss": 1.1231, - "step": 6572 - }, - { - "epoch": 0.5055764941158373, - "learning_rate": 0.0014737227347880642, - "loss": 1.1297, - "step": 6573 - }, - { - 
"epoch": 0.5056534112760557, - "learning_rate": 0.0014733603276024143, - "loss": 1.6957, - "step": 6574 - }, - { - "epoch": 0.5057303284362741, - "learning_rate": 0.001472997921972283, - "loss": 1.1814, - "step": 6575 - }, - { - "epoch": 0.5058072455964926, - "learning_rate": 0.0014726355179188301, - "loss": 1.3075, - "step": 6576 - }, - { - "epoch": 0.505884162756711, - "learning_rate": 0.0014722731154632187, - "loss": 1.2026, - "step": 6577 - }, - { - "epoch": 0.5059610799169295, - "learning_rate": 0.0014719107146266082, - "loss": 1.096, - "step": 6578 - }, - { - "epoch": 0.5060379970771479, - "learning_rate": 0.0014715483154301614, - "loss": 0.7804, - "step": 6579 - }, - { - "epoch": 0.5061149142373663, - "learning_rate": 0.0014711859178950378, - "loss": 0.9692, - "step": 6580 - }, - { - "epoch": 0.5061918313975848, - "learning_rate": 0.001470823522042398, - "loss": 1.1861, - "step": 6581 - }, - { - "epoch": 0.5062687485578032, - "learning_rate": 0.0014704611278934035, - "loss": 1.0058, - "step": 6582 - }, - { - "epoch": 0.5063456657180216, - "learning_rate": 0.0014700987354692146, - "loss": 1.5371, - "step": 6583 - }, - { - "epoch": 0.5064225828782402, - "learning_rate": 0.0014697363447909912, - "loss": 1.0502, - "step": 6584 - }, - { - "epoch": 0.5064995000384586, - "learning_rate": 0.0014693739558798947, - "loss": 1.1506, - "step": 6585 - }, - { - "epoch": 0.5065764171986771, - "learning_rate": 0.0014690115687570845, - "loss": 1.1711, - "step": 6586 - }, - { - "epoch": 0.5066533343588955, - "learning_rate": 0.0014686491834437214, - "loss": 1.0638, - "step": 6587 - }, - { - "epoch": 0.5067302515191139, - "learning_rate": 0.0014682867999609654, - "loss": 1.0009, - "step": 6588 - }, - { - "epoch": 0.5068071686793324, - "learning_rate": 0.0014679244183299753, - "loss": 1.1536, - "step": 6589 - }, - { - "epoch": 0.5068840858395508, - "learning_rate": 0.0014675620385719128, - "loss": 0.9509, - "step": 6590 - }, - { - "epoch": 0.5069610029997692, - "learning_rate": 0.0014671996607079365, - "loss": 0.9983, - "step": 6591 - }, - { - "epoch": 0.5070379201599877, - "learning_rate": 0.0014668372847592055, - "loss": 1.2864, - "step": 6592 - }, - { - "epoch": 0.5071148373202061, - "learning_rate": 0.0014664749107468804, - "loss": 1.3432, - "step": 6593 - }, - { - "epoch": 0.5071917544804245, - "learning_rate": 0.0014661125386921205, - "loss": 1.1032, - "step": 6594 - }, - { - "epoch": 0.507268671640643, - "learning_rate": 0.0014657501686160842, - "loss": 1.4989, - "step": 6595 - }, - { - "epoch": 0.5073455888008614, - "learning_rate": 0.0014653878005399317, - "loss": 1.3576, - "step": 6596 - }, - { - "epoch": 0.50742250596108, - "learning_rate": 0.0014650254344848212, - "loss": 1.3586, - "step": 6597 - }, - { - "epoch": 0.5074994231212984, - "learning_rate": 0.0014646630704719123, - "loss": 1.2624, - "step": 6598 - }, - { - "epoch": 0.5075763402815168, - "learning_rate": 0.001464300708522364, - "loss": 1.0322, - "step": 6599 - }, - { - "epoch": 0.5076532574417353, - "learning_rate": 0.0014639383486573336, - "loss": 0.835, - "step": 6600 - }, - { - "epoch": 0.5077301746019537, - "learning_rate": 0.0014635759908979815, - "loss": 1.3156, - "step": 6601 - }, - { - "epoch": 0.5078070917621721, - "learning_rate": 0.001463213635265465, - "loss": 1.2155, - "step": 6602 - }, - { - "epoch": 0.5078840089223906, - "learning_rate": 0.0014628512817809422, - "loss": 1.1603, - "step": 6603 - }, - { - "epoch": 0.507960926082609, - "learning_rate": 0.001462488930465572, - "loss": 0.8567, - "step": 6604 - }, - { - 
"epoch": 0.5080378432428275, - "learning_rate": 0.0014621265813405122, - "loss": 1.3782, - "step": 6605 - }, - { - "epoch": 0.5081147604030459, - "learning_rate": 0.0014617642344269212, - "loss": 1.0919, - "step": 6606 - }, - { - "epoch": 0.5081916775632643, - "learning_rate": 0.001461401889745956, - "loss": 1.0697, - "step": 6607 - }, - { - "epoch": 0.5082685947234828, - "learning_rate": 0.0014610395473187746, - "loss": 1.1664, - "step": 6608 - }, - { - "epoch": 0.5083455118837013, - "learning_rate": 0.0014606772071665349, - "loss": 1.5752, - "step": 6609 - }, - { - "epoch": 0.5084224290439197, - "learning_rate": 0.0014603148693103944, - "loss": 1.3914, - "step": 6610 - }, - { - "epoch": 0.5084993462041382, - "learning_rate": 0.001459952533771509, - "loss": 1.3671, - "step": 6611 - }, - { - "epoch": 0.5085762633643566, - "learning_rate": 0.001459590200571038, - "loss": 1.3097, - "step": 6612 - }, - { - "epoch": 0.5086531805245751, - "learning_rate": 0.0014592278697301362, - "loss": 1.1959, - "step": 6613 - }, - { - "epoch": 0.5087300976847935, - "learning_rate": 0.0014588655412699626, - "loss": 1.3758, - "step": 6614 - }, - { - "epoch": 0.5088070148450119, - "learning_rate": 0.0014585032152116726, - "loss": 1.2366, - "step": 6615 - }, - { - "epoch": 0.5088839320052304, - "learning_rate": 0.0014581408915764228, - "loss": 1.2483, - "step": 6616 - }, - { - "epoch": 0.5089608491654488, - "learning_rate": 0.0014577785703853703, - "loss": 0.9804, - "step": 6617 - }, - { - "epoch": 0.5090377663256672, - "learning_rate": 0.001457416251659671, - "loss": 0.9902, - "step": 6618 - }, - { - "epoch": 0.5091146834858857, - "learning_rate": 0.001457053935420481, - "loss": 1.6953, - "step": 6619 - }, - { - "epoch": 0.5091916006461041, - "learning_rate": 0.0014566916216889566, - "loss": 1.0082, - "step": 6620 - }, - { - "epoch": 0.5092685178063225, - "learning_rate": 0.001456329310486254, - "loss": 1.1904, - "step": 6621 - }, - { - "epoch": 0.509345434966541, - "learning_rate": 0.0014559670018335273, - "loss": 1.3764, - "step": 6622 - }, - { - "epoch": 0.5094223521267595, - "learning_rate": 0.0014556046957519345, - "loss": 1.2398, - "step": 6623 - }, - { - "epoch": 0.509499269286978, - "learning_rate": 0.0014552423922626284, - "loss": 1.2004, - "step": 6624 - }, - { - "epoch": 0.5095761864471964, - "learning_rate": 0.001454880091386767, - "loss": 1.3528, - "step": 6625 - }, - { - "epoch": 0.5096531036074148, - "learning_rate": 0.0014545177931455037, - "loss": 1.0543, - "step": 6626 - }, - { - "epoch": 0.5097300207676333, - "learning_rate": 0.0014541554975599936, - "loss": 1.0902, - "step": 6627 - }, - { - "epoch": 0.5098069379278517, - "learning_rate": 0.001453793204651392, - "loss": 0.7525, - "step": 6628 - }, - { - "epoch": 0.5098838550880701, - "learning_rate": 0.0014534309144408532, - "loss": 1.1906, - "step": 6629 - }, - { - "epoch": 0.5099607722482886, - "learning_rate": 0.0014530686269495316, - "loss": 1.3126, - "step": 6630 - }, - { - "epoch": 0.510037689408507, - "learning_rate": 0.0014527063421985822, - "loss": 1.408, - "step": 6631 - }, - { - "epoch": 0.5101146065687255, - "learning_rate": 0.0014523440602091583, - "loss": 1.0331, - "step": 6632 - }, - { - "epoch": 0.5101915237289439, - "learning_rate": 0.001451981781002415, - "loss": 0.8259, - "step": 6633 - }, - { - "epoch": 0.5102684408891623, - "learning_rate": 0.0014516195045995058, - "loss": 1.2364, - "step": 6634 - }, - { - "epoch": 0.5103453580493809, - "learning_rate": 0.001451257231021583, - "loss": 1.0736, - "step": 6635 - }, - { - 
"epoch": 0.5104222752095993, - "learning_rate": 0.0014508949602898022, - "loss": 1.1846, - "step": 6636 - }, - { - "epoch": 0.5104991923698177, - "learning_rate": 0.0014505326924253157, - "loss": 1.4337, - "step": 6637 - }, - { - "epoch": 0.5105761095300362, - "learning_rate": 0.0014501704274492765, - "loss": 1.5369, - "step": 6638 - }, - { - "epoch": 0.5106530266902546, - "learning_rate": 0.0014498081653828382, - "loss": 1.0255, - "step": 6639 - }, - { - "epoch": 0.510729943850473, - "learning_rate": 0.0014494459062471537, - "loss": 1.2547, - "step": 6640 - }, - { - "epoch": 0.5108068610106915, - "learning_rate": 0.0014490836500633748, - "loss": 0.8558, - "step": 6641 - }, - { - "epoch": 0.5108837781709099, - "learning_rate": 0.001448721396852655, - "loss": 1.011, - "step": 6642 - }, - { - "epoch": 0.5109606953311284, - "learning_rate": 0.001448359146636146, - "loss": 0.6644, - "step": 6643 - }, - { - "epoch": 0.5110376124913468, - "learning_rate": 0.0014479968994350004, - "loss": 1.4301, - "step": 6644 - }, - { - "epoch": 0.5111145296515652, - "learning_rate": 0.0014476346552703705, - "loss": 1.2024, - "step": 6645 - }, - { - "epoch": 0.5111914468117837, - "learning_rate": 0.0014472724141634068, - "loss": 1.0296, - "step": 6646 - }, - { - "epoch": 0.5112683639720021, - "learning_rate": 0.0014469101761352627, - "loss": 1.4159, - "step": 6647 - }, - { - "epoch": 0.5113452811322206, - "learning_rate": 0.0014465479412070885, - "loss": 1.272, - "step": 6648 - }, - { - "epoch": 0.5114221982924391, - "learning_rate": 0.0014461857094000352, - "loss": 1.3374, - "step": 6649 - }, - { - "epoch": 0.5114991154526575, - "learning_rate": 0.001445823480735255, - "loss": 1.012, - "step": 6650 - }, - { - "epoch": 0.511576032612876, - "learning_rate": 0.0014454612552338977, - "loss": 1.4119, - "step": 6651 - }, - { - "epoch": 0.5116529497730944, - "learning_rate": 0.0014450990329171146, - "loss": 0.8706, - "step": 6652 - }, - { - "epoch": 0.5117298669333128, - "learning_rate": 0.0014447368138060565, - "loss": 1.0146, - "step": 6653 - }, - { - "epoch": 0.5118067840935313, - "learning_rate": 0.0014443745979218733, - "loss": 1.1647, - "step": 6654 - }, - { - "epoch": 0.5118837012537497, - "learning_rate": 0.0014440123852857152, - "loss": 1.1763, - "step": 6655 - }, - { - "epoch": 0.5119606184139681, - "learning_rate": 0.0014436501759187326, - "loss": 1.1731, - "step": 6656 - }, - { - "epoch": 0.5120375355741866, - "learning_rate": 0.0014432879698420744, - "loss": 0.9966, - "step": 6657 - }, - { - "epoch": 0.512114452734405, - "learning_rate": 0.0014429257670768912, - "loss": 1.0666, - "step": 6658 - }, - { - "epoch": 0.5121913698946234, - "learning_rate": 0.0014425635676443322, - "loss": 1.2646, - "step": 6659 - }, - { - "epoch": 0.512268287054842, - "learning_rate": 0.0014422013715655459, - "loss": 1.1913, - "step": 6660 - }, - { - "epoch": 0.5123452042150604, - "learning_rate": 0.0014418391788616819, - "loss": 1.1502, - "step": 6661 - }, - { - "epoch": 0.5124221213752789, - "learning_rate": 0.0014414769895538884, - "loss": 1.2237, - "step": 6662 - }, - { - "epoch": 0.5124990385354973, - "learning_rate": 0.001441114803663315, - "loss": 1.0391, - "step": 6663 - }, - { - "epoch": 0.5125759556957157, - "learning_rate": 0.0014407526212111095, - "loss": 1.1168, - "step": 6664 - }, - { - "epoch": 0.5126528728559342, - "learning_rate": 0.0014403904422184198, - "loss": 0.949, - "step": 6665 - }, - { - "epoch": 0.5127297900161526, - "learning_rate": 0.001440028266706395, - "loss": 1.1032, - "step": 6666 - }, - { - 
"epoch": 0.512806707176371, - "learning_rate": 0.001439666094696182, - "loss": 1.0011, - "step": 6667 - }, - { - "epoch": 0.5128836243365895, - "learning_rate": 0.001439303926208928, - "loss": 1.0922, - "step": 6668 - }, - { - "epoch": 0.5129605414968079, - "learning_rate": 0.0014389417612657815, - "loss": 1.4588, - "step": 6669 - }, - { - "epoch": 0.5130374586570264, - "learning_rate": 0.001438579599887889, - "loss": 0.9473, - "step": 6670 - }, - { - "epoch": 0.5131143758172448, - "learning_rate": 0.0014382174420963978, - "loss": 1.2888, - "step": 6671 - }, - { - "epoch": 0.5131912929774632, - "learning_rate": 0.0014378552879124552, - "loss": 1.0715, - "step": 6672 - }, - { - "epoch": 0.5132682101376818, - "learning_rate": 0.0014374931373572059, - "loss": 1.0945, - "step": 6673 - }, - { - "epoch": 0.5133451272979002, - "learning_rate": 0.0014371309904517985, - "loss": 1.2115, - "step": 6674 - }, - { - "epoch": 0.5134220444581186, - "learning_rate": 0.0014367688472173776, - "loss": 0.9833, - "step": 6675 - }, - { - "epoch": 0.5134989616183371, - "learning_rate": 0.0014364067076750894, - "loss": 1.0794, - "step": 6676 - }, - { - "epoch": 0.5135758787785555, - "learning_rate": 0.00143604457184608, - "loss": 0.9514, - "step": 6677 - }, - { - "epoch": 0.5136527959387739, - "learning_rate": 0.001435682439751495, - "loss": 1.0807, - "step": 6678 - }, - { - "epoch": 0.5137297130989924, - "learning_rate": 0.0014353203114124788, - "loss": 1.1926, - "step": 6679 - }, - { - "epoch": 0.5138066302592108, - "learning_rate": 0.0014349581868501775, - "loss": 1.0125, - "step": 6680 - }, - { - "epoch": 0.5138835474194293, - "learning_rate": 0.001434596066085735, - "loss": 1.0901, - "step": 6681 - }, - { - "epoch": 0.5139604645796477, - "learning_rate": 0.0014342339491402968, - "loss": 1.1668, - "step": 6682 - }, - { - "epoch": 0.5140373817398661, - "learning_rate": 0.001433871836035007, - "loss": 1.5974, - "step": 6683 - }, - { - "epoch": 0.5141142989000846, - "learning_rate": 0.0014335097267910088, - "loss": 1.4438, - "step": 6684 - }, - { - "epoch": 0.514191216060303, - "learning_rate": 0.001433147621429448, - "loss": 1.3482, - "step": 6685 - }, - { - "epoch": 0.5142681332205215, - "learning_rate": 0.0014327855199714667, - "loss": 1.2907, - "step": 6686 - }, - { - "epoch": 0.51434505038074, - "learning_rate": 0.0014324234224382087, - "loss": 1.0154, - "step": 6687 - }, - { - "epoch": 0.5144219675409584, - "learning_rate": 0.0014320613288508178, - "loss": 1.1176, - "step": 6688 - }, - { - "epoch": 0.5144988847011769, - "learning_rate": 0.0014316992392304363, - "loss": 1.0381, - "step": 6689 - }, - { - "epoch": 0.5145758018613953, - "learning_rate": 0.001431337153598208, - "loss": 1.1203, - "step": 6690 - }, - { - "epoch": 0.5146527190216137, - "learning_rate": 0.0014309750719752746, - "loss": 1.1187, - "step": 6691 - }, - { - "epoch": 0.5147296361818322, - "learning_rate": 0.0014306129943827785, - "loss": 1.0744, - "step": 6692 - }, - { - "epoch": 0.5148065533420506, - "learning_rate": 0.0014302509208418623, - "loss": 1.0064, - "step": 6693 - }, - { - "epoch": 0.514883470502269, - "learning_rate": 0.0014298888513736678, - "loss": 1.0792, - "step": 6694 - }, - { - "epoch": 0.5149603876624875, - "learning_rate": 0.0014295267859993355, - "loss": 0.9389, - "step": 6695 - }, - { - "epoch": 0.5150373048227059, - "learning_rate": 0.0014291647247400084, - "loss": 1.1881, - "step": 6696 - }, - { - "epoch": 0.5151142219829243, - "learning_rate": 0.0014288026676168267, - "loss": 0.8957, - "step": 6697 - }, - { - 
"epoch": 0.5151911391431429, - "learning_rate": 0.0014284406146509309, - "loss": 1.0012, - "step": 6698 - }, - { - "epoch": 0.5152680563033613, - "learning_rate": 0.0014280785658634626, - "loss": 0.9666, - "step": 6699 - }, - { - "epoch": 0.5153449734635798, - "learning_rate": 0.0014277165212755614, - "loss": 0.8031, - "step": 6700 - }, - { - "epoch": 0.5154218906237982, - "learning_rate": 0.0014273544809083683, - "loss": 1.2961, - "step": 6701 - }, - { - "epoch": 0.5154988077840166, - "learning_rate": 0.0014269924447830227, - "loss": 0.9829, - "step": 6702 - }, - { - "epoch": 0.5155757249442351, - "learning_rate": 0.0014266304129206642, - "loss": 1.4266, - "step": 6703 - }, - { - "epoch": 0.5156526421044535, - "learning_rate": 0.0014262683853424325, - "loss": 1.1295, - "step": 6704 - }, - { - "epoch": 0.5157295592646719, - "learning_rate": 0.0014259063620694669, - "loss": 0.7262, - "step": 6705 - }, - { - "epoch": 0.5158064764248904, - "learning_rate": 0.0014255443431229052, - "loss": 1.3216, - "step": 6706 - }, - { - "epoch": 0.5158833935851088, - "learning_rate": 0.0014251823285238877, - "loss": 1.3511, - "step": 6707 - }, - { - "epoch": 0.5159603107453273, - "learning_rate": 0.001424820318293551, - "loss": 1.5622, - "step": 6708 - }, - { - "epoch": 0.5160372279055457, - "learning_rate": 0.0014244583124530356, - "loss": 1.1363, - "step": 6709 - }, - { - "epoch": 0.5161141450657641, - "learning_rate": 0.0014240963110234775, - "loss": 1.114, - "step": 6710 - }, - { - "epoch": 0.5161910622259827, - "learning_rate": 0.0014237343140260145, - "loss": 1.316, - "step": 6711 - }, - { - "epoch": 0.5162679793862011, - "learning_rate": 0.0014233723214817848, - "loss": 1.119, - "step": 6712 - }, - { - "epoch": 0.5163448965464195, - "learning_rate": 0.0014230103334119252, - "loss": 1.3046, - "step": 6713 - }, - { - "epoch": 0.516421813706638, - "learning_rate": 0.001422648349837572, - "loss": 1.1005, - "step": 6714 - }, - { - "epoch": 0.5164987308668564, - "learning_rate": 0.0014222863707798626, - "loss": 1.3312, - "step": 6715 - }, - { - "epoch": 0.5165756480270748, - "learning_rate": 0.0014219243962599333, - "loss": 1.1983, - "step": 6716 - }, - { - "epoch": 0.5166525651872933, - "learning_rate": 0.001421562426298919, - "loss": 0.9307, - "step": 6717 - }, - { - "epoch": 0.5167294823475117, - "learning_rate": 0.0014212004609179573, - "loss": 1.1534, - "step": 6718 - }, - { - "epoch": 0.5168063995077302, - "learning_rate": 0.001420838500138182, - "loss": 0.843, - "step": 6719 - }, - { - "epoch": 0.5168833166679486, - "learning_rate": 0.00142047654398073, - "loss": 1.1494, - "step": 6720 - }, - { - "epoch": 0.516960233828167, - "learning_rate": 0.001420114592466735, - "loss": 1.1551, - "step": 6721 - }, - { - "epoch": 0.5170371509883855, - "learning_rate": 0.001419752645617332, - "loss": 1.0119, - "step": 6722 - }, - { - "epoch": 0.517114068148604, - "learning_rate": 0.0014193907034536562, - "loss": 1.1552, - "step": 6723 - }, - { - "epoch": 0.5171909853088223, - "learning_rate": 0.001419028765996841, - "loss": 1.1905, - "step": 6724 - }, - { - "epoch": 0.5172679024690409, - "learning_rate": 0.0014186668332680204, - "loss": 1.2755, - "step": 6725 - }, - { - "epoch": 0.5173448196292593, - "learning_rate": 0.0014183049052883282, - "loss": 0.8684, - "step": 6726 - }, - { - "epoch": 0.5174217367894778, - "learning_rate": 0.0014179429820788977, - "loss": 1.0045, - "step": 6727 - }, - { - "epoch": 0.5174986539496962, - "learning_rate": 0.001417581063660862, - "loss": 1.4103, - "step": 6728 - }, - { - 
"epoch": 0.5175755711099146, - "learning_rate": 0.0014172191500553544, - "loss": 1.1899, - "step": 6729 - }, - { - "epoch": 0.5176524882701331, - "learning_rate": 0.001416857241283506, - "loss": 1.3045, - "step": 6730 - }, - { - "epoch": 0.5177294054303515, - "learning_rate": 0.0014164953373664508, - "loss": 1.2077, - "step": 6731 - }, - { - "epoch": 0.5178063225905699, - "learning_rate": 0.0014161334383253196, - "loss": 1.3035, - "step": 6732 - }, - { - "epoch": 0.5178832397507884, - "learning_rate": 0.001415771544181244, - "loss": 1.2366, - "step": 6733 - }, - { - "epoch": 0.5179601569110068, - "learning_rate": 0.0014154096549553559, - "loss": 1.2346, - "step": 6734 - }, - { - "epoch": 0.5180370740712253, - "learning_rate": 0.0014150477706687862, - "loss": 0.8414, - "step": 6735 - }, - { - "epoch": 0.5181139912314437, - "learning_rate": 0.0014146858913426656, - "loss": 0.974, - "step": 6736 - }, - { - "epoch": 0.5181909083916622, - "learning_rate": 0.0014143240169981251, - "loss": 0.9438, - "step": 6737 - }, - { - "epoch": 0.5182678255518807, - "learning_rate": 0.001413962147656294, - "loss": 1.0879, - "step": 6738 - }, - { - "epoch": 0.5183447427120991, - "learning_rate": 0.001413600283338303, - "loss": 1.079, - "step": 6739 - }, - { - "epoch": 0.5184216598723175, - "learning_rate": 0.001413238424065282, - "loss": 1.144, - "step": 6740 - }, - { - "epoch": 0.518498577032536, - "learning_rate": 0.001412876569858359, - "loss": 1.3145, - "step": 6741 - }, - { - "epoch": 0.5185754941927544, - "learning_rate": 0.0014125147207386646, - "loss": 1.2907, - "step": 6742 - }, - { - "epoch": 0.5186524113529728, - "learning_rate": 0.0014121528767273267, - "loss": 1.1897, - "step": 6743 - }, - { - "epoch": 0.5187293285131913, - "learning_rate": 0.0014117910378454734, - "loss": 1.2253, - "step": 6744 - }, - { - "epoch": 0.5188062456734097, - "learning_rate": 0.0014114292041142336, - "loss": 1.3954, - "step": 6745 - }, - { - "epoch": 0.5188831628336282, - "learning_rate": 0.0014110673755547349, - "loss": 0.9285, - "step": 6746 - }, - { - "epoch": 0.5189600799938466, - "learning_rate": 0.0014107055521881049, - "loss": 1.0801, - "step": 6747 - }, - { - "epoch": 0.519036997154065, - "learning_rate": 0.0014103437340354707, - "loss": 0.8494, - "step": 6748 - }, - { - "epoch": 0.5191139143142836, - "learning_rate": 0.001409981921117959, - "loss": 1.2647, - "step": 6749 - }, - { - "epoch": 0.519190831474502, - "learning_rate": 0.0014096201134566973, - "loss": 0.828, - "step": 6750 - }, - { - "epoch": 0.5192677486347204, - "learning_rate": 0.0014092583110728114, - "loss": 1.0505, - "step": 6751 - }, - { - "epoch": 0.5193446657949389, - "learning_rate": 0.0014088965139874264, - "loss": 1.2376, - "step": 6752 - }, - { - "epoch": 0.5194215829551573, - "learning_rate": 0.00140853472222167, - "loss": 1.5328, - "step": 6753 - }, - { - "epoch": 0.5194985001153758, - "learning_rate": 0.0014081729357966655, - "loss": 1.3196, - "step": 6754 - }, - { - "epoch": 0.5195754172755942, - "learning_rate": 0.0014078111547335397, - "loss": 1.0608, - "step": 6755 - }, - { - "epoch": 0.5196523344358126, - "learning_rate": 0.0014074493790534165, - "loss": 1.0171, - "step": 6756 - }, - { - "epoch": 0.5197292515960311, - "learning_rate": 0.0014070876087774202, - "loss": 1.4048, - "step": 6757 - }, - { - "epoch": 0.5198061687562495, - "learning_rate": 0.0014067258439266757, - "loss": 1.2421, - "step": 6758 - }, - { - "epoch": 0.5198830859164679, - "learning_rate": 0.001406364084522306, - "loss": 0.8108, - "step": 6759 - }, - { - 
"epoch": 0.5199600030766864, - "learning_rate": 0.001406002330585435, - "loss": 1.175, - "step": 6760 - }, - { - "epoch": 0.5200369202369048, - "learning_rate": 0.0014056405821371862, - "loss": 1.3451, - "step": 6761 - }, - { - "epoch": 0.5201138373971232, - "learning_rate": 0.0014052788391986824, - "loss": 1.3186, - "step": 6762 - }, - { - "epoch": 0.5201907545573418, - "learning_rate": 0.0014049171017910449, - "loss": 0.9307, - "step": 6763 - }, - { - "epoch": 0.5202676717175602, - "learning_rate": 0.001404555369935398, - "loss": 1.0457, - "step": 6764 - }, - { - "epoch": 0.5203445888777787, - "learning_rate": 0.0014041936436528612, - "loss": 0.9869, - "step": 6765 - }, - { - "epoch": 0.5204215060379971, - "learning_rate": 0.0014038319229645586, - "loss": 1.3627, - "step": 6766 - }, - { - "epoch": 0.5204984231982155, - "learning_rate": 0.0014034702078916102, - "loss": 1.1022, - "step": 6767 - }, - { - "epoch": 0.520575340358434, - "learning_rate": 0.0014031084984551362, - "loss": 0.9449, - "step": 6768 - }, - { - "epoch": 0.5206522575186524, - "learning_rate": 0.0014027467946762585, - "loss": 0.9179, - "step": 6769 - }, - { - "epoch": 0.5207291746788708, - "learning_rate": 0.0014023850965760969, - "loss": 0.9102, - "step": 6770 - }, - { - "epoch": 0.5208060918390893, - "learning_rate": 0.0014020234041757706, - "loss": 0.8934, - "step": 6771 - }, - { - "epoch": 0.5208830089993077, - "learning_rate": 0.0014016617174964004, - "loss": 0.9073, - "step": 6772 - }, - { - "epoch": 0.5209599261595262, - "learning_rate": 0.0014013000365591046, - "loss": 1.0801, - "step": 6773 - }, - { - "epoch": 0.5210368433197446, - "learning_rate": 0.0014009383613850029, - "loss": 0.8615, - "step": 6774 - }, - { - "epoch": 0.521113760479963, - "learning_rate": 0.0014005766919952137, - "loss": 0.8591, - "step": 6775 - }, - { - "epoch": 0.5211906776401816, - "learning_rate": 0.0014002150284108542, - "loss": 0.8669, - "step": 6776 - }, - { - "epoch": 0.5212675948004, - "learning_rate": 0.0013998533706530442, - "loss": 1.0367, - "step": 6777 - }, - { - "epoch": 0.5213445119606184, - "learning_rate": 0.0013994917187429002, - "loss": 1.3112, - "step": 6778 - }, - { - "epoch": 0.5214214291208369, - "learning_rate": 0.0013991300727015385, - "loss": 1.364, - "step": 6779 - }, - { - "epoch": 0.5214983462810553, - "learning_rate": 0.0013987684325500779, - "loss": 1.0634, - "step": 6780 - }, - { - "epoch": 0.5215752634412737, - "learning_rate": 0.0013984067983096337, - "loss": 1.1844, - "step": 6781 - }, - { - "epoch": 0.5216521806014922, - "learning_rate": 0.0013980451700013222, - "loss": 1.1443, - "step": 6782 - }, - { - "epoch": 0.5217290977617106, - "learning_rate": 0.00139768354764626, - "loss": 1.3297, - "step": 6783 - }, - { - "epoch": 0.5218060149219291, - "learning_rate": 0.0013973219312655614, - "loss": 1.2687, - "step": 6784 - }, - { - "epoch": 0.5218829320821475, - "learning_rate": 0.0013969603208803427, - "loss": 1.268, - "step": 6785 - }, - { - "epoch": 0.5219598492423659, - "learning_rate": 0.0013965987165117186, - "loss": 1.2805, - "step": 6786 - }, - { - "epoch": 0.5220367664025845, - "learning_rate": 0.0013962371181808022, - "loss": 1.2949, - "step": 6787 - }, - { - "epoch": 0.5221136835628029, - "learning_rate": 0.0013958755259087098, - "loss": 1.1942, - "step": 6788 - }, - { - "epoch": 0.5221906007230213, - "learning_rate": 0.0013955139397165534, - "loss": 1.2702, - "step": 6789 - }, - { - "epoch": 0.5222675178832398, - "learning_rate": 0.0013951523596254466, - "loss": 1.4694, - "step": 6790 - }, - { - 
"epoch": 0.5223444350434582, - "learning_rate": 0.0013947907856565033, - "loss": 1.4483, - "step": 6791 - }, - { - "epoch": 0.5224213522036767, - "learning_rate": 0.0013944292178308352, - "loss": 1.2538, - "step": 6792 - }, - { - "epoch": 0.5224982693638951, - "learning_rate": 0.0013940676561695555, - "loss": 1.3387, - "step": 6793 - }, - { - "epoch": 0.5225751865241135, - "learning_rate": 0.0013937061006937756, - "loss": 1.1385, - "step": 6794 - }, - { - "epoch": 0.522652103684332, - "learning_rate": 0.001393344551424607, - "loss": 1.3387, - "step": 6795 - }, - { - "epoch": 0.5227290208445504, - "learning_rate": 0.0013929830083831616, - "loss": 0.9344, - "step": 6796 - }, - { - "epoch": 0.5228059380047688, - "learning_rate": 0.00139262147159055, - "loss": 1.1744, - "step": 6797 - }, - { - "epoch": 0.5228828551649873, - "learning_rate": 0.001392259941067882, - "loss": 0.8946, - "step": 6798 - }, - { - "epoch": 0.5229597723252057, - "learning_rate": 0.0013918984168362692, - "loss": 1.1324, - "step": 6799 - }, - { - "epoch": 0.5230366894854241, - "learning_rate": 0.00139153689891682, - "loss": 1.1671, - "step": 6800 - }, - { - "epoch": 0.5231136066456427, - "learning_rate": 0.0013911753873306438, - "loss": 1.1916, - "step": 6801 - }, - { - "epoch": 0.5231905238058611, - "learning_rate": 0.0013908138820988509, - "loss": 1.0871, - "step": 6802 - }, - { - "epoch": 0.5232674409660796, - "learning_rate": 0.0013904523832425487, - "loss": 1.4429, - "step": 6803 - }, - { - "epoch": 0.523344358126298, - "learning_rate": 0.0013900908907828462, - "loss": 1.2224, - "step": 6804 - }, - { - "epoch": 0.5234212752865164, - "learning_rate": 0.0013897294047408514, - "loss": 1.1442, - "step": 6805 - }, - { - "epoch": 0.5234981924467349, - "learning_rate": 0.001389367925137671, - "loss": 1.4412, - "step": 6806 - }, - { - "epoch": 0.5235751096069533, - "learning_rate": 0.0013890064519944134, - "loss": 1.1639, - "step": 6807 - }, - { - "epoch": 0.5236520267671717, - "learning_rate": 0.0013886449853321844, - "loss": 0.8159, - "step": 6808 - }, - { - "epoch": 0.5237289439273902, - "learning_rate": 0.0013882835251720906, - "loss": 1.1721, - "step": 6809 - }, - { - "epoch": 0.5238058610876086, - "learning_rate": 0.0013879220715352384, - "loss": 1.1325, - "step": 6810 - }, - { - "epoch": 0.5238827782478271, - "learning_rate": 0.001387560624442733, - "loss": 0.9891, - "step": 6811 - }, - { - "epoch": 0.5239596954080455, - "learning_rate": 0.0013871991839156804, - "loss": 1.3041, - "step": 6812 - }, - { - "epoch": 0.524036612568264, - "learning_rate": 0.0013868377499751852, - "loss": 1.0041, - "step": 6813 - }, - { - "epoch": 0.5241135297284825, - "learning_rate": 0.001386476322642351, - "loss": 1.5879, - "step": 6814 - }, - { - "epoch": 0.5241904468887009, - "learning_rate": 0.0013861149019382829, - "loss": 1.1554, - "step": 6815 - }, - { - "epoch": 0.5242673640489193, - "learning_rate": 0.0013857534878840845, - "loss": 1.5309, - "step": 6816 - }, - { - "epoch": 0.5243442812091378, - "learning_rate": 0.0013853920805008586, - "loss": 1.2539, - "step": 6817 - }, - { - "epoch": 0.5244211983693562, - "learning_rate": 0.0013850306798097092, - "loss": 1.0906, - "step": 6818 - }, - { - "epoch": 0.5244981155295746, - "learning_rate": 0.0013846692858317379, - "loss": 0.8813, - "step": 6819 - }, - { - "epoch": 0.5245750326897931, - "learning_rate": 0.0013843078985880468, - "loss": 0.9908, - "step": 6820 - }, - { - "epoch": 0.5246519498500115, - "learning_rate": 0.0013839465180997385, - "loss": 1.2657, - "step": 6821 - }, - { - 
"epoch": 0.52472886701023, - "learning_rate": 0.0013835851443879136, - "loss": 1.4675, - "step": 6822 - }, - { - "epoch": 0.5248057841704484, - "learning_rate": 0.0013832237774736738, - "loss": 1.2915, - "step": 6823 - }, - { - "epoch": 0.5248827013306668, - "learning_rate": 0.0013828624173781198, - "loss": 1.2075, - "step": 6824 - }, - { - "epoch": 0.5249596184908853, - "learning_rate": 0.0013825010641223502, - "loss": 1.3164, - "step": 6825 - }, - { - "epoch": 0.5250365356511038, - "learning_rate": 0.0013821397177274672, - "loss": 1.0839, - "step": 6826 - }, - { - "epoch": 0.5251134528113222, - "learning_rate": 0.0013817783782145681, - "loss": 1.4518, - "step": 6827 - }, - { - "epoch": 0.5251903699715407, - "learning_rate": 0.0013814170456047526, - "loss": 1.2361, - "step": 6828 - }, - { - "epoch": 0.5252672871317591, - "learning_rate": 0.0013810557199191197, - "loss": 1.4809, - "step": 6829 - }, - { - "epoch": 0.5253442042919776, - "learning_rate": 0.001380694401178767, - "loss": 1.3586, - "step": 6830 - }, - { - "epoch": 0.525421121452196, - "learning_rate": 0.001380333089404793, - "loss": 1.0907, - "step": 6831 - }, - { - "epoch": 0.5254980386124144, - "learning_rate": 0.0013799717846182944, - "loss": 1.0633, - "step": 6832 - }, - { - "epoch": 0.5255749557726329, - "learning_rate": 0.0013796104868403682, - "loss": 1.0793, - "step": 6833 - }, - { - "epoch": 0.5256518729328513, - "learning_rate": 0.0013792491960921114, - "loss": 1.1081, - "step": 6834 - }, - { - "epoch": 0.5257287900930697, - "learning_rate": 0.00137888791239462, - "loss": 0.8416, - "step": 6835 - }, - { - "epoch": 0.5258057072532882, - "learning_rate": 0.001378526635768989, - "loss": 1.1909, - "step": 6836 - }, - { - "epoch": 0.5258826244135066, - "learning_rate": 0.0013781653662363151, - "loss": 1.2034, - "step": 6837 - }, - { - "epoch": 0.5259595415737252, - "learning_rate": 0.0013778041038176915, - "loss": 1.0422, - "step": 6838 - }, - { - "epoch": 0.5260364587339436, - "learning_rate": 0.0013774428485342147, - "loss": 1.3183, - "step": 6839 - }, - { - "epoch": 0.526113375894162, - "learning_rate": 0.0013770816004069774, - "loss": 1.0863, - "step": 6840 - }, - { - "epoch": 0.5261902930543805, - "learning_rate": 0.001376720359457073, - "loss": 1.3833, - "step": 6841 - }, - { - "epoch": 0.5262672102145989, - "learning_rate": 0.0013763591257055958, - "loss": 1.0414, - "step": 6842 - }, - { - "epoch": 0.5263441273748173, - "learning_rate": 0.001375997899173638, - "loss": 1.3602, - "step": 6843 - }, - { - "epoch": 0.5264210445350358, - "learning_rate": 0.001375636679882292, - "loss": 1.2499, - "step": 6844 - }, - { - "epoch": 0.5264979616952542, - "learning_rate": 0.0013752754678526499, - "loss": 1.0176, - "step": 6845 - }, - { - "epoch": 0.5265748788554726, - "learning_rate": 0.0013749142631058035, - "loss": 1.4699, - "step": 6846 - }, - { - "epoch": 0.5266517960156911, - "learning_rate": 0.001374553065662843, - "loss": 1.3072, - "step": 6847 - }, - { - "epoch": 0.5267287131759095, - "learning_rate": 0.0013741918755448607, - "loss": 1.1194, - "step": 6848 - }, - { - "epoch": 0.526805630336128, - "learning_rate": 0.0013738306927729444, - "loss": 0.7833, - "step": 6849 - }, - { - "epoch": 0.5268825474963464, - "learning_rate": 0.001373469517368187, - "loss": 0.8987, - "step": 6850 - }, - { - "epoch": 0.5269594646565648, - "learning_rate": 0.0013731083493516757, - "loss": 1.1682, - "step": 6851 - }, - { - "epoch": 0.5270363818167834, - "learning_rate": 0.0013727471887445, - "loss": 1.3395, - "step": 6852 - }, - { - 
"epoch": 0.5271132989770018, - "learning_rate": 0.0013723860355677488, - "loss": 1.4319, - "step": 6853 - }, - { - "epoch": 0.5271902161372202, - "learning_rate": 0.00137202488984251, - "loss": 1.2318, - "step": 6854 - }, - { - "epoch": 0.5272671332974387, - "learning_rate": 0.0013716637515898708, - "loss": 1.2094, - "step": 6855 - }, - { - "epoch": 0.5273440504576571, - "learning_rate": 0.0013713026208309192, - "loss": 1.1864, - "step": 6856 - }, - { - "epoch": 0.5274209676178756, - "learning_rate": 0.0013709414975867416, - "loss": 1.1114, - "step": 6857 - }, - { - "epoch": 0.527497884778094, - "learning_rate": 0.0013705803818784246, - "loss": 1.0764, - "step": 6858 - }, - { - "epoch": 0.5275748019383124, - "learning_rate": 0.0013702192737270545, - "loss": 0.8763, - "step": 6859 - }, - { - "epoch": 0.5276517190985309, - "learning_rate": 0.0013698581731537152, - "loss": 1.092, - "step": 6860 - }, - { - "epoch": 0.5277286362587493, - "learning_rate": 0.0013694970801794938, - "loss": 1.1911, - "step": 6861 - }, - { - "epoch": 0.5278055534189677, - "learning_rate": 0.0013691359948254734, - "loss": 1.1457, - "step": 6862 - }, - { - "epoch": 0.5278824705791862, - "learning_rate": 0.0013687749171127385, - "loss": 1.11, - "step": 6863 - }, - { - "epoch": 0.5279593877394047, - "learning_rate": 0.0013684138470623731, - "loss": 1.1553, - "step": 6864 - }, - { - "epoch": 0.528036304899623, - "learning_rate": 0.0013680527846954605, - "loss": 1.1272, - "step": 6865 - }, - { - "epoch": 0.5281132220598416, - "learning_rate": 0.001367691730033083, - "loss": 1.3221, - "step": 6866 - }, - { - "epoch": 0.52819013922006, - "learning_rate": 0.0013673306830963234, - "loss": 0.8401, - "step": 6867 - }, - { - "epoch": 0.5282670563802785, - "learning_rate": 0.0013669696439062632, - "loss": 1.0266, - "step": 6868 - }, - { - "epoch": 0.5283439735404969, - "learning_rate": 0.0013666086124839844, - "loss": 1.0802, - "step": 6869 - }, - { - "epoch": 0.5284208907007153, - "learning_rate": 0.0013662475888505683, - "loss": 0.9563, - "step": 6870 - }, - { - "epoch": 0.5284978078609338, - "learning_rate": 0.0013658865730270936, - "loss": 1.1702, - "step": 6871 - }, - { - "epoch": 0.5285747250211522, - "learning_rate": 0.001365525565034643, - "loss": 1.075, - "step": 6872 - }, - { - "epoch": 0.5286516421813706, - "learning_rate": 0.0013651645648942942, - "loss": 1.3054, - "step": 6873 - }, - { - "epoch": 0.5287285593415891, - "learning_rate": 0.0013648035726271266, - "loss": 1.3343, - "step": 6874 - }, - { - "epoch": 0.5288054765018075, - "learning_rate": 0.0013644425882542199, - "loss": 1.1683, - "step": 6875 - }, - { - "epoch": 0.528882393662026, - "learning_rate": 0.0013640816117966514, - "loss": 0.9975, - "step": 6876 - }, - { - "epoch": 0.5289593108222445, - "learning_rate": 0.0013637206432754994, - "loss": 0.877, - "step": 6877 - }, - { - "epoch": 0.5290362279824629, - "learning_rate": 0.001363359682711841, - "loss": 1.0053, - "step": 6878 - }, - { - "epoch": 0.5291131451426814, - "learning_rate": 0.001362998730126753, - "loss": 1.2998, - "step": 6879 - }, - { - "epoch": 0.5291900623028998, - "learning_rate": 0.0013626377855413122, - "loss": 1.1646, - "step": 6880 - }, - { - "epoch": 0.5292669794631182, - "learning_rate": 0.0013622768489765945, - "loss": 1.3411, - "step": 6881 - }, - { - "epoch": 0.5293438966233367, - "learning_rate": 0.0013619159204536742, - "loss": 1.1243, - "step": 6882 - }, - { - "epoch": 0.5294208137835551, - "learning_rate": 0.0013615549999936281, - "loss": 1.1901, - "step": 6883 - }, - { - 
"epoch": 0.5294977309437735, - "learning_rate": 0.0013611940876175298, - "loss": 0.9948, - "step": 6884 - }, - { - "epoch": 0.529574648103992, - "learning_rate": 0.0013608331833464527, - "loss": 1.031, - "step": 6885 - }, - { - "epoch": 0.5296515652642104, - "learning_rate": 0.0013604722872014715, - "loss": 1.2082, - "step": 6886 - }, - { - "epoch": 0.5297284824244289, - "learning_rate": 0.0013601113992036586, - "loss": 1.1903, - "step": 6887 - }, - { - "epoch": 0.5298053995846473, - "learning_rate": 0.0013597505193740872, - "loss": 1.2638, - "step": 6888 - }, - { - "epoch": 0.5298823167448657, - "learning_rate": 0.0013593896477338292, - "loss": 1.3412, - "step": 6889 - }, - { - "epoch": 0.5299592339050843, - "learning_rate": 0.0013590287843039557, - "loss": 1.2244, - "step": 6890 - }, - { - "epoch": 0.5300361510653027, - "learning_rate": 0.0013586679291055389, - "loss": 1.0126, - "step": 6891 - }, - { - "epoch": 0.5301130682255211, - "learning_rate": 0.0013583070821596493, - "loss": 0.8857, - "step": 6892 - }, - { - "epoch": 0.5301899853857396, - "learning_rate": 0.0013579462434873558, - "loss": 0.9358, - "step": 6893 - }, - { - "epoch": 0.530266902545958, - "learning_rate": 0.0013575854131097301, - "loss": 1.2337, - "step": 6894 - }, - { - "epoch": 0.5303438197061765, - "learning_rate": 0.0013572245910478398, - "loss": 0.8602, - "step": 6895 - }, - { - "epoch": 0.5304207368663949, - "learning_rate": 0.0013568637773227552, - "loss": 1.2004, - "step": 6896 - }, - { - "epoch": 0.5304976540266133, - "learning_rate": 0.0013565029719555437, - "loss": 1.0215, - "step": 6897 - }, - { - "epoch": 0.5305745711868318, - "learning_rate": 0.0013561421749672727, - "loss": 1.0866, - "step": 6898 - }, - { - "epoch": 0.5306514883470502, - "learning_rate": 0.0013557813863790103, - "loss": 1.3846, - "step": 6899 - }, - { - "epoch": 0.5307284055072686, - "learning_rate": 0.0013554206062118233, - "loss": 1.0447, - "step": 6900 - }, - { - "epoch": 0.5308053226674871, - "learning_rate": 0.0013550598344867772, - "loss": 1.0576, - "step": 6901 - }, - { - "epoch": 0.5308822398277055, - "learning_rate": 0.001354699071224939, - "loss": 0.9923, - "step": 6902 - }, - { - "epoch": 0.530959156987924, - "learning_rate": 0.0013543383164473738, - "loss": 0.9693, - "step": 6903 - }, - { - "epoch": 0.5310360741481425, - "learning_rate": 0.0013539775701751449, - "loss": 0.7004, - "step": 6904 - }, - { - "epoch": 0.5311129913083609, - "learning_rate": 0.0013536168324293188, - "loss": 1.1429, - "step": 6905 - }, - { - "epoch": 0.5311899084685794, - "learning_rate": 0.0013532561032309576, - "loss": 0.8434, - "step": 6906 - }, - { - "epoch": 0.5312668256287978, - "learning_rate": 0.0013528953826011264, - "loss": 1.2499, - "step": 6907 - }, - { - "epoch": 0.5313437427890162, - "learning_rate": 0.0013525346705608868, - "loss": 1.3637, - "step": 6908 - }, - { - "epoch": 0.5314206599492347, - "learning_rate": 0.0013521739671313012, - "loss": 1.5843, - "step": 6909 - }, - { - "epoch": 0.5314975771094531, - "learning_rate": 0.001351813272333432, - "loss": 1.0776, - "step": 6910 - }, - { - "epoch": 0.5315744942696715, - "learning_rate": 0.0013514525861883402, - "loss": 1.336, - "step": 6911 - }, - { - "epoch": 0.53165141142989, - "learning_rate": 0.0013510919087170864, - "loss": 1.6208, - "step": 6912 - }, - { - "epoch": 0.5317283285901084, - "learning_rate": 0.0013507312399407315, - "loss": 1.1827, - "step": 6913 - }, - { - "epoch": 0.531805245750327, - "learning_rate": 0.001350370579880335, - "loss": 1.1826, - "step": 6914 - }, - { - 
"epoch": 0.5318821629105454, - "learning_rate": 0.0013500099285569563, - "loss": 1.0885, - "step": 6915 - }, - { - "epoch": 0.5319590800707638, - "learning_rate": 0.0013496492859916547, - "loss": 1.3705, - "step": 6916 - }, - { - "epoch": 0.5320359972309823, - "learning_rate": 0.0013492886522054868, - "loss": 1.2497, - "step": 6917 - }, - { - "epoch": 0.5321129143912007, - "learning_rate": 0.0013489280272195128, - "loss": 1.2999, - "step": 6918 - }, - { - "epoch": 0.5321898315514191, - "learning_rate": 0.0013485674110547882, - "loss": 0.7348, - "step": 6919 - }, - { - "epoch": 0.5322667487116376, - "learning_rate": 0.0013482068037323702, - "loss": 1.4769, - "step": 6920 - }, - { - "epoch": 0.532343665871856, - "learning_rate": 0.0013478462052733153, - "loss": 0.9537, - "step": 6921 - }, - { - "epoch": 0.5324205830320744, - "learning_rate": 0.0013474856156986792, - "loss": 1.2866, - "step": 6922 - }, - { - "epoch": 0.5324975001922929, - "learning_rate": 0.0013471250350295164, - "loss": 1.0531, - "step": 6923 - }, - { - "epoch": 0.5325744173525113, - "learning_rate": 0.0013467644632868825, - "loss": 1.0772, - "step": 6924 - }, - { - "epoch": 0.5326513345127298, - "learning_rate": 0.001346403900491831, - "loss": 1.0888, - "step": 6925 - }, - { - "epoch": 0.5327282516729482, - "learning_rate": 0.001346043346665416, - "loss": 1.1258, - "step": 6926 - }, - { - "epoch": 0.5328051688331666, - "learning_rate": 0.0013456828018286909, - "loss": 1.2859, - "step": 6927 - }, - { - "epoch": 0.5328820859933852, - "learning_rate": 0.001345322266002707, - "loss": 1.1934, - "step": 6928 - }, - { - "epoch": 0.5329590031536036, - "learning_rate": 0.001344961739208518, - "loss": 1.1608, - "step": 6929 - }, - { - "epoch": 0.533035920313822, - "learning_rate": 0.0013446012214671742, - "loss": 1.4867, - "step": 6930 - }, - { - "epoch": 0.5331128374740405, - "learning_rate": 0.0013442407127997268, - "loss": 0.9409, - "step": 6931 - }, - { - "epoch": 0.5331897546342589, - "learning_rate": 0.0013438802132272269, - "loss": 1.2508, - "step": 6932 - }, - { - "epoch": 0.5332666717944774, - "learning_rate": 0.0013435197227707237, - "loss": 1.0573, - "step": 6933 - }, - { - "epoch": 0.5333435889546958, - "learning_rate": 0.0013431592414512674, - "loss": 1.1462, - "step": 6934 - }, - { - "epoch": 0.5334205061149142, - "learning_rate": 0.0013427987692899062, - "loss": 1.4857, - "step": 6935 - }, - { - "epoch": 0.5334974232751327, - "learning_rate": 0.0013424383063076887, - "loss": 1.1281, - "step": 6936 - }, - { - "epoch": 0.5335743404353511, - "learning_rate": 0.0013420778525256626, - "loss": 1.1199, - "step": 6937 - }, - { - "epoch": 0.5336512575955695, - "learning_rate": 0.001341717407964876, - "loss": 0.9395, - "step": 6938 - }, - { - "epoch": 0.533728174755788, - "learning_rate": 0.0013413569726463735, - "loss": 1.01, - "step": 6939 - }, - { - "epoch": 0.5338050919160064, - "learning_rate": 0.001340996546591204, - "loss": 1.0332, - "step": 6940 - }, - { - "epoch": 0.5338820090762249, - "learning_rate": 0.0013406361298204114, - "loss": 1.2737, - "step": 6941 - }, - { - "epoch": 0.5339589262364434, - "learning_rate": 0.0013402757223550412, - "loss": 1.146, - "step": 6942 - }, - { - "epoch": 0.5340358433966618, - "learning_rate": 0.001339915324216138, - "loss": 0.9185, - "step": 6943 - }, - { - "epoch": 0.5341127605568803, - "learning_rate": 0.0013395549354247459, - "loss": 1.1744, - "step": 6944 - }, - { - "epoch": 0.5341896777170987, - "learning_rate": 0.0013391945560019083, - "loss": 0.9265, - "step": 6945 - }, - { - 
"epoch": 0.5342665948773171, - "learning_rate": 0.0013388341859686683, - "loss": 1.1616, - "step": 6946 - }, - { - "epoch": 0.5343435120375356, - "learning_rate": 0.001338473825346068, - "loss": 0.9231, - "step": 6947 - }, - { - "epoch": 0.534420429197754, - "learning_rate": 0.0013381134741551493, - "loss": 1.2317, - "step": 6948 - }, - { - "epoch": 0.5344973463579724, - "learning_rate": 0.0013377531324169538, - "loss": 1.4175, - "step": 6949 - }, - { - "epoch": 0.5345742635181909, - "learning_rate": 0.0013373928001525217, - "loss": 0.8812, - "step": 6950 - }, - { - "epoch": 0.5346511806784093, - "learning_rate": 0.001337032477382894, - "loss": 1.2232, - "step": 6951 - }, - { - "epoch": 0.5347280978386278, - "learning_rate": 0.0013366721641291089, - "loss": 1.4312, - "step": 6952 - }, - { - "epoch": 0.5348050149988463, - "learning_rate": 0.0013363118604122074, - "loss": 1.1784, - "step": 6953 - }, - { - "epoch": 0.5348819321590647, - "learning_rate": 0.0013359515662532265, - "loss": 1.0246, - "step": 6954 - }, - { - "epoch": 0.5349588493192832, - "learning_rate": 0.0013355912816732044, - "loss": 0.789, - "step": 6955 - }, - { - "epoch": 0.5350357664795016, - "learning_rate": 0.0013352310066931789, - "loss": 1.348, - "step": 6956 - }, - { - "epoch": 0.53511268363972, - "learning_rate": 0.0013348707413341867, - "loss": 1.016, - "step": 6957 - }, - { - "epoch": 0.5351896007999385, - "learning_rate": 0.0013345104856172635, - "loss": 1.295, - "step": 6958 - }, - { - "epoch": 0.5352665179601569, - "learning_rate": 0.0013341502395634462, - "loss": 1.066, - "step": 6959 - }, - { - "epoch": 0.5353434351203754, - "learning_rate": 0.0013337900031937687, - "loss": 1.0589, - "step": 6960 - }, - { - "epoch": 0.5354203522805938, - "learning_rate": 0.0013334297765292666, - "loss": 1.19, - "step": 6961 - }, - { - "epoch": 0.5354972694408122, - "learning_rate": 0.0013330695595909733, - "loss": 0.81, - "step": 6962 - }, - { - "epoch": 0.5355741866010307, - "learning_rate": 0.001332709352399922, - "loss": 1.1576, - "step": 6963 - }, - { - "epoch": 0.5356511037612491, - "learning_rate": 0.0013323491549771464, - "loss": 1.3964, - "step": 6964 - }, - { - "epoch": 0.5357280209214675, - "learning_rate": 0.0013319889673436788, - "loss": 1.2666, - "step": 6965 - }, - { - "epoch": 0.535804938081686, - "learning_rate": 0.0013316287895205497, - "loss": 1.2331, - "step": 6966 - }, - { - "epoch": 0.5358818552419045, - "learning_rate": 0.0013312686215287914, - "loss": 1.0216, - "step": 6967 - }, - { - "epoch": 0.5359587724021229, - "learning_rate": 0.001330908463389434, - "loss": 1.2304, - "step": 6968 - }, - { - "epoch": 0.5360356895623414, - "learning_rate": 0.0013305483151235076, - "loss": 1.0852, - "step": 6969 - }, - { - "epoch": 0.5361126067225598, - "learning_rate": 0.001330188176752042, - "loss": 0.8415, - "step": 6970 - }, - { - "epoch": 0.5361895238827783, - "learning_rate": 0.0013298280482960652, - "loss": 1.28, - "step": 6971 - }, - { - "epoch": 0.5362664410429967, - "learning_rate": 0.0013294679297766062, - "loss": 1.2948, - "step": 6972 - }, - { - "epoch": 0.5363433582032151, - "learning_rate": 0.0013291078212146928, - "loss": 0.992, - "step": 6973 - }, - { - "epoch": 0.5364202753634336, - "learning_rate": 0.0013287477226313514, - "loss": 0.9287, - "step": 6974 - }, - { - "epoch": 0.536497192523652, - "learning_rate": 0.0013283876340476094, - "loss": 1.3024, - "step": 6975 - }, - { - "epoch": 0.5365741096838704, - "learning_rate": 0.0013280275554844925, - "loss": 1.1904, - "step": 6976 - }, - { - "epoch": 
0.5366510268440889, - "learning_rate": 0.0013276674869630248, - "loss": 1.2229, - "step": 6977 - }, - { - "epoch": 0.5367279440043073, - "learning_rate": 0.0013273074285042332, - "loss": 1.329, - "step": 6978 - }, - { - "epoch": 0.5368048611645259, - "learning_rate": 0.0013269473801291402, - "loss": 1.1713, - "step": 6979 - }, - { - "epoch": 0.5368817783247443, - "learning_rate": 0.0013265873418587702, - "loss": 1.2024, - "step": 6980 - }, - { - "epoch": 0.5369586954849627, - "learning_rate": 0.0013262273137141464, - "loss": 1.3499, - "step": 6981 - }, - { - "epoch": 0.5370356126451812, - "learning_rate": 0.00132586729571629, - "loss": 1.3022, - "step": 6982 - }, - { - "epoch": 0.5371125298053996, - "learning_rate": 0.0013255072878862245, - "loss": 1.024, - "step": 6983 - }, - { - "epoch": 0.537189446965618, - "learning_rate": 0.00132514729024497, - "loss": 1.078, - "step": 6984 - }, - { - "epoch": 0.5372663641258365, - "learning_rate": 0.0013247873028135473, - "loss": 1.61, - "step": 6985 - }, - { - "epoch": 0.5373432812860549, - "learning_rate": 0.0013244273256129767, - "loss": 1.3039, - "step": 6986 - }, - { - "epoch": 0.5374201984462733, - "learning_rate": 0.0013240673586642781, - "loss": 0.8892, - "step": 6987 - }, - { - "epoch": 0.5374971156064918, - "learning_rate": 0.0013237074019884687, - "loss": 1.1584, - "step": 6988 - }, - { - "epoch": 0.5375740327667102, - "learning_rate": 0.0013233474556065689, - "loss": 1.2637, - "step": 6989 - }, - { - "epoch": 0.5376509499269287, - "learning_rate": 0.0013229875195395941, - "loss": 0.8415, - "step": 6990 - }, - { - "epoch": 0.5377278670871471, - "learning_rate": 0.0013226275938085638, - "loss": 1.1912, - "step": 6991 - }, - { - "epoch": 0.5378047842473656, - "learning_rate": 0.0013222676784344924, - "loss": 1.0295, - "step": 6992 - }, - { - "epoch": 0.5378817014075841, - "learning_rate": 0.0013219077734383964, - "loss": 1.2195, - "step": 6993 - }, - { - "epoch": 0.5379586185678025, - "learning_rate": 0.0013215478788412916, - "loss": 1.1863, - "step": 6994 - }, - { - "epoch": 0.5380355357280209, - "learning_rate": 0.0013211879946641919, - "loss": 1.0444, - "step": 6995 - }, - { - "epoch": 0.5381124528882394, - "learning_rate": 0.0013208281209281113, - "loss": 1.185, - "step": 6996 - }, - { - "epoch": 0.5381893700484578, - "learning_rate": 0.0013204682576540637, - "loss": 1.2222, - "step": 6997 - }, - { - "epoch": 0.5382662872086763, - "learning_rate": 0.0013201084048630614, - "loss": 1.0717, - "step": 6998 - }, - { - "epoch": 0.5383432043688947, - "learning_rate": 0.001319748562576117, - "loss": 0.9739, - "step": 6999 - }, - { - "epoch": 0.5384201215291131, - "learning_rate": 0.0013193887308142424, - "loss": 1.1365, - "step": 7000 - }, - { - "epoch": 0.5384970386893316, - "learning_rate": 0.0013190289095984468, - "loss": 1.292, - "step": 7001 - }, - { - "epoch": 0.53857395584955, - "learning_rate": 0.001318669098949743, - "loss": 1.0203, - "step": 7002 - }, - { - "epoch": 0.5386508730097684, - "learning_rate": 0.001318309298889139, - "loss": 0.9898, - "step": 7003 - }, - { - "epoch": 0.538727790169987, - "learning_rate": 0.001317949509437644, - "loss": 1.2468, - "step": 7004 - }, - { - "epoch": 0.5388047073302054, - "learning_rate": 0.0013175897306162677, - "loss": 1.4557, - "step": 7005 - }, - { - "epoch": 0.5388816244904238, - "learning_rate": 0.0013172299624460166, - "loss": 0.8888, - "step": 7006 - }, - { - "epoch": 0.5389585416506423, - "learning_rate": 0.0013168702049478984, - "loss": 0.9741, - "step": 7007 - }, - { - "epoch": 
0.5390354588108607, - "learning_rate": 0.00131651045814292, - "loss": 1.2969, - "step": 7008 - }, - { - "epoch": 0.5391123759710792, - "learning_rate": 0.0013161507220520868, - "loss": 1.4163, - "step": 7009 - }, - { - "epoch": 0.5391892931312976, - "learning_rate": 0.001315790996696405, - "loss": 1.3695, - "step": 7010 - }, - { - "epoch": 0.539266210291516, - "learning_rate": 0.0013154312820968792, - "loss": 1.3334, - "step": 7011 - }, - { - "epoch": 0.5393431274517345, - "learning_rate": 0.0013150715782745124, - "loss": 1.105, - "step": 7012 - }, - { - "epoch": 0.5394200446119529, - "learning_rate": 0.0013147118852503093, - "loss": 1.2386, - "step": 7013 - }, - { - "epoch": 0.5394969617721713, - "learning_rate": 0.0013143522030452726, - "loss": 1.2102, - "step": 7014 - }, - { - "epoch": 0.5395738789323898, - "learning_rate": 0.0013139925316804034, - "loss": 0.9835, - "step": 7015 - }, - { - "epoch": 0.5396507960926082, - "learning_rate": 0.001313632871176705, - "loss": 1.3669, - "step": 7016 - }, - { - "epoch": 0.5397277132528268, - "learning_rate": 0.0013132732215551768, - "loss": 1.003, - "step": 7017 - }, - { - "epoch": 0.5398046304130452, - "learning_rate": 0.00131291358283682, - "loss": 1.1371, - "step": 7018 - }, - { - "epoch": 0.5398815475732636, - "learning_rate": 0.0013125539550426343, - "loss": 1.3373, - "step": 7019 - }, - { - "epoch": 0.5399584647334821, - "learning_rate": 0.0013121943381936181, - "loss": 1.4093, - "step": 7020 - }, - { - "epoch": 0.5400353818937005, - "learning_rate": 0.0013118347323107706, - "loss": 1.2889, - "step": 7021 - }, - { - "epoch": 0.5401122990539189, - "learning_rate": 0.0013114751374150894, - "loss": 1.3281, - "step": 7022 - }, - { - "epoch": 0.5401892162141374, - "learning_rate": 0.0013111155535275706, - "loss": 1.0294, - "step": 7023 - }, - { - "epoch": 0.5402661333743558, - "learning_rate": 0.0013107559806692123, - "loss": 0.8113, - "step": 7024 - }, - { - "epoch": 0.5403430505345742, - "learning_rate": 0.0013103964188610092, - "loss": 1.1516, - "step": 7025 - }, - { - "epoch": 0.5404199676947927, - "learning_rate": 0.0013100368681239567, - "loss": 0.7669, - "step": 7026 - }, - { - "epoch": 0.5404968848550111, - "learning_rate": 0.0013096773284790495, - "loss": 0.9348, - "step": 7027 - }, - { - "epoch": 0.5405738020152296, - "learning_rate": 0.0013093177999472813, - "loss": 1.3785, - "step": 7028 - }, - { - "epoch": 0.540650719175448, - "learning_rate": 0.0013089582825496456, - "loss": 0.9523, - "step": 7029 - }, - { - "epoch": 0.5407276363356665, - "learning_rate": 0.0013085987763071351, - "loss": 1.2283, - "step": 7030 - }, - { - "epoch": 0.540804553495885, - "learning_rate": 0.001308239281240741, - "loss": 1.0576, - "step": 7031 - }, - { - "epoch": 0.5408814706561034, - "learning_rate": 0.0013078797973714558, - "loss": 1.2732, - "step": 7032 - }, - { - "epoch": 0.5409583878163218, - "learning_rate": 0.0013075203247202693, - "loss": 0.8747, - "step": 7033 - }, - { - "epoch": 0.5410353049765403, - "learning_rate": 0.001307160863308171, - "loss": 0.8084, - "step": 7034 - }, - { - "epoch": 0.5411122221367587, - "learning_rate": 0.0013068014131561517, - "loss": 1.3837, - "step": 7035 - }, - { - "epoch": 0.5411891392969772, - "learning_rate": 0.0013064419742851983, - "loss": 1.02, - "step": 7036 - }, - { - "epoch": 0.5412660564571956, - "learning_rate": 0.0013060825467163008, - "loss": 0.8834, - "step": 7037 - }, - { - "epoch": 0.541342973617414, - "learning_rate": 0.0013057231304704452, - "loss": 1.6106, - "step": 7038 - }, - { - "epoch": 
0.5414198907776325, - "learning_rate": 0.001305363725568618, - "loss": 1.4662, - "step": 7039 - }, - { - "epoch": 0.5414968079378509, - "learning_rate": 0.001305004332031806, - "loss": 1.2767, - "step": 7040 - }, - { - "epoch": 0.5415737250980693, - "learning_rate": 0.0013046449498809942, - "loss": 1.1696, - "step": 7041 - }, - { - "epoch": 0.5416506422582879, - "learning_rate": 0.0013042855791371671, - "loss": 1.274, - "step": 7042 - }, - { - "epoch": 0.5417275594185063, - "learning_rate": 0.0013039262198213095, - "loss": 1.2978, - "step": 7043 - }, - { - "epoch": 0.5418044765787247, - "learning_rate": 0.0013035668719544047, - "loss": 1.1238, - "step": 7044 - }, - { - "epoch": 0.5418813937389432, - "learning_rate": 0.0013032075355574337, - "loss": 1.4077, - "step": 7045 - }, - { - "epoch": 0.5419583108991616, - "learning_rate": 0.0013028482106513808, - "loss": 1.2791, - "step": 7046 - }, - { - "epoch": 0.5420352280593801, - "learning_rate": 0.0013024888972572254, - "loss": 1.475, - "step": 7047 - }, - { - "epoch": 0.5421121452195985, - "learning_rate": 0.0013021295953959502, - "loss": 1.2005, - "step": 7048 - }, - { - "epoch": 0.5421890623798169, - "learning_rate": 0.0013017703050885337, - "loss": 1.0672, - "step": 7049 - }, - { - "epoch": 0.5422659795400354, - "learning_rate": 0.0013014110263559555, - "loss": 1.0102, - "step": 7050 - }, - { - "epoch": 0.5423428967002538, - "learning_rate": 0.0013010517592191946, - "loss": 1.1015, - "step": 7051 - }, - { - "epoch": 0.5424198138604722, - "learning_rate": 0.0013006925036992288, - "loss": 1.1986, - "step": 7052 - }, - { - "epoch": 0.5424967310206907, - "learning_rate": 0.0013003332598170352, - "loss": 1.0695, - "step": 7053 - }, - { - "epoch": 0.5425736481809091, - "learning_rate": 0.001299974027593591, - "loss": 1.3769, - "step": 7054 - }, - { - "epoch": 0.5426505653411277, - "learning_rate": 0.0012996148070498715, - "loss": 0.9897, - "step": 7055 - }, - { - "epoch": 0.5427274825013461, - "learning_rate": 0.0012992555982068526, - "loss": 1.3528, - "step": 7056 - }, - { - "epoch": 0.5428043996615645, - "learning_rate": 0.0012988964010855087, - "loss": 1.4178, - "step": 7057 - }, - { - "epoch": 0.542881316821783, - "learning_rate": 0.0012985372157068124, - "loss": 1.0812, - "step": 7058 - }, - { - "epoch": 0.5429582339820014, - "learning_rate": 0.0012981780420917393, - "loss": 0.9687, - "step": 7059 - }, - { - "epoch": 0.5430351511422198, - "learning_rate": 0.0012978188802612604, - "loss": 1.2151, - "step": 7060 - }, - { - "epoch": 0.5431120683024383, - "learning_rate": 0.0012974597302363472, - "loss": 0.9797, - "step": 7061 - }, - { - "epoch": 0.5431889854626567, - "learning_rate": 0.0012971005920379718, - "loss": 1.2642, - "step": 7062 - }, - { - "epoch": 0.5432659026228752, - "learning_rate": 0.001296741465687104, - "loss": 1.4976, - "step": 7063 - }, - { - "epoch": 0.5433428197830936, - "learning_rate": 0.0012963823512047142, - "loss": 1.1779, - "step": 7064 - }, - { - "epoch": 0.543419736943312, - "learning_rate": 0.0012960232486117712, - "loss": 1.2746, - "step": 7065 - }, - { - "epoch": 0.5434966541035305, - "learning_rate": 0.0012956641579292428, - "loss": 1.263, - "step": 7066 - }, - { - "epoch": 0.543573571263749, - "learning_rate": 0.0012953050791780977, - "loss": 1.3056, - "step": 7067 - }, - { - "epoch": 0.5436504884239673, - "learning_rate": 0.0012949460123793026, - "loss": 1.1749, - "step": 7068 - }, - { - "epoch": 0.5437274055841859, - "learning_rate": 0.0012945869575538226, - "loss": 1.3322, - "step": 7069 - }, - { - "epoch": 
0.5438043227444043, - "learning_rate": 0.0012942279147226251, - "loss": 1.2806, - "step": 7070 - }, - { - "epoch": 0.5438812399046227, - "learning_rate": 0.0012938688839066739, - "loss": 1.1828, - "step": 7071 - }, - { - "epoch": 0.5439581570648412, - "learning_rate": 0.0012935098651269327, - "loss": 1.3037, - "step": 7072 - }, - { - "epoch": 0.5440350742250596, - "learning_rate": 0.0012931508584043663, - "loss": 1.0784, - "step": 7073 - }, - { - "epoch": 0.5441119913852781, - "learning_rate": 0.0012927918637599368, - "loss": 1.3263, - "step": 7074 - }, - { - "epoch": 0.5441889085454965, - "learning_rate": 0.001292432881214606, - "loss": 1.3612, - "step": 7075 - }, - { - "epoch": 0.5442658257057149, - "learning_rate": 0.001292073910789336, - "loss": 1.1196, - "step": 7076 - }, - { - "epoch": 0.5443427428659334, - "learning_rate": 0.0012917149525050868, - "loss": 0.9935, - "step": 7077 - }, - { - "epoch": 0.5444196600261518, - "learning_rate": 0.0012913560063828188, - "loss": 1.1576, - "step": 7078 - }, - { - "epoch": 0.5444965771863702, - "learning_rate": 0.0012909970724434914, - "loss": 0.9746, - "step": 7079 - }, - { - "epoch": 0.5445734943465887, - "learning_rate": 0.0012906381507080617, - "loss": 1.3635, - "step": 7080 - }, - { - "epoch": 0.5446504115068072, - "learning_rate": 0.0012902792411974896, - "loss": 1.0496, - "step": 7081 - }, - { - "epoch": 0.5447273286670257, - "learning_rate": 0.0012899203439327301, - "loss": 1.1322, - "step": 7082 - }, - { - "epoch": 0.5448042458272441, - "learning_rate": 0.0012895614589347416, - "loss": 0.9644, - "step": 7083 - }, - { - "epoch": 0.5448811629874625, - "learning_rate": 0.0012892025862244786, - "loss": 1.4189, - "step": 7084 - }, - { - "epoch": 0.544958080147681, - "learning_rate": 0.0012888437258228962, - "loss": 1.4761, - "step": 7085 - }, - { - "epoch": 0.5450349973078994, - "learning_rate": 0.0012884848777509486, - "loss": 1.3381, - "step": 7086 - }, - { - "epoch": 0.5451119144681178, - "learning_rate": 0.0012881260420295896, - "loss": 1.1425, - "step": 7087 - }, - { - "epoch": 0.5451888316283363, - "learning_rate": 0.0012877672186797715, - "loss": 1.0682, - "step": 7088 - }, - { - "epoch": 0.5452657487885547, - "learning_rate": 0.001287408407722447, - "loss": 1.4989, - "step": 7089 - }, - { - "epoch": 0.5453426659487731, - "learning_rate": 0.0012870496091785676, - "loss": 1.2307, - "step": 7090 - }, - { - "epoch": 0.5454195831089916, - "learning_rate": 0.0012866908230690823, - "loss": 1.219, - "step": 7091 - }, - { - "epoch": 0.54549650026921, - "learning_rate": 0.0012863320494149431, - "loss": 1.0517, - "step": 7092 - }, - { - "epoch": 0.5455734174294286, - "learning_rate": 0.0012859732882370973, - "loss": 0.8331, - "step": 7093 - }, - { - "epoch": 0.545650334589647, - "learning_rate": 0.0012856145395564952, - "loss": 1.5791, - "step": 7094 - }, - { - "epoch": 0.5457272517498654, - "learning_rate": 0.0012852558033940834, - "loss": 0.8782, - "step": 7095 - }, - { - "epoch": 0.5458041689100839, - "learning_rate": 0.0012848970797708086, - "loss": 1.0444, - "step": 7096 - }, - { - "epoch": 0.5458810860703023, - "learning_rate": 0.0012845383687076176, - "loss": 1.0185, - "step": 7097 - }, - { - "epoch": 0.5459580032305207, - "learning_rate": 0.0012841796702254562, - "loss": 1.4756, - "step": 7098 - }, - { - "epoch": 0.5460349203907392, - "learning_rate": 0.0012838209843452682, - "loss": 1.3353, - "step": 7099 - }, - { - "epoch": 0.5461118375509576, - "learning_rate": 0.0012834623110879986, - "loss": 1.3954, - "step": 7100 - }, - { - 
"epoch": 0.5461887547111761, - "learning_rate": 0.0012831036504745898, - "loss": 1.1106, - "step": 7101 - }, - { - "epoch": 0.5462656718713945, - "learning_rate": 0.0012827450025259858, - "loss": 0.9238, - "step": 7102 - }, - { - "epoch": 0.5463425890316129, - "learning_rate": 0.001282386367263127, - "loss": 1.2487, - "step": 7103 - }, - { - "epoch": 0.5464195061918314, - "learning_rate": 0.0012820277447069548, - "loss": 1.0021, - "step": 7104 - }, - { - "epoch": 0.5464964233520498, - "learning_rate": 0.0012816691348784103, - "loss": 1.3208, - "step": 7105 - }, - { - "epoch": 0.5465733405122682, - "learning_rate": 0.0012813105377984324, - "loss": 1.0015, - "step": 7106 - }, - { - "epoch": 0.5466502576724868, - "learning_rate": 0.0012809519534879598, - "loss": 0.9199, - "step": 7107 - }, - { - "epoch": 0.5467271748327052, - "learning_rate": 0.0012805933819679313, - "loss": 0.8717, - "step": 7108 - }, - { - "epoch": 0.5468040919929236, - "learning_rate": 0.0012802348232592835, - "loss": 0.9197, - "step": 7109 - }, - { - "epoch": 0.5468810091531421, - "learning_rate": 0.0012798762773829535, - "loss": 0.992, - "step": 7110 - }, - { - "epoch": 0.5469579263133605, - "learning_rate": 0.001279517744359877, - "loss": 1.1453, - "step": 7111 - }, - { - "epoch": 0.547034843473579, - "learning_rate": 0.0012791592242109894, - "loss": 0.9082, - "step": 7112 - }, - { - "epoch": 0.5471117606337974, - "learning_rate": 0.0012788007169572248, - "loss": 1.2377, - "step": 7113 - }, - { - "epoch": 0.5471886777940158, - "learning_rate": 0.001278442222619517, - "loss": 0.9316, - "step": 7114 - }, - { - "epoch": 0.5472655949542343, - "learning_rate": 0.0012780837412187983, - "loss": 1.4862, - "step": 7115 - }, - { - "epoch": 0.5473425121144527, - "learning_rate": 0.0012777252727760016, - "loss": 1.0822, - "step": 7116 - }, - { - "epoch": 0.5474194292746711, - "learning_rate": 0.0012773668173120581, - "loss": 1.1401, - "step": 7117 - }, - { - "epoch": 0.5474963464348896, - "learning_rate": 0.0012770083748478978, - "loss": 1.0458, - "step": 7118 - }, - { - "epoch": 0.547573263595108, - "learning_rate": 0.0012766499454044508, - "loss": 1.1473, - "step": 7119 - }, - { - "epoch": 0.5476501807553266, - "learning_rate": 0.0012762915290026463, - "loss": 1.023, - "step": 7120 - }, - { - "epoch": 0.547727097915545, - "learning_rate": 0.0012759331256634128, - "loss": 1.2007, - "step": 7121 - }, - { - "epoch": 0.5478040150757634, - "learning_rate": 0.0012755747354076775, - "loss": 1.1641, - "step": 7122 - }, - { - "epoch": 0.5478809322359819, - "learning_rate": 0.0012752163582563669, - "loss": 1.3878, - "step": 7123 - }, - { - "epoch": 0.5479578493962003, - "learning_rate": 0.0012748579942304078, - "loss": 1.1641, - "step": 7124 - }, - { - "epoch": 0.5480347665564187, - "learning_rate": 0.0012744996433507252, - "loss": 1.1184, - "step": 7125 - }, - { - "epoch": 0.5481116837166372, - "learning_rate": 0.0012741413056382431, - "loss": 1.3532, - "step": 7126 - }, - { - "epoch": 0.5481886008768556, - "learning_rate": 0.0012737829811138857, - "loss": 0.9796, - "step": 7127 - }, - { - "epoch": 0.548265518037074, - "learning_rate": 0.0012734246697985762, - "loss": 1.048, - "step": 7128 - }, - { - "epoch": 0.5483424351972925, - "learning_rate": 0.0012730663717132355, - "loss": 1.1809, - "step": 7129 - }, - { - "epoch": 0.5484193523575109, - "learning_rate": 0.0012727080868787867, - "loss": 1.5304, - "step": 7130 - }, - { - "epoch": 0.5484962695177295, - "learning_rate": 0.0012723498153161487, - "loss": 0.6619, - "step": 7131 - }, - { - 
"epoch": 0.5485731866779479, - "learning_rate": 0.001271991557046243, - "loss": 1.0458, - "step": 7132 - }, - { - "epoch": 0.5486501038381663, - "learning_rate": 0.0012716333120899876, - "loss": 1.1986, - "step": 7133 - }, - { - "epoch": 0.5487270209983848, - "learning_rate": 0.0012712750804683012, - "loss": 1.0602, - "step": 7134 - }, - { - "epoch": 0.5488039381586032, - "learning_rate": 0.0012709168622021011, - "loss": 0.8825, - "step": 7135 - }, - { - "epoch": 0.5488808553188216, - "learning_rate": 0.0012705586573123044, - "loss": 1.0024, - "step": 7136 - }, - { - "epoch": 0.5489577724790401, - "learning_rate": 0.0012702004658198263, - "loss": 1.1447, - "step": 7137 - }, - { - "epoch": 0.5490346896392585, - "learning_rate": 0.001269842287745583, - "loss": 1.1722, - "step": 7138 - }, - { - "epoch": 0.549111606799477, - "learning_rate": 0.001269484123110488, - "loss": 1.1509, - "step": 7139 - }, - { - "epoch": 0.5491885239596954, - "learning_rate": 0.0012691259719354554, - "loss": 1.1132, - "step": 7140 - }, - { - "epoch": 0.5492654411199138, - "learning_rate": 0.0012687678342413985, - "loss": 1.3512, - "step": 7141 - }, - { - "epoch": 0.5493423582801323, - "learning_rate": 0.0012684097100492278, - "loss": 1.3356, - "step": 7142 - }, - { - "epoch": 0.5494192754403507, - "learning_rate": 0.0012680515993798564, - "loss": 1.1692, - "step": 7143 - }, - { - "epoch": 0.5494961926005691, - "learning_rate": 0.0012676935022541934, - "loss": 1.1104, - "step": 7144 - }, - { - "epoch": 0.5495731097607877, - "learning_rate": 0.0012673354186931488, - "loss": 1.212, - "step": 7145 - }, - { - "epoch": 0.5496500269210061, - "learning_rate": 0.0012669773487176317, - "loss": 1.2503, - "step": 7146 - }, - { - "epoch": 0.5497269440812245, - "learning_rate": 0.0012666192923485502, - "loss": 1.2845, - "step": 7147 - }, - { - "epoch": 0.549803861241443, - "learning_rate": 0.0012662612496068108, - "loss": 1.7123, - "step": 7148 - }, - { - "epoch": 0.5498807784016614, - "learning_rate": 0.0012659032205133212, - "loss": 1.2454, - "step": 7149 - }, - { - "epoch": 0.5499576955618799, - "learning_rate": 0.0012655452050889862, - "loss": 1.0506, - "step": 7150 - }, - { - "epoch": 0.5500346127220983, - "learning_rate": 0.0012651872033547114, - "loss": 1.3297, - "step": 7151 - }, - { - "epoch": 0.5501115298823167, - "learning_rate": 0.0012648292153314008, - "loss": 1.1001, - "step": 7152 - }, - { - "epoch": 0.5501884470425352, - "learning_rate": 0.0012644712410399565, - "loss": 1.0706, - "step": 7153 - }, - { - "epoch": 0.5502653642027536, - "learning_rate": 0.0012641132805012827, - "loss": 1.1781, - "step": 7154 - }, - { - "epoch": 0.550342281362972, - "learning_rate": 0.00126375533373628, - "loss": 1.3003, - "step": 7155 - }, - { - "epoch": 0.5504191985231905, - "learning_rate": 0.0012633974007658494, - "loss": 1.2581, - "step": 7156 - }, - { - "epoch": 0.550496115683409, - "learning_rate": 0.0012630394816108912, - "loss": 1.2777, - "step": 7157 - }, - { - "epoch": 0.5505730328436275, - "learning_rate": 0.0012626815762923045, - "loss": 0.9019, - "step": 7158 - }, - { - "epoch": 0.5506499500038459, - "learning_rate": 0.0012623236848309882, - "loss": 1.0706, - "step": 7159 - }, - { - "epoch": 0.5507268671640643, - "learning_rate": 0.0012619658072478396, - "loss": 1.2874, - "step": 7160 - }, - { - "epoch": 0.5508037843242828, - "learning_rate": 0.0012616079435637554, - "loss": 1.0621, - "step": 7161 - }, - { - "epoch": 0.5508807014845012, - "learning_rate": 0.0012612500937996321, - "loss": 1.3226, - "step": 7162 - }, - { - 
"epoch": 0.5509576186447196, - "learning_rate": 0.0012608922579763649, - "loss": 1.4676, - "step": 7163 - }, - { - "epoch": 0.5510345358049381, - "learning_rate": 0.0012605344361148467, - "loss": 1.2182, - "step": 7164 - }, - { - "epoch": 0.5511114529651565, - "learning_rate": 0.0012601766282359737, - "loss": 1.0978, - "step": 7165 - }, - { - "epoch": 0.5511883701253749, - "learning_rate": 0.001259818834360637, - "loss": 1.3387, - "step": 7166 - }, - { - "epoch": 0.5512652872855934, - "learning_rate": 0.0012594610545097282, - "loss": 0.8153, - "step": 7167 - }, - { - "epoch": 0.5513422044458118, - "learning_rate": 0.0012591032887041397, - "loss": 1.6278, - "step": 7168 - }, - { - "epoch": 0.5514191216060303, - "learning_rate": 0.001258745536964761, - "loss": 1.0743, - "step": 7169 - }, - { - "epoch": 0.5514960387662488, - "learning_rate": 0.001258387799312482, - "loss": 1.4928, - "step": 7170 - }, - { - "epoch": 0.5515729559264672, - "learning_rate": 0.0012580300757681912, - "loss": 0.801, - "step": 7171 - }, - { - "epoch": 0.5516498730866857, - "learning_rate": 0.001257672366352776, - "loss": 1.5612, - "step": 7172 - }, - { - "epoch": 0.5517267902469041, - "learning_rate": 0.0012573146710871245, - "loss": 0.7027, - "step": 7173 - }, - { - "epoch": 0.5518037074071225, - "learning_rate": 0.0012569569899921224, - "loss": 1.6275, - "step": 7174 - }, - { - "epoch": 0.551880624567341, - "learning_rate": 0.001256599323088654, - "loss": 1.2391, - "step": 7175 - }, - { - "epoch": 0.5519575417275594, - "learning_rate": 0.0012562416703976059, - "loss": 1.2503, - "step": 7176 - }, - { - "epoch": 0.5520344588877779, - "learning_rate": 0.0012558840319398595, - "loss": 0.9731, - "step": 7177 - }, - { - "epoch": 0.5521113760479963, - "learning_rate": 0.0012555264077363, - "loss": 0.9332, - "step": 7178 - }, - { - "epoch": 0.5521882932082147, - "learning_rate": 0.001255168797807808, - "loss": 0.7701, - "step": 7179 - }, - { - "epoch": 0.5522652103684332, - "learning_rate": 0.0012548112021752648, - "loss": 1.1712, - "step": 7180 - }, - { - "epoch": 0.5523421275286516, - "learning_rate": 0.0012544536208595513, - "loss": 1.1259, - "step": 7181 - }, - { - "epoch": 0.55241904468887, - "learning_rate": 0.0012540960538815467, - "loss": 1.2825, - "step": 7182 - }, - { - "epoch": 0.5524959618490886, - "learning_rate": 0.0012537385012621294, - "loss": 1.1437, - "step": 7183 - }, - { - "epoch": 0.552572879009307, - "learning_rate": 0.0012533809630221782, - "loss": 1.2397, - "step": 7184 - }, - { - "epoch": 0.5526497961695255, - "learning_rate": 0.001253023439182569, - "loss": 0.9709, - "step": 7185 - }, - { - "epoch": 0.5527267133297439, - "learning_rate": 0.0012526659297641793, - "loss": 1.7203, - "step": 7186 - }, - { - "epoch": 0.5528036304899623, - "learning_rate": 0.0012523084347878838, - "loss": 0.988, - "step": 7187 - }, - { - "epoch": 0.5528805476501808, - "learning_rate": 0.001251950954274556, - "loss": 0.9961, - "step": 7188 - }, - { - "epoch": 0.5529574648103992, - "learning_rate": 0.0012515934882450714, - "loss": 1.0273, - "step": 7189 - }, - { - "epoch": 0.5530343819706176, - "learning_rate": 0.0012512360367203017, - "loss": 1.0816, - "step": 7190 - }, - { - "epoch": 0.5531112991308361, - "learning_rate": 0.0012508785997211184, - "loss": 1.0205, - "step": 7191 - }, - { - "epoch": 0.5531882162910545, - "learning_rate": 0.001250521177268394, - "loss": 1.035, - "step": 7192 - }, - { - "epoch": 0.5532651334512729, - "learning_rate": 0.001250163769382998, - "loss": 0.9753, - "step": 7193 - }, - { - "epoch": 
0.5533420506114914, - "learning_rate": 0.0012498063760857994, - "loss": 1.0268, - "step": 7194 - }, - { - "epoch": 0.5534189677717098, - "learning_rate": 0.0012494489973976677, - "loss": 0.8882, - "step": 7195 - }, - { - "epoch": 0.5534958849319284, - "learning_rate": 0.0012490916333394698, - "loss": 1.1814, - "step": 7196 - }, - { - "epoch": 0.5535728020921468, - "learning_rate": 0.0012487342839320734, - "loss": 1.0913, - "step": 7197 - }, - { - "epoch": 0.5536497192523652, - "learning_rate": 0.0012483769491963442, - "loss": 1.0421, - "step": 7198 - }, - { - "epoch": 0.5537266364125837, - "learning_rate": 0.001248019629153146, - "loss": 1.2698, - "step": 7199 - }, - { - "epoch": 0.5538035535728021, - "learning_rate": 0.0012476623238233454, - "loss": 1.0751, - "step": 7200 - }, - { - "epoch": 0.5538804707330205, - "learning_rate": 0.0012473050332278044, - "loss": 0.8832, - "step": 7201 - }, - { - "epoch": 0.553957387893239, - "learning_rate": 0.0012469477573873856, - "loss": 1.2194, - "step": 7202 - }, - { - "epoch": 0.5540343050534574, - "learning_rate": 0.0012465904963229514, - "loss": 1.1259, - "step": 7203 - }, - { - "epoch": 0.5541112222136759, - "learning_rate": 0.0012462332500553616, - "loss": 1.3259, - "step": 7204 - }, - { - "epoch": 0.5541881393738943, - "learning_rate": 0.0012458760186054771, - "loss": 1.3579, - "step": 7205 - }, - { - "epoch": 0.5542650565341127, - "learning_rate": 0.001245518801994157, - "loss": 1.0214, - "step": 7206 - }, - { - "epoch": 0.5543419736943312, - "learning_rate": 0.001245161600242259, - "loss": 1.3077, - "step": 7207 - }, - { - "epoch": 0.5544188908545497, - "learning_rate": 0.0012448044133706408, - "loss": 1.1951, - "step": 7208 - }, - { - "epoch": 0.554495808014768, - "learning_rate": 0.001244447241400159, - "loss": 1.2022, - "step": 7209 - }, - { - "epoch": 0.5545727251749866, - "learning_rate": 0.0012440900843516684, - "loss": 1.1184, - "step": 7210 - }, - { - "epoch": 0.554649642335205, - "learning_rate": 0.0012437329422460256, - "loss": 1.0422, - "step": 7211 - }, - { - "epoch": 0.5547265594954234, - "learning_rate": 0.0012433758151040829, - "loss": 1.0733, - "step": 7212 - }, - { - "epoch": 0.5548034766556419, - "learning_rate": 0.0012430187029466933, - "loss": 1.2094, - "step": 7213 - }, - { - "epoch": 0.5548803938158603, - "learning_rate": 0.0012426616057947101, - "loss": 1.262, - "step": 7214 - }, - { - "epoch": 0.5549573109760788, - "learning_rate": 0.0012423045236689835, - "loss": 0.8998, - "step": 7215 - }, - { - "epoch": 0.5550342281362972, - "learning_rate": 0.0012419474565903647, - "loss": 1.2346, - "step": 7216 - }, - { - "epoch": 0.5551111452965156, - "learning_rate": 0.0012415904045797027, - "loss": 0.9243, - "step": 7217 - }, - { - "epoch": 0.5551880624567341, - "learning_rate": 0.001241233367657846, - "loss": 1.2205, - "step": 7218 - }, - { - "epoch": 0.5552649796169525, - "learning_rate": 0.001240876345845643, - "loss": 0.9868, - "step": 7219 - }, - { - "epoch": 0.5553418967771709, - "learning_rate": 0.0012405193391639405, - "loss": 0.7864, - "step": 7220 - }, - { - "epoch": 0.5554188139373895, - "learning_rate": 0.0012401623476335833, - "loss": 1.3593, - "step": 7221 - }, - { - "epoch": 0.5554957310976079, - "learning_rate": 0.0012398053712754183, - "loss": 1.4042, - "step": 7222 - }, - { - "epoch": 0.5555726482578264, - "learning_rate": 0.001239448410110288, - "loss": 1.0608, - "step": 7223 - }, - { - "epoch": 0.5556495654180448, - "learning_rate": 0.0012390914641590375, - "loss": 1.2384, - "step": 7224 - }, - { - "epoch": 
0.5557264825782632, - "learning_rate": 0.0012387345334425078, - "loss": 1.219, - "step": 7225 - }, - { - "epoch": 0.5558033997384817, - "learning_rate": 0.0012383776179815409, - "loss": 1.5466, - "step": 7226 - }, - { - "epoch": 0.5558803168987001, - "learning_rate": 0.0012380207177969777, - "loss": 1.2514, - "step": 7227 - }, - { - "epoch": 0.5559572340589185, - "learning_rate": 0.001237663832909658, - "loss": 1.0605, - "step": 7228 - }, - { - "epoch": 0.556034151219137, - "learning_rate": 0.0012373069633404199, - "loss": 1.1861, - "step": 7229 - }, - { - "epoch": 0.5561110683793554, - "learning_rate": 0.0012369501091101026, - "loss": 1.3284, - "step": 7230 - }, - { - "epoch": 0.5561879855395738, - "learning_rate": 0.0012365932702395428, - "loss": 1.1109, - "step": 7231 - }, - { - "epoch": 0.5562649026997923, - "learning_rate": 0.0012362364467495754, - "loss": 1.0749, - "step": 7232 - }, - { - "epoch": 0.5563418198600107, - "learning_rate": 0.001235879638661038, - "loss": 1.0757, - "step": 7233 - }, - { - "epoch": 0.5564187370202293, - "learning_rate": 0.0012355228459947625, - "loss": 1.2782, - "step": 7234 - }, - { - "epoch": 0.5564956541804477, - "learning_rate": 0.0012351660687715846, - "loss": 0.998, - "step": 7235 - }, - { - "epoch": 0.5565725713406661, - "learning_rate": 0.0012348093070123358, - "loss": 1.2242, - "step": 7236 - }, - { - "epoch": 0.5566494885008846, - "learning_rate": 0.0012344525607378475, - "loss": 0.8711, - "step": 7237 - }, - { - "epoch": 0.556726405661103, - "learning_rate": 0.0012340958299689514, - "loss": 1.0938, - "step": 7238 - }, - { - "epoch": 0.5568033228213214, - "learning_rate": 0.0012337391147264768, - "loss": 1.3598, - "step": 7239 - }, - { - "epoch": 0.5568802399815399, - "learning_rate": 0.0012333824150312525, - "loss": 1.0687, - "step": 7240 - }, - { - "epoch": 0.5569571571417583, - "learning_rate": 0.001233025730904107, - "loss": 0.8455, - "step": 7241 - }, - { - "epoch": 0.5570340743019768, - "learning_rate": 0.001232669062365867, - "loss": 1.1195, - "step": 7242 - }, - { - "epoch": 0.5571109914621952, - "learning_rate": 0.0012323124094373598, - "loss": 1.5632, - "step": 7243 - }, - { - "epoch": 0.5571879086224136, - "learning_rate": 0.0012319557721394098, - "loss": 1.1215, - "step": 7244 - }, - { - "epoch": 0.5572648257826321, - "learning_rate": 0.001231599150492841, - "loss": 1.6228, - "step": 7245 - }, - { - "epoch": 0.5573417429428505, - "learning_rate": 0.0012312425445184783, - "loss": 1.1486, - "step": 7246 - }, - { - "epoch": 0.557418660103069, - "learning_rate": 0.0012308859542371432, - "loss": 1.2171, - "step": 7247 - }, - { - "epoch": 0.5574955772632875, - "learning_rate": 0.0012305293796696574, - "loss": 1.3485, - "step": 7248 - }, - { - "epoch": 0.5575724944235059, - "learning_rate": 0.0012301728208368424, - "loss": 1.272, - "step": 7249 - }, - { - "epoch": 0.5576494115837243, - "learning_rate": 0.0012298162777595175, - "loss": 1.0851, - "step": 7250 - }, - { - "epoch": 0.5577263287439428, - "learning_rate": 0.0012294597504585014, - "loss": 1.2688, - "step": 7251 - }, - { - "epoch": 0.5578032459041612, - "learning_rate": 0.0012291032389546127, - "loss": 1.1037, - "step": 7252 - }, - { - "epoch": 0.5578801630643797, - "learning_rate": 0.001228746743268668, - "loss": 1.1284, - "step": 7253 - }, - { - "epoch": 0.5579570802245981, - "learning_rate": 0.0012283902634214838, - "loss": 1.0962, - "step": 7254 - }, - { - "epoch": 0.5580339973848165, - "learning_rate": 0.0012280337994338751, - "loss": 1.0351, - "step": 7255 - }, - { - "epoch": 
0.558110914545035, - "learning_rate": 0.0012276773513266563, - "loss": 1.6946, - "step": 7256 - }, - { - "epoch": 0.5581878317052534, - "learning_rate": 0.0012273209191206414, - "loss": 1.1502, - "step": 7257 - }, - { - "epoch": 0.5582647488654718, - "learning_rate": 0.0012269645028366416, - "loss": 1.1958, - "step": 7258 - }, - { - "epoch": 0.5583416660256904, - "learning_rate": 0.0012266081024954688, - "loss": 1.3933, - "step": 7259 - }, - { - "epoch": 0.5584185831859088, - "learning_rate": 0.0012262517181179343, - "loss": 1.235, - "step": 7260 - }, - { - "epoch": 0.5584955003461273, - "learning_rate": 0.0012258953497248468, - "loss": 1.2645, - "step": 7261 - }, - { - "epoch": 0.5585724175063457, - "learning_rate": 0.001225538997337016, - "loss": 1.2166, - "step": 7262 - }, - { - "epoch": 0.5586493346665641, - "learning_rate": 0.001225182660975249, - "loss": 1.1861, - "step": 7263 - }, - { - "epoch": 0.5587262518267826, - "learning_rate": 0.0012248263406603524, - "loss": 1.0994, - "step": 7264 - }, - { - "epoch": 0.558803168987001, - "learning_rate": 0.001224470036413133, - "loss": 0.7398, - "step": 7265 - }, - { - "epoch": 0.5588800861472194, - "learning_rate": 0.0012241137482543953, - "loss": 1.2622, - "step": 7266 - }, - { - "epoch": 0.5589570033074379, - "learning_rate": 0.0012237574762049431, - "loss": 0.9521, - "step": 7267 - }, - { - "epoch": 0.5590339204676563, - "learning_rate": 0.0012234012202855802, - "loss": 0.9145, - "step": 7268 - }, - { - "epoch": 0.5591108376278747, - "learning_rate": 0.0012230449805171084, - "loss": 0.8893, - "step": 7269 - }, - { - "epoch": 0.5591877547880932, - "learning_rate": 0.0012226887569203283, - "loss": 1.2631, - "step": 7270 - }, - { - "epoch": 0.5592646719483116, - "learning_rate": 0.001222332549516041, - "loss": 1.0852, - "step": 7271 - }, - { - "epoch": 0.5593415891085302, - "learning_rate": 0.001221976358325045, - "loss": 1.3567, - "step": 7272 - }, - { - "epoch": 0.5594185062687486, - "learning_rate": 0.0012216201833681398, - "loss": 1.3905, - "step": 7273 - }, - { - "epoch": 0.559495423428967, - "learning_rate": 0.0012212640246661221, - "loss": 0.9227, - "step": 7274 - }, - { - "epoch": 0.5595723405891855, - "learning_rate": 0.0012209078822397882, - "loss": 1.4219, - "step": 7275 - }, - { - "epoch": 0.5596492577494039, - "learning_rate": 0.001220551756109934, - "loss": 0.6822, - "step": 7276 - }, - { - "epoch": 0.5597261749096223, - "learning_rate": 0.0012201956462973545, - "loss": 1.2539, - "step": 7277 - }, - { - "epoch": 0.5598030920698408, - "learning_rate": 0.001219839552822842, - "loss": 1.2788, - "step": 7278 - }, - { - "epoch": 0.5598800092300592, - "learning_rate": 0.0012194834757071906, - "loss": 1.1444, - "step": 7279 - }, - { - "epoch": 0.5599569263902777, - "learning_rate": 0.001219127414971191, - "loss": 1.0939, - "step": 7280 - }, - { - "epoch": 0.5600338435504961, - "learning_rate": 0.0012187713706356346, - "loss": 1.2404, - "step": 7281 - }, - { - "epoch": 0.5601107607107145, - "learning_rate": 0.0012184153427213112, - "loss": 1.2991, - "step": 7282 - }, - { - "epoch": 0.560187677870933, - "learning_rate": 0.0012180593312490088, - "loss": 1.1995, - "step": 7283 - }, - { - "epoch": 0.5602645950311514, - "learning_rate": 0.0012177033362395166, - "loss": 1.3429, - "step": 7284 - }, - { - "epoch": 0.5603415121913699, - "learning_rate": 0.0012173473577136206, - "loss": 1.1776, - "step": 7285 - }, - { - "epoch": 0.5604184293515884, - "learning_rate": 0.0012169913956921064, - "loss": 1.239, - "step": 7286 - }, - { - "epoch": 
0.5604953465118068, - "learning_rate": 0.00121663545019576, - "loss": 1.0809, - "step": 7287 - }, - { - "epoch": 0.5605722636720253, - "learning_rate": 0.0012162795212453648, - "loss": 1.4057, - "step": 7288 - }, - { - "epoch": 0.5606491808322437, - "learning_rate": 0.0012159236088617042, - "loss": 1.3556, - "step": 7289 - }, - { - "epoch": 0.5607260979924621, - "learning_rate": 0.00121556771306556, - "loss": 0.7773, - "step": 7290 - }, - { - "epoch": 0.5608030151526806, - "learning_rate": 0.0012152118338777133, - "loss": 1.2532, - "step": 7291 - }, - { - "epoch": 0.560879932312899, - "learning_rate": 0.0012148559713189447, - "loss": 1.1577, - "step": 7292 - }, - { - "epoch": 0.5609568494731174, - "learning_rate": 0.0012145001254100336, - "loss": 1.322, - "step": 7293 - }, - { - "epoch": 0.5610337666333359, - "learning_rate": 0.0012141442961717567, - "loss": 1.1817, - "step": 7294 - }, - { - "epoch": 0.5611106837935543, - "learning_rate": 0.001213788483624893, - "loss": 1.1667, - "step": 7295 - }, - { - "epoch": 0.5611876009537727, - "learning_rate": 0.001213432687790218, - "loss": 1.346, - "step": 7296 - }, - { - "epoch": 0.5612645181139913, - "learning_rate": 0.0012130769086885064, - "loss": 1.2367, - "step": 7297 - }, - { - "epoch": 0.5613414352742097, - "learning_rate": 0.0012127211463405336, - "loss": 0.867, - "step": 7298 - }, - { - "epoch": 0.5614183524344282, - "learning_rate": 0.0012123654007670723, - "loss": 1.1833, - "step": 7299 - }, - { - "epoch": 0.5614952695946466, - "learning_rate": 0.001212009671988895, - "loss": 1.3038, - "step": 7300 - }, - { - "epoch": 0.561572186754865, - "learning_rate": 0.0012116539600267734, - "loss": 1.0554, - "step": 7301 - }, - { - "epoch": 0.5616491039150835, - "learning_rate": 0.001211298264901477, - "loss": 1.0425, - "step": 7302 - }, - { - "epoch": 0.5617260210753019, - "learning_rate": 0.0012109425866337763, - "loss": 1.0325, - "step": 7303 - }, - { - "epoch": 0.5618029382355203, - "learning_rate": 0.0012105869252444393, - "loss": 0.7702, - "step": 7304 - }, - { - "epoch": 0.5618798553957388, - "learning_rate": 0.0012102312807542326, - "loss": 0.9658, - "step": 7305 - }, - { - "epoch": 0.5619567725559572, - "learning_rate": 0.0012098756531839245, - "loss": 1.1969, - "step": 7306 - }, - { - "epoch": 0.5620336897161757, - "learning_rate": 0.0012095200425542782, - "loss": 1.1286, - "step": 7307 - }, - { - "epoch": 0.5621106068763941, - "learning_rate": 0.0012091644488860603, - "loss": 0.6555, - "step": 7308 - }, - { - "epoch": 0.5621875240366125, - "learning_rate": 0.001208808872200033, - "loss": 0.943, - "step": 7309 - }, - { - "epoch": 0.562264441196831, - "learning_rate": 0.0012084533125169589, - "loss": 1.2899, - "step": 7310 - }, - { - "epoch": 0.5623413583570495, - "learning_rate": 0.0012080977698576, - "loss": 1.1096, - "step": 7311 - }, - { - "epoch": 0.5624182755172679, - "learning_rate": 0.0012077422442427165, - "loss": 1.2236, - "step": 7312 - }, - { - "epoch": 0.5624951926774864, - "learning_rate": 0.0012073867356930676, - "loss": 0.8613, - "step": 7313 - }, - { - "epoch": 0.5625721098377048, - "learning_rate": 0.0012070312442294124, - "loss": 1.2096, - "step": 7314 - }, - { - "epoch": 0.5626490269979232, - "learning_rate": 0.0012066757698725088, - "loss": 1.2344, - "step": 7315 - }, - { - "epoch": 0.5627259441581417, - "learning_rate": 0.0012063203126431116, - "loss": 1.5587, - "step": 7316 - }, - { - "epoch": 0.5628028613183601, - "learning_rate": 0.0012059648725619783, - "loss": 1.2057, - "step": 7317 - }, - { - "epoch": 
0.5628797784785786, - "learning_rate": 0.0012056094496498615, - "loss": 1.0798, - "step": 7318 - }, - { - "epoch": 0.562956695638797, - "learning_rate": 0.0012052540439275168, - "loss": 1.092, - "step": 7319 - }, - { - "epoch": 0.5630336127990154, - "learning_rate": 0.0012048986554156953, - "loss": 0.8852, - "step": 7320 - }, - { - "epoch": 0.5631105299592339, - "learning_rate": 0.0012045432841351488, - "loss": 1.3227, - "step": 7321 - }, - { - "epoch": 0.5631874471194523, - "learning_rate": 0.0012041879301066278, - "loss": 1.0863, - "step": 7322 - }, - { - "epoch": 0.5632643642796707, - "learning_rate": 0.0012038325933508818, - "loss": 1.3352, - "step": 7323 - }, - { - "epoch": 0.5633412814398893, - "learning_rate": 0.0012034772738886593, - "loss": 0.8323, - "step": 7324 - }, - { - "epoch": 0.5634181986001077, - "learning_rate": 0.001203121971740708, - "loss": 1.1595, - "step": 7325 - }, - { - "epoch": 0.5634951157603262, - "learning_rate": 0.001202766686927774, - "loss": 0.9934, - "step": 7326 - }, - { - "epoch": 0.5635720329205446, - "learning_rate": 0.001202411419470603, - "loss": 1.0507, - "step": 7327 - }, - { - "epoch": 0.563648950080763, - "learning_rate": 0.0012020561693899397, - "loss": 1.1643, - "step": 7328 - }, - { - "epoch": 0.5637258672409815, - "learning_rate": 0.0012017009367065264, - "loss": 1.1667, - "step": 7329 - }, - { - "epoch": 0.5638027844011999, - "learning_rate": 0.001201345721441107, - "loss": 0.7244, - "step": 7330 - }, - { - "epoch": 0.5638797015614183, - "learning_rate": 0.0012009905236144223, - "loss": 1.1365, - "step": 7331 - }, - { - "epoch": 0.5639566187216368, - "learning_rate": 0.001200635343247212, - "loss": 0.9453, - "step": 7332 - }, - { - "epoch": 0.5640335358818552, - "learning_rate": 0.001200280180360216, - "loss": 0.9196, - "step": 7333 - }, - { - "epoch": 0.5641104530420736, - "learning_rate": 0.001199925034974173, - "loss": 1.1879, - "step": 7334 - }, - { - "epoch": 0.5641873702022921, - "learning_rate": 0.0011995699071098195, - "loss": 1.3436, - "step": 7335 - }, - { - "epoch": 0.5642642873625106, - "learning_rate": 0.0011992147967878925, - "loss": 1.1447, - "step": 7336 - }, - { - "epoch": 0.5643412045227291, - "learning_rate": 0.0011988597040291267, - "loss": 1.1356, - "step": 7337 - }, - { - "epoch": 0.5644181216829475, - "learning_rate": 0.001198504628854257, - "loss": 0.844, - "step": 7338 - }, - { - "epoch": 0.5644950388431659, - "learning_rate": 0.0011981495712840165, - "loss": 1.4253, - "step": 7339 - }, - { - "epoch": 0.5645719560033844, - "learning_rate": 0.0011977945313391362, - "loss": 0.9571, - "step": 7340 - }, - { - "epoch": 0.5646488731636028, - "learning_rate": 0.001197439509040349, - "loss": 1.2405, - "step": 7341 - }, - { - "epoch": 0.5647257903238212, - "learning_rate": 0.0011970845044083842, - "loss": 1.1636, - "step": 7342 - }, - { - "epoch": 0.5648027074840397, - "learning_rate": 0.0011967295174639702, - "loss": 1.3601, - "step": 7343 - }, - { - "epoch": 0.5648796246442581, - "learning_rate": 0.0011963745482278362, - "loss": 1.2052, - "step": 7344 - }, - { - "epoch": 0.5649565418044766, - "learning_rate": 0.0011960195967207084, - "loss": 1.1753, - "step": 7345 - }, - { - "epoch": 0.565033458964695, - "learning_rate": 0.0011956646629633135, - "loss": 0.7865, - "step": 7346 - }, - { - "epoch": 0.5651103761249134, - "learning_rate": 0.001195309746976376, - "loss": 1.497, - "step": 7347 - }, - { - "epoch": 0.565187293285132, - "learning_rate": 0.0011949548487806196, - "loss": 1.139, - "step": 7348 - }, - { - "epoch": 
0.5652642104453504, - "learning_rate": 0.001194599968396768, - "loss": 1.2221, - "step": 7349 - }, - { - "epoch": 0.5653411276055688, - "learning_rate": 0.0011942451058455426, - "loss": 1.0699, - "step": 7350 - }, - { - "epoch": 0.5654180447657873, - "learning_rate": 0.0011938902611476634, - "loss": 1.2305, - "step": 7351 - }, - { - "epoch": 0.5654949619260057, - "learning_rate": 0.0011935354343238515, - "loss": 1.4869, - "step": 7352 - }, - { - "epoch": 0.5655718790862241, - "learning_rate": 0.001193180625394825, - "loss": 1.2495, - "step": 7353 - }, - { - "epoch": 0.5656487962464426, - "learning_rate": 0.001192825834381301, - "loss": 1.107, - "step": 7354 - }, - { - "epoch": 0.565725713406661, - "learning_rate": 0.0011924710613039973, - "loss": 1.0977, - "step": 7355 - }, - { - "epoch": 0.5658026305668795, - "learning_rate": 0.0011921163061836284, - "loss": 0.936, - "step": 7356 - }, - { - "epoch": 0.5658795477270979, - "learning_rate": 0.0011917615690409092, - "loss": 1.2081, - "step": 7357 - }, - { - "epoch": 0.5659564648873163, - "learning_rate": 0.0011914068498965536, - "loss": 0.9764, - "step": 7358 - }, - { - "epoch": 0.5660333820475348, - "learning_rate": 0.0011910521487712732, - "loss": 1.202, - "step": 7359 - }, - { - "epoch": 0.5661102992077532, - "learning_rate": 0.0011906974656857802, - "loss": 1.345, - "step": 7360 - }, - { - "epoch": 0.5661872163679716, - "learning_rate": 0.0011903428006607847, - "loss": 0.8083, - "step": 7361 - }, - { - "epoch": 0.5662641335281902, - "learning_rate": 0.001189988153716995, - "loss": 1.1411, - "step": 7362 - }, - { - "epoch": 0.5663410506884086, - "learning_rate": 0.0011896335248751209, - "loss": 1.2688, - "step": 7363 - }, - { - "epoch": 0.5664179678486271, - "learning_rate": 0.001189278914155868, - "loss": 1.2281, - "step": 7364 - }, - { - "epoch": 0.5664948850088455, - "learning_rate": 0.0011889243215799437, - "loss": 0.998, - "step": 7365 - }, - { - "epoch": 0.5665718021690639, - "learning_rate": 0.0011885697471680525, - "loss": 1.0692, - "step": 7366 - }, - { - "epoch": 0.5666487193292824, - "learning_rate": 0.001188215190940898, - "loss": 1.176, - "step": 7367 - }, - { - "epoch": 0.5667256364895008, - "learning_rate": 0.0011878606529191836, - "loss": 1.1958, - "step": 7368 - }, - { - "epoch": 0.5668025536497192, - "learning_rate": 0.0011875061331236107, - "loss": 1.1545, - "step": 7369 - }, - { - "epoch": 0.5668794708099377, - "learning_rate": 0.0011871516315748805, - "loss": 1.2527, - "step": 7370 - }, - { - "epoch": 0.5669563879701561, - "learning_rate": 0.0011867971482936927, - "loss": 1.0219, - "step": 7371 - }, - { - "epoch": 0.5670333051303745, - "learning_rate": 0.0011864426833007463, - "loss": 1.2691, - "step": 7372 - }, - { - "epoch": 0.567110222290593, - "learning_rate": 0.0011860882366167373, - "loss": 1.3616, - "step": 7373 - }, - { - "epoch": 0.5671871394508115, - "learning_rate": 0.0011857338082623644, - "loss": 1.5308, - "step": 7374 - }, - { - "epoch": 0.56726405661103, - "learning_rate": 0.0011853793982583207, - "loss": 1.2038, - "step": 7375 - }, - { - "epoch": 0.5673409737712484, - "learning_rate": 0.001185025006625303, - "loss": 0.9733, - "step": 7376 - }, - { - "epoch": 0.5674178909314668, - "learning_rate": 0.001184670633384003, - "loss": 1.5879, - "step": 7377 - }, - { - "epoch": 0.5674948080916853, - "learning_rate": 0.0011843162785551134, - "loss": 1.2297, - "step": 7378 - }, - { - "epoch": 0.5675717252519037, - "learning_rate": 0.0011839619421593253, - "loss": 1.0965, - "step": 7379 - }, - { - "epoch": 
0.5676486424121221, - "learning_rate": 0.0011836076242173288, - "loss": 1.139, - "step": 7380 - }, - { - "epoch": 0.5677255595723406, - "learning_rate": 0.0011832533247498123, - "loss": 1.139, - "step": 7381 - }, - { - "epoch": 0.567802476732559, - "learning_rate": 0.001182899043777465, - "loss": 1.253, - "step": 7382 - }, - { - "epoch": 0.5678793938927775, - "learning_rate": 0.0011825447813209723, - "loss": 1.1121, - "step": 7383 - }, - { - "epoch": 0.5679563110529959, - "learning_rate": 0.0011821905374010213, - "loss": 1.2031, - "step": 7384 - }, - { - "epoch": 0.5680332282132143, - "learning_rate": 0.0011818363120382964, - "loss": 0.6744, - "step": 7385 - }, - { - "epoch": 0.5681101453734329, - "learning_rate": 0.0011814821052534795, - "loss": 0.9784, - "step": 7386 - }, - { - "epoch": 0.5681870625336513, - "learning_rate": 0.0011811279170672558, - "loss": 1.1948, - "step": 7387 - }, - { - "epoch": 0.5682639796938697, - "learning_rate": 0.0011807737475003047, - "loss": 0.8045, - "step": 7388 - }, - { - "epoch": 0.5683408968540882, - "learning_rate": 0.001180419596573307, - "loss": 0.9747, - "step": 7389 - }, - { - "epoch": 0.5684178140143066, - "learning_rate": 0.0011800654643069424, - "loss": 0.9116, - "step": 7390 - }, - { - "epoch": 0.5684947311745251, - "learning_rate": 0.0011797113507218883, - "loss": 0.9082, - "step": 7391 - }, - { - "epoch": 0.5685716483347435, - "learning_rate": 0.0011793572558388228, - "loss": 0.9714, - "step": 7392 - }, - { - "epoch": 0.5686485654949619, - "learning_rate": 0.0011790031796784213, - "loss": 1.3846, - "step": 7393 - }, - { - "epoch": 0.5687254826551804, - "learning_rate": 0.0011786491222613583, - "loss": 1.0717, - "step": 7394 - }, - { - "epoch": 0.5688023998153988, - "learning_rate": 0.0011782950836083082, - "loss": 1.1803, - "step": 7395 - }, - { - "epoch": 0.5688793169756172, - "learning_rate": 0.0011779410637399437, - "loss": 1.6949, - "step": 7396 - }, - { - "epoch": 0.5689562341358357, - "learning_rate": 0.0011775870626769354, - "loss": 0.9212, - "step": 7397 - }, - { - "epoch": 0.5690331512960541, - "learning_rate": 0.0011772330804399552, - "loss": 1.1032, - "step": 7398 - }, - { - "epoch": 0.5691100684562725, - "learning_rate": 0.0011768791170496716, - "loss": 1.0047, - "step": 7399 - }, - { - "epoch": 0.5691869856164911, - "learning_rate": 0.001176525172526753, - "loss": 1.2054, - "step": 7400 - }, - { - "epoch": 0.5692639027767095, - "learning_rate": 0.0011761712468918667, - "loss": 1.2594, - "step": 7401 - }, - { - "epoch": 0.569340819936928, - "learning_rate": 0.0011758173401656788, - "loss": 1.2848, - "step": 7402 - }, - { - "epoch": 0.5694177370971464, - "learning_rate": 0.0011754634523688543, - "loss": 1.403, - "step": 7403 - }, - { - "epoch": 0.5694946542573648, - "learning_rate": 0.0011751095835220572, - "loss": 1.1681, - "step": 7404 - }, - { - "epoch": 0.5695715714175833, - "learning_rate": 0.0011747557336459495, - "loss": 1.1703, - "step": 7405 - }, - { - "epoch": 0.5696484885778017, - "learning_rate": 0.001174401902761194, - "loss": 0.9024, - "step": 7406 - }, - { - "epoch": 0.5697254057380201, - "learning_rate": 0.0011740480908884509, - "loss": 1.1178, - "step": 7407 - }, - { - "epoch": 0.5698023228982386, - "learning_rate": 0.0011736942980483788, - "loss": 1.1575, - "step": 7408 - }, - { - "epoch": 0.569879240058457, - "learning_rate": 0.0011733405242616375, - "loss": 1.1706, - "step": 7409 - }, - { - "epoch": 0.5699561572186755, - "learning_rate": 0.0011729867695488825, - "loss": 1.3981, - "step": 7410 - }, - { - "epoch": 
0.570033074378894, - "learning_rate": 0.001172633033930772, - "loss": 1.055, - "step": 7411 - }, - { - "epoch": 0.5701099915391123, - "learning_rate": 0.0011722793174279592, - "loss": 1.102, - "step": 7412 - }, - { - "epoch": 0.5701869086993309, - "learning_rate": 0.0011719256200610983, - "loss": 0.9951, - "step": 7413 - }, - { - "epoch": 0.5702638258595493, - "learning_rate": 0.001171571941850843, - "loss": 1.2232, - "step": 7414 - }, - { - "epoch": 0.5703407430197677, - "learning_rate": 0.0011712182828178437, - "loss": 1.2202, - "step": 7415 - }, - { - "epoch": 0.5704176601799862, - "learning_rate": 0.0011708646429827517, - "loss": 1.0668, - "step": 7416 - }, - { - "epoch": 0.5704945773402046, - "learning_rate": 0.0011705110223662162, - "loss": 1.3328, - "step": 7417 - }, - { - "epoch": 0.570571494500423, - "learning_rate": 0.0011701574209888857, - "loss": 1.094, - "step": 7418 - }, - { - "epoch": 0.5706484116606415, - "learning_rate": 0.0011698038388714069, - "loss": 1.6162, - "step": 7419 - }, - { - "epoch": 0.5707253288208599, - "learning_rate": 0.001169450276034426, - "loss": 1.0043, - "step": 7420 - }, - { - "epoch": 0.5708022459810784, - "learning_rate": 0.0011690967324985882, - "loss": 1.1951, - "step": 7421 - }, - { - "epoch": 0.5708791631412968, - "learning_rate": 0.0011687432082845368, - "loss": 0.7274, - "step": 7422 - }, - { - "epoch": 0.5709560803015152, - "learning_rate": 0.0011683897034129155, - "loss": 0.8897, - "step": 7423 - }, - { - "epoch": 0.5710329974617337, - "learning_rate": 0.001168036217904364, - "loss": 1.4012, - "step": 7424 - }, - { - "epoch": 0.5711099146219522, - "learning_rate": 0.0011676827517795242, - "loss": 1.2045, - "step": 7425 - }, - { - "epoch": 0.5711868317821706, - "learning_rate": 0.001167329305059035, - "loss": 1.7016, - "step": 7426 - }, - { - "epoch": 0.5712637489423891, - "learning_rate": 0.0011669758777635337, - "loss": 1.2601, - "step": 7427 - }, - { - "epoch": 0.5713406661026075, - "learning_rate": 0.0011666224699136587, - "loss": 0.7885, - "step": 7428 - }, - { - "epoch": 0.571417583262826, - "learning_rate": 0.0011662690815300446, - "loss": 1.3878, - "step": 7429 - }, - { - "epoch": 0.5714945004230444, - "learning_rate": 0.001165915712633327, - "loss": 1.3918, - "step": 7430 - }, - { - "epoch": 0.5715714175832628, - "learning_rate": 0.001165562363244139, - "loss": 1.343, - "step": 7431 - }, - { - "epoch": 0.5716483347434813, - "learning_rate": 0.0011652090333831128, - "loss": 1.037, - "step": 7432 - }, - { - "epoch": 0.5717252519036997, - "learning_rate": 0.0011648557230708805, - "loss": 1.3406, - "step": 7433 - }, - { - "epoch": 0.5718021690639181, - "learning_rate": 0.0011645024323280722, - "loss": 1.1148, - "step": 7434 - }, - { - "epoch": 0.5718790862241366, - "learning_rate": 0.0011641491611753152, - "loss": 1.1338, - "step": 7435 - }, - { - "epoch": 0.571956003384355, - "learning_rate": 0.00116379590963324, - "loss": 1.2277, - "step": 7436 - }, - { - "epoch": 0.5720329205445734, - "learning_rate": 0.0011634426777224717, - "loss": 1.5736, - "step": 7437 - }, - { - "epoch": 0.572109837704792, - "learning_rate": 0.0011630894654636357, - "loss": 1.0534, - "step": 7438 - }, - { - "epoch": 0.5721867548650104, - "learning_rate": 0.0011627362728773574, - "loss": 1.2428, - "step": 7439 - }, - { - "epoch": 0.5722636720252289, - "learning_rate": 0.0011623830999842593, - "loss": 1.3041, - "step": 7440 - }, - { - "epoch": 0.5723405891854473, - "learning_rate": 0.001162029946804964, - "loss": 1.2164, - "step": 7441 - }, - { - "epoch": 
0.5724175063456657, - "learning_rate": 0.0011616768133600925, - "loss": 0.6077, - "step": 7442 - }, - { - "epoch": 0.5724944235058842, - "learning_rate": 0.0011613236996702641, - "loss": 1.0239, - "step": 7443 - }, - { - "epoch": 0.5725713406661026, - "learning_rate": 0.0011609706057560982, - "loss": 1.3251, - "step": 7444 - }, - { - "epoch": 0.572648257826321, - "learning_rate": 0.001160617531638212, - "loss": 0.7548, - "step": 7445 - }, - { - "epoch": 0.5727251749865395, - "learning_rate": 0.0011602644773372212, - "loss": 1.1027, - "step": 7446 - }, - { - "epoch": 0.5728020921467579, - "learning_rate": 0.0011599114428737425, - "loss": 1.1049, - "step": 7447 - }, - { - "epoch": 0.5728790093069764, - "learning_rate": 0.0011595584282683882, - "loss": 1.0308, - "step": 7448 - }, - { - "epoch": 0.5729559264671948, - "learning_rate": 0.0011592054335417728, - "loss": 1.1353, - "step": 7449 - }, - { - "epoch": 0.5730328436274132, - "learning_rate": 0.001158852458714507, - "loss": 0.9771, - "step": 7450 - }, - { - "epoch": 0.5731097607876318, - "learning_rate": 0.0011584995038072016, - "loss": 1.2061, - "step": 7451 - }, - { - "epoch": 0.5731866779478502, - "learning_rate": 0.0011581465688404661, - "loss": 1.3396, - "step": 7452 - }, - { - "epoch": 0.5732635951080686, - "learning_rate": 0.001157793653834909, - "loss": 1.2358, - "step": 7453 - }, - { - "epoch": 0.5733405122682871, - "learning_rate": 0.0011574407588111366, - "loss": 1.0596, - "step": 7454 - }, - { - "epoch": 0.5734174294285055, - "learning_rate": 0.0011570878837897558, - "loss": 1.2003, - "step": 7455 - }, - { - "epoch": 0.5734943465887239, - "learning_rate": 0.0011567350287913709, - "loss": 1.3258, - "step": 7456 - }, - { - "epoch": 0.5735712637489424, - "learning_rate": 0.0011563821938365844, - "loss": 1.1323, - "step": 7457 - }, - { - "epoch": 0.5736481809091608, - "learning_rate": 0.001156029378946001, - "loss": 0.9305, - "step": 7458 - }, - { - "epoch": 0.5737250980693793, - "learning_rate": 0.0011556765841402195, - "loss": 0.9148, - "step": 7459 - }, - { - "epoch": 0.5738020152295977, - "learning_rate": 0.001155323809439842, - "loss": 0.9186, - "step": 7460 - }, - { - "epoch": 0.5738789323898161, - "learning_rate": 0.0011549710548654664, - "loss": 1.4991, - "step": 7461 - }, - { - "epoch": 0.5739558495500346, - "learning_rate": 0.0011546183204376899, - "loss": 1.5976, - "step": 7462 - }, - { - "epoch": 0.574032766710253, - "learning_rate": 0.00115426560617711, - "loss": 1.1626, - "step": 7463 - }, - { - "epoch": 0.5741096838704715, - "learning_rate": 0.0011539129121043218, - "loss": 0.9012, - "step": 7464 - }, - { - "epoch": 0.57418660103069, - "learning_rate": 0.001153560238239919, - "loss": 1.1351, - "step": 7465 - }, - { - "epoch": 0.5742635181909084, - "learning_rate": 0.0011532075846044953, - "loss": 1.2098, - "step": 7466 - }, - { - "epoch": 0.5743404353511269, - "learning_rate": 0.0011528549512186414, - "loss": 1.013, - "step": 7467 - }, - { - "epoch": 0.5744173525113453, - "learning_rate": 0.0011525023381029496, - "loss": 1.0897, - "step": 7468 - }, - { - "epoch": 0.5744942696715637, - "learning_rate": 0.0011521497452780085, - "loss": 0.9073, - "step": 7469 - }, - { - "epoch": 0.5745711868317822, - "learning_rate": 0.0011517971727644052, - "loss": 1.2426, - "step": 7470 - }, - { - "epoch": 0.5746481039920006, - "learning_rate": 0.001151444620582729, - "loss": 1.2048, - "step": 7471 - }, - { - "epoch": 0.574725021152219, - "learning_rate": 0.0011510920887535644, - "loss": 1.2618, - "step": 7472 - }, - { - "epoch": 
0.5748019383124375, - "learning_rate": 0.0011507395772974958, - "loss": 1.235, - "step": 7473 - }, - { - "epoch": 0.5748788554726559, - "learning_rate": 0.0011503870862351077, - "loss": 0.6297, - "step": 7474 - }, - { - "epoch": 0.5749557726328743, - "learning_rate": 0.001150034615586982, - "loss": 1.1965, - "step": 7475 - }, - { - "epoch": 0.5750326897930929, - "learning_rate": 0.0011496821653736995, - "loss": 1.3758, - "step": 7476 - }, - { - "epoch": 0.5751096069533113, - "learning_rate": 0.001149329735615841, - "loss": 1.236, - "step": 7477 - }, - { - "epoch": 0.5751865241135298, - "learning_rate": 0.001148977326333984, - "loss": 1.1907, - "step": 7478 - }, - { - "epoch": 0.5752634412737482, - "learning_rate": 0.0011486249375487073, - "loss": 1.1245, - "step": 7479 - }, - { - "epoch": 0.5753403584339666, - "learning_rate": 0.001148272569280587, - "loss": 0.888, - "step": 7480 - }, - { - "epoch": 0.5754172755941851, - "learning_rate": 0.0011479202215501968, - "loss": 1.2184, - "step": 7481 - }, - { - "epoch": 0.5754941927544035, - "learning_rate": 0.001147567894378113, - "loss": 1.2947, - "step": 7482 - }, - { - "epoch": 0.5755711099146219, - "learning_rate": 0.0011472155877849067, - "loss": 1.3256, - "step": 7483 - }, - { - "epoch": 0.5756480270748404, - "learning_rate": 0.0011468633017911495, - "loss": 1.3511, - "step": 7484 - }, - { - "epoch": 0.5757249442350588, - "learning_rate": 0.0011465110364174127, - "loss": 0.7183, - "step": 7485 - }, - { - "epoch": 0.5758018613952773, - "learning_rate": 0.0011461587916842644, - "loss": 1.0434, - "step": 7486 - }, - { - "epoch": 0.5758787785554957, - "learning_rate": 0.0011458065676122732, - "loss": 1.032, - "step": 7487 - }, - { - "epoch": 0.5759556957157141, - "learning_rate": 0.0011454543642220058, - "loss": 1.4464, - "step": 7488 - }, - { - "epoch": 0.5760326128759327, - "learning_rate": 0.0011451021815340272, - "loss": 1.2936, - "step": 7489 - }, - { - "epoch": 0.5761095300361511, - "learning_rate": 0.0011447500195689024, - "loss": 1.0794, - "step": 7490 - }, - { - "epoch": 0.5761864471963695, - "learning_rate": 0.0011443978783471946, - "loss": 1.2515, - "step": 7491 - }, - { - "epoch": 0.576263364356588, - "learning_rate": 0.0011440457578894645, - "loss": 1.0259, - "step": 7492 - }, - { - "epoch": 0.5763402815168064, - "learning_rate": 0.0011436936582162741, - "loss": 0.9607, - "step": 7493 - }, - { - "epoch": 0.5764171986770248, - "learning_rate": 0.0011433415793481823, - "loss": 1.8073, - "step": 7494 - }, - { - "epoch": 0.5764941158372433, - "learning_rate": 0.001142989521305747, - "loss": 0.8091, - "step": 7495 - }, - { - "epoch": 0.5765710329974617, - "learning_rate": 0.0011426374841095256, - "loss": 1.2961, - "step": 7496 - }, - { - "epoch": 0.5766479501576802, - "learning_rate": 0.001142285467780074, - "loss": 1.3273, - "step": 7497 - }, - { - "epoch": 0.5767248673178986, - "learning_rate": 0.001141933472337947, - "loss": 1.635, - "step": 7498 - }, - { - "epoch": 0.576801784478117, - "learning_rate": 0.0011415814978036978, - "loss": 1.0022, - "step": 7499 - }, - { - "epoch": 0.5768787016383355, - "learning_rate": 0.001141229544197878, - "loss": 1.1319, - "step": 7500 - }, - { - "epoch": 0.576955618798554, - "learning_rate": 0.0011408776115410396, - "loss": 1.3035, - "step": 7501 - }, - { - "epoch": 0.5770325359587724, - "learning_rate": 0.0011405256998537317, - "loss": 1.3327, - "step": 7502 - }, - { - "epoch": 0.5771094531189909, - "learning_rate": 0.001140173809156502, - "loss": 1.1914, - "step": 7503 - }, - { - "epoch": 
0.5771863702792093, - "learning_rate": 0.0011398219394698995, - "loss": 1.0573, - "step": 7504 - }, - { - "epoch": 0.5772632874394278, - "learning_rate": 0.0011394700908144684, - "loss": 1.3627, - "step": 7505 - }, - { - "epoch": 0.5773402045996462, - "learning_rate": 0.0011391182632107554, - "loss": 0.9205, - "step": 7506 - }, - { - "epoch": 0.5774171217598646, - "learning_rate": 0.001138766456679303, - "loss": 1.3411, - "step": 7507 - }, - { - "epoch": 0.5774940389200831, - "learning_rate": 0.001138414671240653, - "loss": 1.5169, - "step": 7508 - }, - { - "epoch": 0.5775709560803015, - "learning_rate": 0.0011380629069153477, - "loss": 1.1586, - "step": 7509 - }, - { - "epoch": 0.5776478732405199, - "learning_rate": 0.0011377111637239264, - "loss": 0.977, - "step": 7510 - }, - { - "epoch": 0.5777247904007384, - "learning_rate": 0.0011373594416869274, - "loss": 0.8814, - "step": 7511 - }, - { - "epoch": 0.5778017075609568, - "learning_rate": 0.001137007740824889, - "loss": 1.3461, - "step": 7512 - }, - { - "epoch": 0.5778786247211753, - "learning_rate": 0.0011366560611583466, - "loss": 0.9412, - "step": 7513 - }, - { - "epoch": 0.5779555418813938, - "learning_rate": 0.0011363044027078357, - "loss": 1.235, - "step": 7514 - }, - { - "epoch": 0.5780324590416122, - "learning_rate": 0.0011359527654938902, - "loss": 1.1531, - "step": 7515 - }, - { - "epoch": 0.5781093762018307, - "learning_rate": 0.0011356011495370411, - "loss": 1.1962, - "step": 7516 - }, - { - "epoch": 0.5781862933620491, - "learning_rate": 0.0011352495548578216, - "loss": 1.1847, - "step": 7517 - }, - { - "epoch": 0.5782632105222675, - "learning_rate": 0.0011348979814767603, - "loss": 0.9579, - "step": 7518 - }, - { - "epoch": 0.578340127682486, - "learning_rate": 0.0011345464294143862, - "loss": 0.8197, - "step": 7519 - }, - { - "epoch": 0.5784170448427044, - "learning_rate": 0.0011341948986912272, - "loss": 1.2128, - "step": 7520 - }, - { - "epoch": 0.5784939620029228, - "learning_rate": 0.0011338433893278094, - "loss": 1.2701, - "step": 7521 - }, - { - "epoch": 0.5785708791631413, - "learning_rate": 0.0011334919013446573, - "loss": 1.0184, - "step": 7522 - }, - { - "epoch": 0.5786477963233597, - "learning_rate": 0.0011331404347622953, - "loss": 1.5434, - "step": 7523 - }, - { - "epoch": 0.5787247134835782, - "learning_rate": 0.0011327889896012454, - "loss": 0.9095, - "step": 7524 - }, - { - "epoch": 0.5788016306437966, - "learning_rate": 0.0011324375658820292, - "loss": 1.3754, - "step": 7525 - }, - { - "epoch": 0.578878547804015, - "learning_rate": 0.0011320861636251673, - "loss": 1.1569, - "step": 7526 - }, - { - "epoch": 0.5789554649642336, - "learning_rate": 0.0011317347828511764, - "loss": 1.0138, - "step": 7527 - }, - { - "epoch": 0.579032382124452, - "learning_rate": 0.0011313834235805766, - "loss": 1.212, - "step": 7528 - }, - { - "epoch": 0.5791092992846704, - "learning_rate": 0.0011310320858338822, - "loss": 1.4247, - "step": 7529 - }, - { - "epoch": 0.5791862164448889, - "learning_rate": 0.0011306807696316089, - "loss": 1.0551, - "step": 7530 - }, - { - "epoch": 0.5792631336051073, - "learning_rate": 0.0011303294749942705, - "loss": 1.189, - "step": 7531 - }, - { - "epoch": 0.5793400507653258, - "learning_rate": 0.0011299782019423789, - "loss": 1.4765, - "step": 7532 - }, - { - "epoch": 0.5794169679255442, - "learning_rate": 0.0011296269504964462, - "loss": 1.2702, - "step": 7533 - }, - { - "epoch": 0.5794938850857626, - "learning_rate": 0.0011292757206769817, - "loss": 1.2545, - "step": 7534 - }, - { - "epoch": 
0.5795708022459811, - "learning_rate": 0.001128924512504494, - "loss": 1.1158, - "step": 7535 - }, - { - "epoch": 0.5796477194061995, - "learning_rate": 0.0011285733259994912, - "loss": 0.7285, - "step": 7536 - }, - { - "epoch": 0.5797246365664179, - "learning_rate": 0.0011282221611824792, - "loss": 1.5378, - "step": 7537 - }, - { - "epoch": 0.5798015537266364, - "learning_rate": 0.0011278710180739617, - "loss": 1.0237, - "step": 7538 - }, - { - "epoch": 0.5798784708868548, - "learning_rate": 0.0011275198966944442, - "loss": 1.0707, - "step": 7539 - }, - { - "epoch": 0.5799553880470733, - "learning_rate": 0.0011271687970644277, - "loss": 0.9255, - "step": 7540 - }, - { - "epoch": 0.5800323052072918, - "learning_rate": 0.0011268177192044135, - "loss": 1.1747, - "step": 7541 - }, - { - "epoch": 0.5801092223675102, - "learning_rate": 0.0011264666631349018, - "loss": 1.2695, - "step": 7542 - }, - { - "epoch": 0.5801861395277287, - "learning_rate": 0.0011261156288763909, - "loss": 1.0433, - "step": 7543 - }, - { - "epoch": 0.5802630566879471, - "learning_rate": 0.001125764616449378, - "loss": 0.8894, - "step": 7544 - }, - { - "epoch": 0.5803399738481655, - "learning_rate": 0.0011254136258743595, - "loss": 1.4501, - "step": 7545 - }, - { - "epoch": 0.580416891008384, - "learning_rate": 0.0011250626571718293, - "loss": 1.0785, - "step": 7546 - }, - { - "epoch": 0.5804938081686024, - "learning_rate": 0.0011247117103622816, - "loss": 1.2745, - "step": 7547 - }, - { - "epoch": 0.5805707253288208, - "learning_rate": 0.0011243607854662086, - "loss": 1.278, - "step": 7548 - }, - { - "epoch": 0.5806476424890393, - "learning_rate": 0.0011240098825040998, - "loss": 1.1555, - "step": 7549 - }, - { - "epoch": 0.5807245596492577, - "learning_rate": 0.001123659001496447, - "loss": 1.2342, - "step": 7550 - }, - { - "epoch": 0.5808014768094762, - "learning_rate": 0.001123308142463736, - "loss": 0.9779, - "step": 7551 - }, - { - "epoch": 0.5808783939696947, - "learning_rate": 0.0011229573054264564, - "loss": 1.3377, - "step": 7552 - }, - { - "epoch": 0.580955311129913, - "learning_rate": 0.0011226064904050924, - "loss": 0.8546, - "step": 7553 - }, - { - "epoch": 0.5810322282901316, - "learning_rate": 0.0011222556974201281, - "loss": 1.1539, - "step": 7554 - }, - { - "epoch": 0.58110914545035, - "learning_rate": 0.0011219049264920478, - "loss": 0.819, - "step": 7555 - }, - { - "epoch": 0.5811860626105684, - "learning_rate": 0.001121554177641333, - "loss": 1.2979, - "step": 7556 - }, - { - "epoch": 0.5812629797707869, - "learning_rate": 0.0011212034508884638, - "loss": 0.7625, - "step": 7557 - }, - { - "epoch": 0.5813398969310053, - "learning_rate": 0.00112085274625392, - "loss": 0.8487, - "step": 7558 - }, - { - "epoch": 0.5814168140912237, - "learning_rate": 0.0011205020637581795, - "loss": 1.195, - "step": 7559 - }, - { - "epoch": 0.5814937312514422, - "learning_rate": 0.001120151403421719, - "loss": 1.2928, - "step": 7560 - }, - { - "epoch": 0.5815706484116606, - "learning_rate": 0.0011198007652650143, - "loss": 1.268, - "step": 7561 - }, - { - "epoch": 0.5816475655718791, - "learning_rate": 0.001119450149308538, - "loss": 1.115, - "step": 7562 - }, - { - "epoch": 0.5817244827320975, - "learning_rate": 0.0011190995555727652, - "loss": 1.4845, - "step": 7563 - }, - { - "epoch": 0.5818013998923159, - "learning_rate": 0.0011187489840781663, - "loss": 0.9678, - "step": 7564 - }, - { - "epoch": 0.5818783170525345, - "learning_rate": 0.0011183984348452109, - "loss": 1.1935, - "step": 7565 - }, - { - "epoch": 
0.5819552342127529, - "learning_rate": 0.0011180479078943691, - "loss": 1.3641, - "step": 7566 - }, - { - "epoch": 0.5820321513729713, - "learning_rate": 0.001117697403246108, - "loss": 1.0703, - "step": 7567 - }, - { - "epoch": 0.5821090685331898, - "learning_rate": 0.0011173469209208935, - "loss": 1.2778, - "step": 7568 - }, - { - "epoch": 0.5821859856934082, - "learning_rate": 0.0011169964609391914, - "loss": 1.0941, - "step": 7569 - }, - { - "epoch": 0.5822629028536267, - "learning_rate": 0.0011166460233214648, - "loss": 1.2988, - "step": 7570 - }, - { - "epoch": 0.5823398200138451, - "learning_rate": 0.0011162956080881768, - "loss": 1.1732, - "step": 7571 - }, - { - "epoch": 0.5824167371740635, - "learning_rate": 0.0011159452152597882, - "loss": 1.1327, - "step": 7572 - }, - { - "epoch": 0.582493654334282, - "learning_rate": 0.0011155948448567585, - "loss": 1.192, - "step": 7573 - }, - { - "epoch": 0.5825705714945004, - "learning_rate": 0.0011152444968995465, - "loss": 1.1793, - "step": 7574 - }, - { - "epoch": 0.5826474886547188, - "learning_rate": 0.0011148941714086097, - "loss": 0.7788, - "step": 7575 - }, - { - "epoch": 0.5827244058149373, - "learning_rate": 0.0011145438684044028, - "loss": 1.1694, - "step": 7576 - }, - { - "epoch": 0.5828013229751557, - "learning_rate": 0.0011141935879073817, - "loss": 0.776, - "step": 7577 - }, - { - "epoch": 0.5828782401353741, - "learning_rate": 0.0011138433299379988, - "loss": 1.3314, - "step": 7578 - }, - { - "epoch": 0.5829551572955927, - "learning_rate": 0.0011134930945167064, - "loss": 1.2636, - "step": 7579 - }, - { - "epoch": 0.5830320744558111, - "learning_rate": 0.001113142881663955, - "loss": 1.3117, - "step": 7580 - }, - { - "epoch": 0.5831089916160296, - "learning_rate": 0.0011127926914001936, - "loss": 1.4092, - "step": 7581 - }, - { - "epoch": 0.583185908776248, - "learning_rate": 0.001112442523745871, - "loss": 1.1105, - "step": 7582 - }, - { - "epoch": 0.5832628259364664, - "learning_rate": 0.0011120923787214332, - "loss": 1.3215, - "step": 7583 - }, - { - "epoch": 0.5833397430966849, - "learning_rate": 0.0011117422563473253, - "loss": 1.4006, - "step": 7584 - }, - { - "epoch": 0.5834166602569033, - "learning_rate": 0.0011113921566439922, - "loss": 1.3776, - "step": 7585 - }, - { - "epoch": 0.5834935774171217, - "learning_rate": 0.0011110420796318764, - "loss": 1.3827, - "step": 7586 - }, - { - "epoch": 0.5835704945773402, - "learning_rate": 0.0011106920253314179, - "loss": 1.1859, - "step": 7587 - }, - { - "epoch": 0.5836474117375586, - "learning_rate": 0.0011103419937630585, - "loss": 1.1144, - "step": 7588 - }, - { - "epoch": 0.5837243288977771, - "learning_rate": 0.0011099919849472358, - "loss": 1.0833, - "step": 7589 - }, - { - "epoch": 0.5838012460579955, - "learning_rate": 0.001109641998904388, - "loss": 1.1369, - "step": 7590 - }, - { - "epoch": 0.583878163218214, - "learning_rate": 0.0011092920356549504, - "loss": 0.9468, - "step": 7591 - }, - { - "epoch": 0.5839550803784325, - "learning_rate": 0.0011089420952193577, - "loss": 0.9523, - "step": 7592 - }, - { - "epoch": 0.5840319975386509, - "learning_rate": 0.0011085921776180442, - "loss": 0.9987, - "step": 7593 - }, - { - "epoch": 0.5841089146988693, - "learning_rate": 0.0011082422828714413, - "loss": 1.3263, - "step": 7594 - }, - { - "epoch": 0.5841858318590878, - "learning_rate": 0.0011078924109999791, - "loss": 0.7874, - "step": 7595 - }, - { - "epoch": 0.5842627490193062, - "learning_rate": 0.0011075425620240885, - "loss": 0.7541, - "step": 7596 - }, - { - 
"epoch": 0.5843396661795246, - "learning_rate": 0.0011071927359641968, - "loss": 1.4845, - "step": 7597 - }, - { - "epoch": 0.5844165833397431, - "learning_rate": 0.0011068429328407296, - "loss": 1.0559, - "step": 7598 - }, - { - "epoch": 0.5844935004999615, - "learning_rate": 0.0011064931526741143, - "loss": 1.1676, - "step": 7599 - }, - { - "epoch": 0.58457041766018, - "learning_rate": 0.0011061433954847732, - "loss": 1.2336, - "step": 7600 - }, - { - "epoch": 0.5846473348203984, - "learning_rate": 0.0011057936612931302, - "loss": 1.1726, - "step": 7601 - }, - { - "epoch": 0.5847242519806168, - "learning_rate": 0.001105443950119606, - "loss": 1.1307, - "step": 7602 - }, - { - "epoch": 0.5848011691408354, - "learning_rate": 0.0011050942619846205, - "loss": 1.2726, - "step": 7603 - }, - { - "epoch": 0.5848780863010538, - "learning_rate": 0.001104744596908593, - "loss": 0.8969, - "step": 7604 - }, - { - "epoch": 0.5849550034612722, - "learning_rate": 0.0011043949549119404, - "loss": 1.1497, - "step": 7605 - }, - { - "epoch": 0.5850319206214907, - "learning_rate": 0.001104045336015078, - "loss": 1.1684, - "step": 7606 - }, - { - "epoch": 0.5851088377817091, - "learning_rate": 0.0011036957402384218, - "loss": 1.2694, - "step": 7607 - }, - { - "epoch": 0.5851857549419276, - "learning_rate": 0.0011033461676023839, - "loss": 1.4821, - "step": 7608 - }, - { - "epoch": 0.585262672102146, - "learning_rate": 0.001102996618127377, - "loss": 0.9267, - "step": 7609 - }, - { - "epoch": 0.5853395892623644, - "learning_rate": 0.0011026470918338116, - "loss": 1.0763, - "step": 7610 - }, - { - "epoch": 0.5854165064225829, - "learning_rate": 0.0011022975887420958, - "loss": 1.2333, - "step": 7611 - }, - { - "epoch": 0.5854934235828013, - "learning_rate": 0.001101948108872639, - "loss": 1.174, - "step": 7612 - }, - { - "epoch": 0.5855703407430197, - "learning_rate": 0.0011015986522458466, - "loss": 1.4906, - "step": 7613 - }, - { - "epoch": 0.5856472579032382, - "learning_rate": 0.001101249218882124, - "loss": 0.809, - "step": 7614 - }, - { - "epoch": 0.5857241750634566, - "learning_rate": 0.0011008998088018753, - "loss": 1.0597, - "step": 7615 - }, - { - "epoch": 0.5858010922236752, - "learning_rate": 0.0011005504220255025, - "loss": 1.1544, - "step": 7616 - }, - { - "epoch": 0.5858780093838936, - "learning_rate": 0.001100201058573407, - "loss": 1.1005, - "step": 7617 - }, - { - "epoch": 0.585954926544112, - "learning_rate": 0.0010998517184659885, - "loss": 1.0385, - "step": 7618 - }, - { - "epoch": 0.5860318437043305, - "learning_rate": 0.0010995024017236448, - "loss": 1.2724, - "step": 7619 - }, - { - "epoch": 0.5861087608645489, - "learning_rate": 0.0010991531083667735, - "loss": 1.1509, - "step": 7620 - }, - { - "epoch": 0.5861856780247673, - "learning_rate": 0.0010988038384157704, - "loss": 1.073, - "step": 7621 - }, - { - "epoch": 0.5862625951849858, - "learning_rate": 0.0010984545918910282, - "loss": 1.2951, - "step": 7622 - }, - { - "epoch": 0.5863395123452042, - "learning_rate": 0.0010981053688129418, - "loss": 1.0164, - "step": 7623 - }, - { - "epoch": 0.5864164295054226, - "learning_rate": 0.0010977561692019017, - "loss": 1.2168, - "step": 7624 - }, - { - "epoch": 0.5864933466656411, - "learning_rate": 0.0010974069930782975, - "loss": 1.3536, - "step": 7625 - }, - { - "epoch": 0.5865702638258595, - "learning_rate": 0.001097057840462519, - "loss": 1.263, - "step": 7626 - }, - { - "epoch": 0.586647180986078, - "learning_rate": 0.001096708711374953, - "loss": 1.0746, - "step": 7627 - }, - { - 
"epoch": 0.5867240981462964, - "learning_rate": 0.0010963596058359854, - "loss": 0.9617, - "step": 7628 - }, - { - "epoch": 0.5868010153065149, - "learning_rate": 0.0010960105238660015, - "loss": 1.0653, - "step": 7629 - }, - { - "epoch": 0.5868779324667334, - "learning_rate": 0.0010956614654853837, - "loss": 1.0892, - "step": 7630 - }, - { - "epoch": 0.5869548496269518, - "learning_rate": 0.0010953124307145146, - "loss": 1.2078, - "step": 7631 - }, - { - "epoch": 0.5870317667871702, - "learning_rate": 0.0010949634195737748, - "loss": 1.3924, - "step": 7632 - }, - { - "epoch": 0.5871086839473887, - "learning_rate": 0.0010946144320835422, - "loss": 1.1187, - "step": 7633 - }, - { - "epoch": 0.5871856011076071, - "learning_rate": 0.0010942654682641962, - "loss": 1.0561, - "step": 7634 - }, - { - "epoch": 0.5872625182678256, - "learning_rate": 0.0010939165281361116, - "loss": 1.2642, - "step": 7635 - }, - { - "epoch": 0.587339435428044, - "learning_rate": 0.0010935676117196649, - "loss": 1.2873, - "step": 7636 - }, - { - "epoch": 0.5874163525882624, - "learning_rate": 0.0010932187190352287, - "loss": 1.3543, - "step": 7637 - }, - { - "epoch": 0.5874932697484809, - "learning_rate": 0.0010928698501031751, - "loss": 1.1779, - "step": 7638 - }, - { - "epoch": 0.5875701869086993, - "learning_rate": 0.0010925210049438755, - "loss": 0.9547, - "step": 7639 - }, - { - "epoch": 0.5876471040689177, - "learning_rate": 0.001092172183577699, - "loss": 1.0573, - "step": 7640 - }, - { - "epoch": 0.5877240212291363, - "learning_rate": 0.0010918233860250136, - "loss": 1.068, - "step": 7641 - }, - { - "epoch": 0.5878009383893547, - "learning_rate": 0.0010914746123061865, - "loss": 0.7642, - "step": 7642 - }, - { - "epoch": 0.5878778555495731, - "learning_rate": 0.0010911258624415826, - "loss": 1.1254, - "step": 7643 - }, - { - "epoch": 0.5879547727097916, - "learning_rate": 0.001090777136451565, - "loss": 1.0528, - "step": 7644 - }, - { - "epoch": 0.58803168987001, - "learning_rate": 0.0010904284343564974, - "loss": 1.0813, - "step": 7645 - }, - { - "epoch": 0.5881086070302285, - "learning_rate": 0.0010900797561767398, - "loss": 0.8347, - "step": 7646 - }, - { - "epoch": 0.5881855241904469, - "learning_rate": 0.0010897311019326533, - "loss": 1.137, - "step": 7647 - }, - { - "epoch": 0.5882624413506653, - "learning_rate": 0.0010893824716445952, - "loss": 0.9631, - "step": 7648 - }, - { - "epoch": 0.5883393585108838, - "learning_rate": 0.001089033865332922, - "loss": 1.2, - "step": 7649 - }, - { - "epoch": 0.5884162756711022, - "learning_rate": 0.00108868528301799, - "loss": 1.1167, - "step": 7650 - }, - { - "epoch": 0.5884931928313206, - "learning_rate": 0.0010883367247201529, - "loss": 1.158, - "step": 7651 - }, - { - "epoch": 0.5885701099915391, - "learning_rate": 0.0010879881904597634, - "loss": 1.1843, - "step": 7652 - }, - { - "epoch": 0.5886470271517575, - "learning_rate": 0.001087639680257173, - "loss": 0.872, - "step": 7653 - }, - { - "epoch": 0.588723944311976, - "learning_rate": 0.0010872911941327312, - "loss": 1.4038, - "step": 7654 - }, - { - "epoch": 0.5888008614721945, - "learning_rate": 0.001086942732106787, - "loss": 1.2358, - "step": 7655 - }, - { - "epoch": 0.5888777786324129, - "learning_rate": 0.0010865942941996874, - "loss": 1.1987, - "step": 7656 - }, - { - "epoch": 0.5889546957926314, - "learning_rate": 0.0010862458804317768, - "loss": 1.3849, - "step": 7657 - }, - { - "epoch": 0.5890316129528498, - "learning_rate": 0.0010858974908234014, - "loss": 0.9745, - "step": 7658 - }, - { - 
"epoch": 0.5891085301130682, - "learning_rate": 0.0010855491253949027, - "loss": 1.3895, - "step": 7659 - }, - { - "epoch": 0.5891854472732867, - "learning_rate": 0.0010852007841666224, - "loss": 1.146, - "step": 7660 - }, - { - "epoch": 0.5892623644335051, - "learning_rate": 0.0010848524671589008, - "loss": 1.1424, - "step": 7661 - }, - { - "epoch": 0.5893392815937235, - "learning_rate": 0.0010845041743920763, - "loss": 1.1147, - "step": 7662 - }, - { - "epoch": 0.589416198753942, - "learning_rate": 0.0010841559058864857, - "loss": 0.9315, - "step": 7663 - }, - { - "epoch": 0.5894931159141604, - "learning_rate": 0.0010838076616624657, - "loss": 1.2049, - "step": 7664 - }, - { - "epoch": 0.5895700330743789, - "learning_rate": 0.0010834594417403495, - "loss": 0.9668, - "step": 7665 - }, - { - "epoch": 0.5896469502345973, - "learning_rate": 0.0010831112461404712, - "loss": 0.8758, - "step": 7666 - }, - { - "epoch": 0.5897238673948157, - "learning_rate": 0.0010827630748831618, - "loss": 0.9744, - "step": 7667 - }, - { - "epoch": 0.5898007845550343, - "learning_rate": 0.0010824149279887504, - "loss": 1.1687, - "step": 7668 - }, - { - "epoch": 0.5898777017152527, - "learning_rate": 0.0010820668054775675, - "loss": 1.3187, - "step": 7669 - }, - { - "epoch": 0.5899546188754711, - "learning_rate": 0.0010817187073699388, - "loss": 0.8032, - "step": 7670 - }, - { - "epoch": 0.5900315360356896, - "learning_rate": 0.0010813706336861909, - "loss": 1.0688, - "step": 7671 - }, - { - "epoch": 0.590108453195908, - "learning_rate": 0.001081022584446648, - "loss": 0.9207, - "step": 7672 - }, - { - "epoch": 0.5901853703561265, - "learning_rate": 0.0010806745596716328, - "loss": 1.4195, - "step": 7673 - }, - { - "epoch": 0.5902622875163449, - "learning_rate": 0.0010803265593814673, - "loss": 1.1843, - "step": 7674 - }, - { - "epoch": 0.5903392046765633, - "learning_rate": 0.0010799785835964714, - "loss": 1.6493, - "step": 7675 - }, - { - "epoch": 0.5904161218367818, - "learning_rate": 0.0010796306323369634, - "loss": 1.2095, - "step": 7676 - }, - { - "epoch": 0.5904930389970002, - "learning_rate": 0.0010792827056232614, - "loss": 1.1495, - "step": 7677 - }, - { - "epoch": 0.5905699561572186, - "learning_rate": 0.0010789348034756809, - "loss": 1.1788, - "step": 7678 - }, - { - "epoch": 0.5906468733174371, - "learning_rate": 0.0010785869259145348, - "loss": 1.1645, - "step": 7679 - }, - { - "epoch": 0.5907237904776556, - "learning_rate": 0.0010782390729601385, - "loss": 1.136, - "step": 7680 - }, - { - "epoch": 0.590800707637874, - "learning_rate": 0.0010778912446328022, - "loss": 0.9883, - "step": 7681 - }, - { - "epoch": 0.5908776247980925, - "learning_rate": 0.0010775434409528353, - "loss": 1.2706, - "step": 7682 - }, - { - "epoch": 0.5909545419583109, - "learning_rate": 0.0010771956619405477, - "loss": 1.3735, - "step": 7683 - }, - { - "epoch": 0.5910314591185294, - "learning_rate": 0.0010768479076162457, - "loss": 1.0717, - "step": 7684 - }, - { - "epoch": 0.5911083762787478, - "learning_rate": 0.0010765001780002356, - "loss": 1.087, - "step": 7685 - }, - { - "epoch": 0.5911852934389662, - "learning_rate": 0.0010761524731128214, - "loss": 1.3048, - "step": 7686 - }, - { - "epoch": 0.5912622105991847, - "learning_rate": 0.0010758047929743059, - "loss": 1.2326, - "step": 7687 - }, - { - "epoch": 0.5913391277594031, - "learning_rate": 0.001075457137604991, - "loss": 0.9792, - "step": 7688 - }, - { - "epoch": 0.5914160449196215, - "learning_rate": 0.0010751095070251763, - "loss": 0.9589, - "step": 7689 - }, - { 
- "epoch": 0.59149296207984, - "learning_rate": 0.0010747619012551595, - "loss": 1.0688, - "step": 7690 - }, - { - "epoch": 0.5915698792400584, - "learning_rate": 0.0010744143203152395, - "loss": 1.0496, - "step": 7691 - }, - { - "epoch": 0.591646796400277, - "learning_rate": 0.0010740667642257097, - "loss": 1.4762, - "step": 7692 - }, - { - "epoch": 0.5917237135604954, - "learning_rate": 0.0010737192330068665, - "loss": 1.1987, - "step": 7693 - }, - { - "epoch": 0.5918006307207138, - "learning_rate": 0.0010733717266790013, - "loss": 0.9884, - "step": 7694 - }, - { - "epoch": 0.5918775478809323, - "learning_rate": 0.0010730242452624051, - "loss": 1.2176, - "step": 7695 - }, - { - "epoch": 0.5919544650411507, - "learning_rate": 0.0010726767887773688, - "loss": 1.1729, - "step": 7696 - }, - { - "epoch": 0.5920313822013691, - "learning_rate": 0.0010723293572441799, - "loss": 1.1773, - "step": 7697 - }, - { - "epoch": 0.5921082993615876, - "learning_rate": 0.0010719819506831252, - "loss": 1.1422, - "step": 7698 - }, - { - "epoch": 0.592185216521806, - "learning_rate": 0.0010716345691144911, - "loss": 1.1546, - "step": 7699 - }, - { - "epoch": 0.5922621336820244, - "learning_rate": 0.0010712872125585608, - "loss": 0.9597, - "step": 7700 - }, - { - "epoch": 0.5923390508422429, - "learning_rate": 0.0010709398810356163, - "loss": 1.353, - "step": 7701 - }, - { - "epoch": 0.5924159680024613, - "learning_rate": 0.0010705925745659404, - "loss": 1.2339, - "step": 7702 - }, - { - "epoch": 0.5924928851626798, - "learning_rate": 0.0010702452931698105, - "loss": 1.377, - "step": 7703 - }, - { - "epoch": 0.5925698023228982, - "learning_rate": 0.0010698980368675066, - "loss": 1.0627, - "step": 7704 - }, - { - "epoch": 0.5926467194831166, - "learning_rate": 0.0010695508056793044, - "loss": 1.0616, - "step": 7705 - }, - { - "epoch": 0.5927236366433352, - "learning_rate": 0.001069203599625479, - "loss": 1.4626, - "step": 7706 - }, - { - "epoch": 0.5928005538035536, - "learning_rate": 0.0010688564187263043, - "loss": 1.0914, - "step": 7707 - }, - { - "epoch": 0.592877470963772, - "learning_rate": 0.001068509263002053, - "loss": 1.2574, - "step": 7708 - }, - { - "epoch": 0.5929543881239905, - "learning_rate": 0.0010681621324729951, - "loss": 1.2338, - "step": 7709 - }, - { - "epoch": 0.5930313052842089, - "learning_rate": 0.0010678150271594008, - "loss": 1.4186, - "step": 7710 - }, - { - "epoch": 0.5931082224444274, - "learning_rate": 0.001067467947081537, - "loss": 1.1059, - "step": 7711 - }, - { - "epoch": 0.5931851396046458, - "learning_rate": 0.001067120892259671, - "loss": 1.0338, - "step": 7712 - }, - { - "epoch": 0.5932620567648642, - "learning_rate": 0.001066773862714067, - "loss": 0.9603, - "step": 7713 - }, - { - "epoch": 0.5933389739250827, - "learning_rate": 0.0010664268584649886, - "loss": 1.025, - "step": 7714 - }, - { - "epoch": 0.5934158910853011, - "learning_rate": 0.0010660798795326984, - "loss": 1.0605, - "step": 7715 - }, - { - "epoch": 0.5934928082455195, - "learning_rate": 0.0010657329259374558, - "loss": 0.9222, - "step": 7716 - }, - { - "epoch": 0.593569725405738, - "learning_rate": 0.00106538599769952, - "loss": 1.4801, - "step": 7717 - }, - { - "epoch": 0.5936466425659565, - "learning_rate": 0.001065039094839149, - "loss": 1.4249, - "step": 7718 - }, - { - "epoch": 0.5937235597261749, - "learning_rate": 0.0010646922173765986, - "loss": 1.2505, - "step": 7719 - }, - { - "epoch": 0.5938004768863934, - "learning_rate": 0.0010643453653321228, - "loss": 1.09, - "step": 7720 - }, - { - 
"epoch": 0.5938773940466118, - "learning_rate": 0.0010639985387259757, - "loss": 0.8344, - "step": 7721 - }, - { - "epoch": 0.5939543112068303, - "learning_rate": 0.001063651737578408, - "loss": 0.9064, - "step": 7722 - }, - { - "epoch": 0.5940312283670487, - "learning_rate": 0.0010633049619096703, - "loss": 1.3103, - "step": 7723 - }, - { - "epoch": 0.5941081455272671, - "learning_rate": 0.001062958211740011, - "loss": 1.344, - "step": 7724 - }, - { - "epoch": 0.5941850626874856, - "learning_rate": 0.001062611487089677, - "loss": 1.3596, - "step": 7725 - }, - { - "epoch": 0.594261979847704, - "learning_rate": 0.0010622647879789144, - "loss": 1.2105, - "step": 7726 - }, - { - "epoch": 0.5943388970079224, - "learning_rate": 0.0010619181144279673, - "loss": 1.4586, - "step": 7727 - }, - { - "epoch": 0.5944158141681409, - "learning_rate": 0.0010615714664570773, - "loss": 1.382, - "step": 7728 - }, - { - "epoch": 0.5944927313283593, - "learning_rate": 0.0010612248440864866, - "loss": 1.0237, - "step": 7729 - }, - { - "epoch": 0.5945696484885779, - "learning_rate": 0.0010608782473364344, - "loss": 1.4369, - "step": 7730 - }, - { - "epoch": 0.5946465656487963, - "learning_rate": 0.001060531676227159, - "loss": 1.1992, - "step": 7731 - }, - { - "epoch": 0.5947234828090147, - "learning_rate": 0.0010601851307788974, - "loss": 1.3042, - "step": 7732 - }, - { - "epoch": 0.5948003999692332, - "learning_rate": 0.001059838611011884, - "loss": 0.9944, - "step": 7733 - }, - { - "epoch": 0.5948773171294516, - "learning_rate": 0.001059492116946353, - "loss": 1.1956, - "step": 7734 - }, - { - "epoch": 0.59495423428967, - "learning_rate": 0.0010591456486025363, - "loss": 1.5136, - "step": 7735 - }, - { - "epoch": 0.5950311514498885, - "learning_rate": 0.0010587992060006645, - "loss": 1.3086, - "step": 7736 - }, - { - "epoch": 0.5951080686101069, - "learning_rate": 0.0010584527891609671, - "loss": 0.9448, - "step": 7737 - }, - { - "epoch": 0.5951849857703254, - "learning_rate": 0.0010581063981036712, - "loss": 1.2859, - "step": 7738 - }, - { - "epoch": 0.5952619029305438, - "learning_rate": 0.0010577600328490037, - "loss": 1.4452, - "step": 7739 - }, - { - "epoch": 0.5953388200907622, - "learning_rate": 0.001057413693417189, - "loss": 1.2173, - "step": 7740 - }, - { - "epoch": 0.5954157372509807, - "learning_rate": 0.0010570673798284492, - "loss": 1.3203, - "step": 7741 - }, - { - "epoch": 0.5954926544111991, - "learning_rate": 0.0010567210921030071, - "loss": 1.053, - "step": 7742 - }, - { - "epoch": 0.5955695715714175, - "learning_rate": 0.0010563748302610824, - "loss": 1.629, - "step": 7743 - }, - { - "epoch": 0.5956464887316361, - "learning_rate": 0.0010560285943228935, - "loss": 1.1092, - "step": 7744 - }, - { - "epoch": 0.5957234058918545, - "learning_rate": 0.001055682384308658, - "loss": 1.1787, - "step": 7745 - }, - { - "epoch": 0.5958003230520729, - "learning_rate": 0.0010553362002385912, - "loss": 1.0458, - "step": 7746 - }, - { - "epoch": 0.5958772402122914, - "learning_rate": 0.0010549900421329065, - "loss": 1.1286, - "step": 7747 - }, - { - "epoch": 0.5959541573725098, - "learning_rate": 0.0010546439100118173, - "loss": 1.1391, - "step": 7748 - }, - { - "epoch": 0.5960310745327283, - "learning_rate": 0.0010542978038955342, - "loss": 1.2542, - "step": 7749 - }, - { - "epoch": 0.5961079916929467, - "learning_rate": 0.0010539517238042672, - "loss": 1.3272, - "step": 7750 - }, - { - "epoch": 0.5961849088531651, - "learning_rate": 0.0010536056697582241, - "loss": 1.249, - "step": 7751 - }, - { - 
"epoch": 0.5962618260133836, - "learning_rate": 0.0010532596417776102, - "loss": 1.1186, - "step": 7752 - }, - { - "epoch": 0.596338743173602, - "learning_rate": 0.0010529136398826325, - "loss": 0.9633, - "step": 7753 - }, - { - "epoch": 0.5964156603338204, - "learning_rate": 0.001052567664093493, - "loss": 1.0394, - "step": 7754 - }, - { - "epoch": 0.596492577494039, - "learning_rate": 0.0010522217144303934, - "loss": 0.9104, - "step": 7755 - }, - { - "epoch": 0.5965694946542573, - "learning_rate": 0.0010518757909135348, - "loss": 1.4556, - "step": 7756 - }, - { - "epoch": 0.5966464118144759, - "learning_rate": 0.0010515298935631157, - "loss": 1.1109, - "step": 7757 - }, - { - "epoch": 0.5967233289746943, - "learning_rate": 0.0010511840223993338, - "loss": 0.9677, - "step": 7758 - }, - { - "epoch": 0.5968002461349127, - "learning_rate": 0.0010508381774423847, - "loss": 1.2739, - "step": 7759 - }, - { - "epoch": 0.5968771632951312, - "learning_rate": 0.001050492358712462, - "loss": 1.031, - "step": 7760 - }, - { - "epoch": 0.5969540804553496, - "learning_rate": 0.0010501465662297591, - "loss": 1.2035, - "step": 7761 - }, - { - "epoch": 0.597030997615568, - "learning_rate": 0.0010498008000144677, - "loss": 0.935, - "step": 7762 - }, - { - "epoch": 0.5971079147757865, - "learning_rate": 0.0010494550600867755, - "loss": 0.9552, - "step": 7763 - }, - { - "epoch": 0.5971848319360049, - "learning_rate": 0.001049109346466873, - "loss": 1.1457, - "step": 7764 - }, - { - "epoch": 0.5972617490962233, - "learning_rate": 0.0010487636591749454, - "loss": 1.3244, - "step": 7765 - }, - { - "epoch": 0.5973386662564418, - "learning_rate": 0.0010484179982311775, - "loss": 0.9031, - "step": 7766 - }, - { - "epoch": 0.5974155834166602, - "learning_rate": 0.0010480723636557539, - "loss": 1.2468, - "step": 7767 - }, - { - "epoch": 0.5974925005768787, - "learning_rate": 0.0010477267554688556, - "loss": 1.4144, - "step": 7768 - }, - { - "epoch": 0.5975694177370972, - "learning_rate": 0.0010473811736906634, - "loss": 1.0551, - "step": 7769 - }, - { - "epoch": 0.5976463348973156, - "learning_rate": 0.0010470356183413566, - "loss": 1.4656, - "step": 7770 - }, - { - "epoch": 0.5977232520575341, - "learning_rate": 0.0010466900894411117, - "loss": 1.3411, - "step": 7771 - }, - { - "epoch": 0.5978001692177525, - "learning_rate": 0.001046344587010105, - "loss": 1.3289, - "step": 7772 - }, - { - "epoch": 0.5978770863779709, - "learning_rate": 0.0010459991110685109, - "loss": 1.1409, - "step": 7773 - }, - { - "epoch": 0.5979540035381894, - "learning_rate": 0.0010456536616365013, - "loss": 1.211, - "step": 7774 - }, - { - "epoch": 0.5980309206984078, - "learning_rate": 0.0010453082387342487, - "loss": 1.1654, - "step": 7775 - }, - { - "epoch": 0.5981078378586263, - "learning_rate": 0.0010449628423819207, - "loss": 1.2061, - "step": 7776 - }, - { - "epoch": 0.5981847550188447, - "learning_rate": 0.001044617472599688, - "loss": 1.2899, - "step": 7777 - }, - { - "epoch": 0.5982616721790631, - "learning_rate": 0.001044272129407715, - "loss": 1.429, - "step": 7778 - }, - { - "epoch": 0.5983385893392816, - "learning_rate": 0.001043926812826167, - "loss": 1.2076, - "step": 7779 - }, - { - "epoch": 0.5984155064995, - "learning_rate": 0.0010435815228752082, - "loss": 1.0541, - "step": 7780 - }, - { - "epoch": 0.5984924236597184, - "learning_rate": 0.0010432362595749996, - "loss": 1.0445, - "step": 7781 - }, - { - "epoch": 0.598569340819937, - "learning_rate": 0.0010428910229457018, - "loss": 1.3896, - "step": 7782 - }, - { - 
"epoch": 0.5986462579801554, - "learning_rate": 0.0010425458130074738, - "loss": 1.277, - "step": 7783 - }, - { - "epoch": 0.5987231751403738, - "learning_rate": 0.0010422006297804729, - "loss": 0.8614, - "step": 7784 - }, - { - "epoch": 0.5988000923005923, - "learning_rate": 0.0010418554732848535, - "loss": 1.2623, - "step": 7785 - }, - { - "epoch": 0.5988770094608107, - "learning_rate": 0.0010415103435407712, - "loss": 0.9991, - "step": 7786 - }, - { - "epoch": 0.5989539266210292, - "learning_rate": 0.001041165240568377, - "loss": 1.8123, - "step": 7787 - }, - { - "epoch": 0.5990308437812476, - "learning_rate": 0.0010408201643878235, - "loss": 0.9383, - "step": 7788 - }, - { - "epoch": 0.599107760941466, - "learning_rate": 0.0010404751150192586, - "loss": 1.3833, - "step": 7789 - }, - { - "epoch": 0.5991846781016845, - "learning_rate": 0.0010401300924828306, - "loss": 1.1599, - "step": 7790 - }, - { - "epoch": 0.5992615952619029, - "learning_rate": 0.0010397850967986861, - "loss": 1.0424, - "step": 7791 - }, - { - "epoch": 0.5993385124221213, - "learning_rate": 0.001039440127986969, - "loss": 1.0356, - "step": 7792 - }, - { - "epoch": 0.5994154295823398, - "learning_rate": 0.001039095186067823, - "loss": 0.8999, - "step": 7793 - }, - { - "epoch": 0.5994923467425582, - "learning_rate": 0.0010387502710613894, - "loss": 0.7223, - "step": 7794 - }, - { - "epoch": 0.5995692639027768, - "learning_rate": 0.001038405382987808, - "loss": 0.7494, - "step": 7795 - }, - { - "epoch": 0.5996461810629952, - "learning_rate": 0.0010380605218672178, - "loss": 1.0897, - "step": 7796 - }, - { - "epoch": 0.5997230982232136, - "learning_rate": 0.0010377156877197553, - "loss": 0.9918, - "step": 7797 - }, - { - "epoch": 0.5998000153834321, - "learning_rate": 0.0010373708805655546, - "loss": 1.4431, - "step": 7798 - }, - { - "epoch": 0.5998769325436505, - "learning_rate": 0.0010370261004247513, - "loss": 0.9219, - "step": 7799 - }, - { - "epoch": 0.5999538497038689, - "learning_rate": 0.0010366813473174762, - "loss": 1.0544, - "step": 7800 - }, - { - "epoch": 0.6000307668640874, - "learning_rate": 0.00103633662126386, - "loss": 1.3724, - "step": 7801 - }, - { - "epoch": 0.6001076840243058, - "learning_rate": 0.0010359919222840317, - "loss": 1.1616, - "step": 7802 - }, - { - "epoch": 0.6001846011845242, - "learning_rate": 0.0010356472503981187, - "loss": 0.9862, - "step": 7803 - }, - { - "epoch": 0.6002615183447427, - "learning_rate": 0.0010353026056262463, - "loss": 1.2182, - "step": 7804 - }, - { - "epoch": 0.6003384355049611, - "learning_rate": 0.0010349579879885395, - "loss": 1.2488, - "step": 7805 - }, - { - "epoch": 0.6004153526651796, - "learning_rate": 0.0010346133975051204, - "loss": 1.319, - "step": 7806 - }, - { - "epoch": 0.600492269825398, - "learning_rate": 0.0010342688341961098, - "loss": 1.0387, - "step": 7807 - }, - { - "epoch": 0.6005691869856165, - "learning_rate": 0.001033924298081628, - "loss": 0.9835, - "step": 7808 - }, - { - "epoch": 0.600646104145835, - "learning_rate": 0.001033579789181791, - "loss": 0.9648, - "step": 7809 - }, - { - "epoch": 0.6007230213060534, - "learning_rate": 0.0010332353075167174, - "loss": 1.1639, - "step": 7810 - }, - { - "epoch": 0.6007999384662718, - "learning_rate": 0.0010328908531065202, - "loss": 1.1934, - "step": 7811 - }, - { - "epoch": 0.6008768556264903, - "learning_rate": 0.0010325464259713125, - "loss": 0.9779, - "step": 7812 - }, - { - "epoch": 0.6009537727867087, - "learning_rate": 0.0010322020261312066, - "loss": 1.212, - "step": 7813 - }, - { - 
"epoch": 0.6010306899469272, - "learning_rate": 0.0010318576536063116, - "loss": 0.8679, - "step": 7814 - }, - { - "epoch": 0.6011076071071456, - "learning_rate": 0.0010315133084167363, - "loss": 0.994, - "step": 7815 - }, - { - "epoch": 0.601184524267364, - "learning_rate": 0.0010311689905825872, - "loss": 1.0423, - "step": 7816 - }, - { - "epoch": 0.6012614414275825, - "learning_rate": 0.0010308247001239693, - "loss": 0.9352, - "step": 7817 - }, - { - "epoch": 0.6013383585878009, - "learning_rate": 0.001030480437060986, - "loss": 1.1421, - "step": 7818 - }, - { - "epoch": 0.6014152757480193, - "learning_rate": 0.00103013620141374, - "loss": 0.9548, - "step": 7819 - }, - { - "epoch": 0.6014921929082379, - "learning_rate": 0.0010297919932023296, - "loss": 1.0947, - "step": 7820 - }, - { - "epoch": 0.6015691100684563, - "learning_rate": 0.0010294478124468558, - "loss": 1.029, - "step": 7821 - }, - { - "epoch": 0.6016460272286747, - "learning_rate": 0.0010291036591674144, - "loss": 0.8608, - "step": 7822 - }, - { - "epoch": 0.6017229443888932, - "learning_rate": 0.0010287595333841007, - "loss": 1.2513, - "step": 7823 - }, - { - "epoch": 0.6017998615491116, - "learning_rate": 0.0010284154351170095, - "loss": 1.3662, - "step": 7824 - }, - { - "epoch": 0.6018767787093301, - "learning_rate": 0.0010280713643862319, - "loss": 0.9338, - "step": 7825 - }, - { - "epoch": 0.6019536958695485, - "learning_rate": 0.0010277273212118596, - "loss": 0.8625, - "step": 7826 - }, - { - "epoch": 0.6020306130297669, - "learning_rate": 0.0010273833056139812, - "loss": 1.2494, - "step": 7827 - }, - { - "epoch": 0.6021075301899854, - "learning_rate": 0.001027039317612684, - "loss": 0.8106, - "step": 7828 - }, - { - "epoch": 0.6021844473502038, - "learning_rate": 0.001026695357228054, - "loss": 1.3656, - "step": 7829 - }, - { - "epoch": 0.6022613645104222, - "learning_rate": 0.0010263514244801756, - "loss": 0.9023, - "step": 7830 - }, - { - "epoch": 0.6023382816706407, - "learning_rate": 0.0010260075193891307, - "loss": 0.9513, - "step": 7831 - }, - { - "epoch": 0.6024151988308591, - "learning_rate": 0.0010256636419750014, - "loss": 1.0575, - "step": 7832 - }, - { - "epoch": 0.6024921159910777, - "learning_rate": 0.0010253197922578652, - "loss": 0.9623, - "step": 7833 - }, - { - "epoch": 0.6025690331512961, - "learning_rate": 0.0010249759702578023, - "loss": 1.0004, - "step": 7834 - }, - { - "epoch": 0.6026459503115145, - "learning_rate": 0.001024632175994887, - "loss": 0.9848, - "step": 7835 - }, - { - "epoch": 0.602722867471733, - "learning_rate": 0.0010242884094891942, - "loss": 1.2173, - "step": 7836 - }, - { - "epoch": 0.6027997846319514, - "learning_rate": 0.0010239446707607973, - "loss": 1.2593, - "step": 7837 - }, - { - "epoch": 0.6028767017921698, - "learning_rate": 0.0010236009598297668, - "loss": 0.8232, - "step": 7838 - }, - { - "epoch": 0.6029536189523883, - "learning_rate": 0.0010232572767161728, - "loss": 1.0004, - "step": 7839 - }, - { - "epoch": 0.6030305361126067, - "learning_rate": 0.0010229136214400837, - "loss": 1.1045, - "step": 7840 - }, - { - "epoch": 0.6031074532728252, - "learning_rate": 0.001022569994021565, - "loss": 1.2416, - "step": 7841 - }, - { - "epoch": 0.6031843704330436, - "learning_rate": 0.001022226394480682, - "loss": 0.8683, - "step": 7842 - }, - { - "epoch": 0.603261287593262, - "learning_rate": 0.0010218828228374984, - "loss": 1.2691, - "step": 7843 - }, - { - "epoch": 0.6033382047534805, - "learning_rate": 0.001021539279112074, - "loss": 1.0257, - "step": 7844 - }, - { - 
"epoch": 0.603415121913699, - "learning_rate": 0.0010211957633244708, - "loss": 1.4517, - "step": 7845 - }, - { - "epoch": 0.6034920390739174, - "learning_rate": 0.0010208522754947453, - "loss": 0.9462, - "step": 7846 - }, - { - "epoch": 0.6035689562341359, - "learning_rate": 0.0010205088156429547, - "loss": 1.1834, - "step": 7847 - }, - { - "epoch": 0.6036458733943543, - "learning_rate": 0.0010201653837891545, - "loss": 1.2009, - "step": 7848 - }, - { - "epoch": 0.6037227905545727, - "learning_rate": 0.0010198219799533973, - "loss": 1.239, - "step": 7849 - }, - { - "epoch": 0.6037997077147912, - "learning_rate": 0.001019478604155735, - "loss": 1.2887, - "step": 7850 - }, - { - "epoch": 0.6038766248750096, - "learning_rate": 0.0010191352564162181, - "loss": 1.0743, - "step": 7851 - }, - { - "epoch": 0.6039535420352281, - "learning_rate": 0.0010187919367548944, - "loss": 0.8909, - "step": 7852 - }, - { - "epoch": 0.6040304591954465, - "learning_rate": 0.0010184486451918114, - "loss": 1.0651, - "step": 7853 - }, - { - "epoch": 0.6041073763556649, - "learning_rate": 0.0010181053817470143, - "loss": 0.8757, - "step": 7854 - }, - { - "epoch": 0.6041842935158834, - "learning_rate": 0.001017762146440545, - "loss": 1.1608, - "step": 7855 - }, - { - "epoch": 0.6042612106761018, - "learning_rate": 0.0010174189392924474, - "loss": 1.1209, - "step": 7856 - }, - { - "epoch": 0.6043381278363202, - "learning_rate": 0.0010170757603227606, - "loss": 1.1479, - "step": 7857 - }, - { - "epoch": 0.6044150449965388, - "learning_rate": 0.0010167326095515234, - "loss": 0.9983, - "step": 7858 - }, - { - "epoch": 0.6044919621567572, - "learning_rate": 0.0010163894869987728, - "loss": 1.2428, - "step": 7859 - }, - { - "epoch": 0.6045688793169757, - "learning_rate": 0.0010160463926845438, - "loss": 1.0819, - "step": 7860 - }, - { - "epoch": 0.6046457964771941, - "learning_rate": 0.0010157033266288708, - "loss": 1.0805, - "step": 7861 - }, - { - "epoch": 0.6047227136374125, - "learning_rate": 0.001015360288851785, - "loss": 1.3644, - "step": 7862 - }, - { - "epoch": 0.604799630797631, - "learning_rate": 0.001015017279373317, - "loss": 1.123, - "step": 7863 - }, - { - "epoch": 0.6048765479578494, - "learning_rate": 0.0010146742982134957, - "loss": 1.2639, - "step": 7864 - }, - { - "epoch": 0.6049534651180678, - "learning_rate": 0.0010143313453923478, - "loss": 1.0517, - "step": 7865 - }, - { - "epoch": 0.6050303822782863, - "learning_rate": 0.0010139884209298985, - "loss": 1.2348, - "step": 7866 - }, - { - "epoch": 0.6051072994385047, - "learning_rate": 0.0010136455248461725, - "loss": 1.0953, - "step": 7867 - }, - { - "epoch": 0.6051842165987231, - "learning_rate": 0.001013302657161191, - "loss": 0.6503, - "step": 7868 - }, - { - "epoch": 0.6052611337589416, - "learning_rate": 0.001012959817894974, - "loss": 1.3275, - "step": 7869 - }, - { - "epoch": 0.60533805091916, - "learning_rate": 0.0010126170070675414, - "loss": 1.2339, - "step": 7870 - }, - { - "epoch": 0.6054149680793786, - "learning_rate": 0.001012274224698909, - "loss": 1.1524, - "step": 7871 - }, - { - "epoch": 0.605491885239597, - "learning_rate": 0.0010119314708090935, - "loss": 0.8756, - "step": 7872 - }, - { - "epoch": 0.6055688023998154, - "learning_rate": 0.001011588745418108, - "loss": 1.1831, - "step": 7873 - }, - { - "epoch": 0.6056457195600339, - "learning_rate": 0.0010112460485459645, - "loss": 1.2186, - "step": 7874 - }, - { - "epoch": 0.6057226367202523, - "learning_rate": 0.0010109033802126737, - "loss": 1.281, - "step": 7875 - }, - { - 
"epoch": 0.6057995538804707, - "learning_rate": 0.0010105607404382443, - "loss": 1.1512, - "step": 7876 - }, - { - "epoch": 0.6058764710406892, - "learning_rate": 0.0010102181292426832, - "loss": 0.9668, - "step": 7877 - }, - { - "epoch": 0.6059533882009076, - "learning_rate": 0.001009875546645996, - "loss": 1.2598, - "step": 7878 - }, - { - "epoch": 0.6060303053611261, - "learning_rate": 0.0010095329926681864, - "loss": 1.3061, - "step": 7879 - }, - { - "epoch": 0.6061072225213445, - "learning_rate": 0.001009190467329257, - "loss": 1.2282, - "step": 7880 - }, - { - "epoch": 0.6061841396815629, - "learning_rate": 0.0010088479706492075, - "loss": 1.0917, - "step": 7881 - }, - { - "epoch": 0.6062610568417814, - "learning_rate": 0.0010085055026480365, - "loss": 1.3351, - "step": 7882 - }, - { - "epoch": 0.6063379740019998, - "learning_rate": 0.0010081630633457415, - "loss": 1.4174, - "step": 7883 - }, - { - "epoch": 0.6064148911622183, - "learning_rate": 0.0010078206527623181, - "loss": 0.8827, - "step": 7884 - }, - { - "epoch": 0.6064918083224368, - "learning_rate": 0.0010074782709177594, - "loss": 1.1687, - "step": 7885 - }, - { - "epoch": 0.6065687254826552, - "learning_rate": 0.001007135917832058, - "loss": 1.2557, - "step": 7886 - }, - { - "epoch": 0.6066456426428736, - "learning_rate": 0.001006793593525204, - "loss": 1.2157, - "step": 7887 - }, - { - "epoch": 0.6067225598030921, - "learning_rate": 0.0010064512980171856, - "loss": 1.1743, - "step": 7888 - }, - { - "epoch": 0.6067994769633105, - "learning_rate": 0.001006109031327991, - "loss": 1.2562, - "step": 7889 - }, - { - "epoch": 0.606876394123529, - "learning_rate": 0.001005766793477604, - "loss": 1.0796, - "step": 7890 - }, - { - "epoch": 0.6069533112837474, - "learning_rate": 0.0010054245844860095, - "loss": 1.1306, - "step": 7891 - }, - { - "epoch": 0.6070302284439658, - "learning_rate": 0.0010050824043731893, - "loss": 1.0124, - "step": 7892 - }, - { - "epoch": 0.6071071456041843, - "learning_rate": 0.0010047402531591224, - "loss": 1.2791, - "step": 7893 - }, - { - "epoch": 0.6071840627644027, - "learning_rate": 0.001004398130863789, - "loss": 1.3058, - "step": 7894 - }, - { - "epoch": 0.6072609799246211, - "learning_rate": 0.0010040560375071653, - "loss": 1.0634, - "step": 7895 - }, - { - "epoch": 0.6073378970848397, - "learning_rate": 0.0010037139731092258, - "loss": 1.2333, - "step": 7896 - }, - { - "epoch": 0.607414814245058, - "learning_rate": 0.001003371937689945, - "loss": 1.1136, - "step": 7897 - }, - { - "epoch": 0.6074917314052766, - "learning_rate": 0.0010030299312692942, - "loss": 1.0772, - "step": 7898 - }, - { - "epoch": 0.607568648565495, - "learning_rate": 0.0010026879538672438, - "loss": 1.3217, - "step": 7899 - }, - { - "epoch": 0.6076455657257134, - "learning_rate": 0.001002346005503762, - "loss": 0.9078, - "step": 7900 - }, - { - "epoch": 0.6077224828859319, - "learning_rate": 0.0010020040861988156, - "loss": 1.3689, - "step": 7901 - }, - { - "epoch": 0.6077994000461503, - "learning_rate": 0.0010016621959723696, - "loss": 1.2904, - "step": 7902 - }, - { - "epoch": 0.6078763172063687, - "learning_rate": 0.0010013203348443877, - "loss": 1.0072, - "step": 7903 - }, - { - "epoch": 0.6079532343665872, - "learning_rate": 0.0010009785028348302, - "loss": 0.9837, - "step": 7904 - }, - { - "epoch": 0.6080301515268056, - "learning_rate": 0.001000636699963659, - "loss": 1.3249, - "step": 7905 - }, - { - "epoch": 0.608107068687024, - "learning_rate": 0.001000294926250831, - "loss": 1.1906, - "step": 7906 - }, - { - 
"epoch": 0.6081839858472425, - "learning_rate": 0.0009999531817163026, - "loss": 1.3993, - "step": 7907 - }, - { - "epoch": 0.6082609030074609, - "learning_rate": 0.0009996114663800295, - "loss": 1.2132, - "step": 7908 - }, - { - "epoch": 0.6083378201676795, - "learning_rate": 0.0009992697802619639, - "loss": 0.8712, - "step": 7909 - }, - { - "epoch": 0.6084147373278979, - "learning_rate": 0.000998928123382058, - "loss": 1.2964, - "step": 7910 - }, - { - "epoch": 0.6084916544881163, - "learning_rate": 0.0009985864957602615, - "loss": 1.3279, - "step": 7911 - }, - { - "epoch": 0.6085685716483348, - "learning_rate": 0.0009982448974165216, - "loss": 1.19, - "step": 7912 - }, - { - "epoch": 0.6086454888085532, - "learning_rate": 0.0009979033283707853, - "loss": 1.4645, - "step": 7913 - }, - { - "epoch": 0.6087224059687716, - "learning_rate": 0.0009975617886429972, - "loss": 1.7533, - "step": 7914 - }, - { - "epoch": 0.6087993231289901, - "learning_rate": 0.000997220278253099, - "loss": 0.7365, - "step": 7915 - }, - { - "epoch": 0.6088762402892085, - "learning_rate": 0.0009968787972210336, - "loss": 1.1548, - "step": 7916 - }, - { - "epoch": 0.608953157449427, - "learning_rate": 0.0009965373455667389, - "loss": 0.9278, - "step": 7917 - }, - { - "epoch": 0.6090300746096454, - "learning_rate": 0.0009961959233101539, - "loss": 0.9917, - "step": 7918 - }, - { - "epoch": 0.6091069917698638, - "learning_rate": 0.0009958545304712138, - "loss": 1.0208, - "step": 7919 - }, - { - "epoch": 0.6091839089300823, - "learning_rate": 0.000995513167069853, - "loss": 1.0637, - "step": 7920 - }, - { - "epoch": 0.6092608260903007, - "learning_rate": 0.0009951718331260042, - "loss": 1.2475, - "step": 7921 - }, - { - "epoch": 0.6093377432505191, - "learning_rate": 0.000994830528659598, - "loss": 1.2193, - "step": 7922 - }, - { - "epoch": 0.6094146604107377, - "learning_rate": 0.0009944892536905637, - "loss": 1.1543, - "step": 7923 - }, - { - "epoch": 0.6094915775709561, - "learning_rate": 0.000994148008238829, - "loss": 1.5268, - "step": 7924 - }, - { - "epoch": 0.6095684947311745, - "learning_rate": 0.0009938067923243195, - "loss": 1.1637, - "step": 7925 - }, - { - "epoch": 0.609645411891393, - "learning_rate": 0.0009934656059669578, - "loss": 1.1411, - "step": 7926 - }, - { - "epoch": 0.6097223290516114, - "learning_rate": 0.0009931244491866685, - "loss": 1.1863, - "step": 7927 - }, - { - "epoch": 0.6097992462118299, - "learning_rate": 0.00099278332200337, - "loss": 0.9868, - "step": 7928 - }, - { - "epoch": 0.6098761633720483, - "learning_rate": 0.000992442224436983, - "loss": 1.2126, - "step": 7929 - }, - { - "epoch": 0.6099530805322667, - "learning_rate": 0.0009921011565074227, - "loss": 1.0634, - "step": 7930 - }, - { - "epoch": 0.6100299976924852, - "learning_rate": 0.000991760118234605, - "loss": 1.1871, - "step": 7931 - }, - { - "epoch": 0.6101069148527036, - "learning_rate": 0.000991419109638444, - "loss": 1.1458, - "step": 7932 - }, - { - "epoch": 0.610183832012922, - "learning_rate": 0.0009910781307388514, - "loss": 0.8921, - "step": 7933 - }, - { - "epoch": 0.6102607491731405, - "learning_rate": 0.0009907371815557368, - "loss": 1.3179, - "step": 7934 - }, - { - "epoch": 0.610337666333359, - "learning_rate": 0.000990396262109009, - "loss": 1.1721, - "step": 7935 - }, - { - "epoch": 0.6104145834935775, - "learning_rate": 0.0009900553724185742, - "loss": 1.1723, - "step": 7936 - }, - { - "epoch": 0.6104915006537959, - "learning_rate": 0.0009897145125043383, - "loss": 1.1278, - "step": 7937 - }, - { - 
"epoch": 0.6105684178140143, - "learning_rate": 0.000989373682386204, - "loss": 1.3781, - "step": 7938 - }, - { - "epoch": 0.6106453349742328, - "learning_rate": 0.0009890328820840715, - "loss": 1.0431, - "step": 7939 - }, - { - "epoch": 0.6107222521344512, - "learning_rate": 0.0009886921116178426, - "loss": 1.6225, - "step": 7940 - }, - { - "epoch": 0.6107991692946696, - "learning_rate": 0.0009883513710074138, - "loss": 1.5153, - "step": 7941 - }, - { - "epoch": 0.6108760864548881, - "learning_rate": 0.0009880106602726815, - "loss": 1.1906, - "step": 7942 - }, - { - "epoch": 0.6109530036151065, - "learning_rate": 0.0009876699794335404, - "loss": 1.0122, - "step": 7943 - }, - { - "epoch": 0.6110299207753249, - "learning_rate": 0.0009873293285098833, - "loss": 1.2095, - "step": 7944 - }, - { - "epoch": 0.6111068379355434, - "learning_rate": 0.0009869887075216006, - "loss": 1.3353, - "step": 7945 - }, - { - "epoch": 0.6111837550957618, - "learning_rate": 0.0009866481164885821, - "loss": 1.1863, - "step": 7946 - }, - { - "epoch": 0.6112606722559804, - "learning_rate": 0.000986307555430715, - "loss": 1.2304, - "step": 7947 - }, - { - "epoch": 0.6113375894161988, - "learning_rate": 0.0009859670243678855, - "loss": 0.8882, - "step": 7948 - }, - { - "epoch": 0.6114145065764172, - "learning_rate": 0.0009856265233199773, - "loss": 1.0656, - "step": 7949 - }, - { - "epoch": 0.6114914237366357, - "learning_rate": 0.0009852860523068714, - "loss": 1.01, - "step": 7950 - }, - { - "epoch": 0.6115683408968541, - "learning_rate": 0.0009849456113484505, - "loss": 0.9631, - "step": 7951 - }, - { - "epoch": 0.6116452580570725, - "learning_rate": 0.0009846052004645917, - "loss": 1.7442, - "step": 7952 - }, - { - "epoch": 0.611722175217291, - "learning_rate": 0.000984264819675172, - "loss": 1.1071, - "step": 7953 - }, - { - "epoch": 0.6117990923775094, - "learning_rate": 0.0009839244690000675, - "loss": 1.0134, - "step": 7954 - }, - { - "epoch": 0.6118760095377279, - "learning_rate": 0.0009835841484591506, - "loss": 1.2044, - "step": 7955 - }, - { - "epoch": 0.6119529266979463, - "learning_rate": 0.000983243858072294, - "loss": 1.4278, - "step": 7956 - }, - { - "epoch": 0.6120298438581647, - "learning_rate": 0.0009829035978593669, - "loss": 1.6517, - "step": 7957 - }, - { - "epoch": 0.6121067610183832, - "learning_rate": 0.000982563367840237, - "loss": 1.0621, - "step": 7958 - }, - { - "epoch": 0.6121836781786016, - "learning_rate": 0.000982223168034772, - "loss": 1.0888, - "step": 7959 - }, - { - "epoch": 0.61226059533882, - "learning_rate": 0.0009818829984628362, - "loss": 1.0146, - "step": 7960 - }, - { - "epoch": 0.6123375124990386, - "learning_rate": 0.000981542859144291, - "loss": 0.7239, - "step": 7961 - }, - { - "epoch": 0.612414429659257, - "learning_rate": 0.0009812027500989997, - "loss": 1.0811, - "step": 7962 - }, - { - "epoch": 0.6124913468194755, - "learning_rate": 0.0009808626713468194, - "loss": 0.8919, - "step": 7963 - }, - { - "epoch": 0.6125682639796939, - "learning_rate": 0.0009805226229076096, - "loss": 1.3856, - "step": 7964 - }, - { - "epoch": 0.6126451811399123, - "learning_rate": 0.000980182604801225, - "loss": 0.9222, - "step": 7965 - }, - { - "epoch": 0.6127220983001308, - "learning_rate": 0.0009798426170475197, - "loss": 1.085, - "step": 7966 - }, - { - "epoch": 0.6127990154603492, - "learning_rate": 0.0009795026596663462, - "loss": 1.1534, - "step": 7967 - }, - { - "epoch": 0.6128759326205676, - "learning_rate": 0.000979162732677555, - "loss": 1.3604, - "step": 7968 - }, - { - 
"epoch": 0.6129528497807861, - "learning_rate": 0.000978822836100994, - "loss": 1.1482, - "step": 7969 - }, - { - "epoch": 0.6130297669410045, - "learning_rate": 0.0009784829699565115, - "loss": 1.1764, - "step": 7970 - }, - { - "epoch": 0.6131066841012229, - "learning_rate": 0.000978143134263952, - "loss": 1.4959, - "step": 7971 - }, - { - "epoch": 0.6131836012614414, - "learning_rate": 0.000977803329043158, - "loss": 1.2298, - "step": 7972 - }, - { - "epoch": 0.6132605184216599, - "learning_rate": 0.0009774635543139726, - "loss": 0.8295, - "step": 7973 - }, - { - "epoch": 0.6133374355818784, - "learning_rate": 0.000977123810096234, - "loss": 1.0659, - "step": 7974 - }, - { - "epoch": 0.6134143527420968, - "learning_rate": 0.0009767840964097817, - "loss": 1.3188, - "step": 7975 - }, - { - "epoch": 0.6134912699023152, - "learning_rate": 0.0009764444132744513, - "loss": 1.2851, - "step": 7976 - }, - { - "epoch": 0.6135681870625337, - "learning_rate": 0.0009761047607100767, - "loss": 0.8915, - "step": 7977 - }, - { - "epoch": 0.6136451042227521, - "learning_rate": 0.0009757651387364916, - "loss": 0.7588, - "step": 7978 - }, - { - "epoch": 0.6137220213829705, - "learning_rate": 0.0009754255473735263, - "loss": 1.1247, - "step": 7979 - }, - { - "epoch": 0.613798938543189, - "learning_rate": 0.0009750859866410097, - "loss": 1.2587, - "step": 7980 - }, - { - "epoch": 0.6138758557034074, - "learning_rate": 0.0009747464565587698, - "loss": 1.2248, - "step": 7981 - }, - { - "epoch": 0.6139527728636259, - "learning_rate": 0.0009744069571466314, - "loss": 1.265, - "step": 7982 - }, - { - "epoch": 0.6140296900238443, - "learning_rate": 0.0009740674884244186, - "loss": 1.3845, - "step": 7983 - }, - { - "epoch": 0.6141066071840627, - "learning_rate": 0.0009737280504119539, - "loss": 1.1153, - "step": 7984 - }, - { - "epoch": 0.6141835243442813, - "learning_rate": 0.0009733886431290557, - "loss": 1.1525, - "step": 7985 - }, - { - "epoch": 0.6142604415044997, - "learning_rate": 0.0009730492665955443, - "loss": 1.2444, - "step": 7986 - }, - { - "epoch": 0.6143373586647181, - "learning_rate": 0.0009727099208312354, - "loss": 1.5033, - "step": 7987 - }, - { - "epoch": 0.6144142758249366, - "learning_rate": 0.0009723706058559431, - "loss": 1.3061, - "step": 7988 - }, - { - "epoch": 0.614491192985155, - "learning_rate": 0.0009720313216894813, - "loss": 1.4493, - "step": 7989 - }, - { - "epoch": 0.6145681101453734, - "learning_rate": 0.0009716920683516612, - "loss": 1.0055, - "step": 7990 - }, - { - "epoch": 0.6146450273055919, - "learning_rate": 0.0009713528458622911, - "loss": 1.2471, - "step": 7991 - }, - { - "epoch": 0.6147219444658103, - "learning_rate": 0.0009710136542411798, - "loss": 1.2029, - "step": 7992 - }, - { - "epoch": 0.6147988616260288, - "learning_rate": 0.0009706744935081323, - "loss": 0.9112, - "step": 7993 - }, - { - "epoch": 0.6148757787862472, - "learning_rate": 0.000970335363682953, - "loss": 1.1652, - "step": 7994 - }, - { - "epoch": 0.6149526959464656, - "learning_rate": 0.0009699962647854441, - "loss": 0.9244, - "step": 7995 - }, - { - "epoch": 0.6150296131066841, - "learning_rate": 0.0009696571968354048, - "loss": 1.2673, - "step": 7996 - }, - { - "epoch": 0.6151065302669025, - "learning_rate": 0.0009693181598526353, - "loss": 1.3341, - "step": 7997 - }, - { - "epoch": 0.6151834474271209, - "learning_rate": 0.0009689791538569313, - "loss": 1.091, - "step": 7998 - }, - { - "epoch": 0.6152603645873395, - "learning_rate": 0.0009686401788680875, - "loss": 1.5006, - "step": 7999 - }, - { 
- "epoch": 0.6153372817475579, - "learning_rate": 0.0009683012349058979, - "loss": 1.2326, - "step": 8000 - }, - { - "epoch": 0.6154141989077764, - "learning_rate": 0.0009679623219901529, - "loss": 1.1959, - "step": 8001 - }, - { - "epoch": 0.6154911160679948, - "learning_rate": 0.0009676234401406427, - "loss": 1.3407, - "step": 8002 - }, - { - "epoch": 0.6155680332282132, - "learning_rate": 0.0009672845893771546, - "loss": 1.1276, - "step": 8003 - }, - { - "epoch": 0.6156449503884317, - "learning_rate": 0.0009669457697194744, - "loss": 1.2509, - "step": 8004 - }, - { - "epoch": 0.6157218675486501, - "learning_rate": 0.0009666069811873863, - "loss": 1.461, - "step": 8005 - }, - { - "epoch": 0.6157987847088685, - "learning_rate": 0.000966268223800673, - "loss": 1.1217, - "step": 8006 - }, - { - "epoch": 0.615875701869087, - "learning_rate": 0.0009659294975791134, - "loss": 0.864, - "step": 8007 - }, - { - "epoch": 0.6159526190293054, - "learning_rate": 0.0009655908025424879, - "loss": 1.0173, - "step": 8008 - }, - { - "epoch": 0.6160295361895238, - "learning_rate": 0.0009652521387105719, - "loss": 1.4209, - "step": 8009 - }, - { - "epoch": 0.6161064533497423, - "learning_rate": 0.0009649135061031409, - "loss": 1.2029, - "step": 8010 - }, - { - "epoch": 0.6161833705099607, - "learning_rate": 0.0009645749047399679, - "loss": 1.157, - "step": 8011 - }, - { - "epoch": 0.6162602876701793, - "learning_rate": 0.000964236334640824, - "loss": 1.4025, - "step": 8012 - }, - { - "epoch": 0.6163372048303977, - "learning_rate": 0.0009638977958254792, - "loss": 1.1947, - "step": 8013 - }, - { - "epoch": 0.6164141219906161, - "learning_rate": 0.0009635592883137008, - "loss": 0.9063, - "step": 8014 - }, - { - "epoch": 0.6164910391508346, - "learning_rate": 0.0009632208121252544, - "loss": 1.0217, - "step": 8015 - }, - { - "epoch": 0.616567956311053, - "learning_rate": 0.0009628823672799045, - "loss": 0.8972, - "step": 8016 - }, - { - "epoch": 0.6166448734712714, - "learning_rate": 0.0009625439537974129, - "loss": 0.9665, - "step": 8017 - }, - { - "epoch": 0.6167217906314899, - "learning_rate": 0.0009622055716975396, - "loss": 1.0596, - "step": 8018 - }, - { - "epoch": 0.6167987077917083, - "learning_rate": 0.0009618672210000439, - "loss": 0.9571, - "step": 8019 - }, - { - "epoch": 0.6168756249519268, - "learning_rate": 0.0009615289017246816, - "loss": 1.1336, - "step": 8020 - }, - { - "epoch": 0.6169525421121452, - "learning_rate": 0.0009611906138912084, - "loss": 1.3222, - "step": 8021 - }, - { - "epoch": 0.6170294592723636, - "learning_rate": 0.0009608523575193765, - "loss": 1.5397, - "step": 8022 - }, - { - "epoch": 0.6171063764325821, - "learning_rate": 0.0009605141326289372, - "loss": 1.0812, - "step": 8023 - }, - { - "epoch": 0.6171832935928006, - "learning_rate": 0.0009601759392396401, - "loss": 1.4692, - "step": 8024 - }, - { - "epoch": 0.617260210753019, - "learning_rate": 0.0009598377773712327, - "loss": 0.9726, - "step": 8025 - }, - { - "epoch": 0.6173371279132375, - "learning_rate": 0.00095949964704346, - "loss": 0.8921, - "step": 8026 - }, - { - "epoch": 0.6174140450734559, - "learning_rate": 0.0009591615482760664, - "loss": 1.3792, - "step": 8027 - }, - { - "epoch": 0.6174909622336743, - "learning_rate": 0.0009588234810887937, - "loss": 1.0445, - "step": 8028 - }, - { - "epoch": 0.6175678793938928, - "learning_rate": 0.0009584854455013817, - "loss": 0.7489, - "step": 8029 - }, - { - "epoch": 0.6176447965541112, - "learning_rate": 0.000958147441533569, - "loss": 1.0757, - "step": 8030 - }, - { 
- "epoch": 0.6177217137143297, - "learning_rate": 0.0009578094692050918, - "loss": 1.0493, - "step": 8031 - }, - { - "epoch": 0.6177986308745481, - "learning_rate": 0.0009574715285356849, - "loss": 1.0729, - "step": 8032 - }, - { - "epoch": 0.6178755480347665, - "learning_rate": 0.0009571336195450813, - "loss": 1.4625, - "step": 8033 - }, - { - "epoch": 0.617952465194985, - "learning_rate": 0.0009567957422530109, - "loss": 1.0553, - "step": 8034 - }, - { - "epoch": 0.6180293823552034, - "learning_rate": 0.0009564578966792035, - "loss": 1.4732, - "step": 8035 - }, - { - "epoch": 0.6181062995154218, - "learning_rate": 0.0009561200828433858, - "loss": 1.2348, - "step": 8036 - }, - { - "epoch": 0.6181832166756404, - "learning_rate": 0.000955782300765283, - "loss": 1.2797, - "step": 8037 - }, - { - "epoch": 0.6182601338358588, - "learning_rate": 0.0009554445504646193, - "loss": 0.8568, - "step": 8038 - }, - { - "epoch": 0.6183370509960773, - "learning_rate": 0.0009551068319611153, - "loss": 1.1488, - "step": 8039 - }, - { - "epoch": 0.6184139681562957, - "learning_rate": 0.0009547691452744917, - "loss": 1.1749, - "step": 8040 - }, - { - "epoch": 0.6184908853165141, - "learning_rate": 0.0009544314904244658, - "loss": 1.3081, - "step": 8041 - }, - { - "epoch": 0.6185678024767326, - "learning_rate": 0.0009540938674307536, - "loss": 1.0842, - "step": 8042 - }, - { - "epoch": 0.618644719636951, - "learning_rate": 0.0009537562763130697, - "loss": 0.9682, - "step": 8043 - }, - { - "epoch": 0.6187216367971694, - "learning_rate": 0.0009534187170911263, - "loss": 0.7129, - "step": 8044 - }, - { - "epoch": 0.6187985539573879, - "learning_rate": 0.0009530811897846328, - "loss": 1.0388, - "step": 8045 - }, - { - "epoch": 0.6188754711176063, - "learning_rate": 0.0009527436944132996, - "loss": 1.1447, - "step": 8046 - }, - { - "epoch": 0.6189523882778247, - "learning_rate": 0.0009524062309968321, - "loss": 0.8394, - "step": 8047 - }, - { - "epoch": 0.6190293054380432, - "learning_rate": 0.000952068799554935, - "loss": 1.229, - "step": 8048 - }, - { - "epoch": 0.6191062225982616, - "learning_rate": 0.0009517314001073121, - "loss": 1.0077, - "step": 8049 - }, - { - "epoch": 0.6191831397584802, - "learning_rate": 0.000951394032673664, - "loss": 1.2297, - "step": 8050 - }, - { - "epoch": 0.6192600569186986, - "learning_rate": 0.0009510566972736901, - "loss": 1.1122, - "step": 8051 - }, - { - "epoch": 0.619336974078917, - "learning_rate": 0.000950719393927088, - "loss": 0.9952, - "step": 8052 - }, - { - "epoch": 0.6194138912391355, - "learning_rate": 0.0009503821226535525, - "loss": 0.8947, - "step": 8053 - }, - { - "epoch": 0.6194908083993539, - "learning_rate": 0.000950044883472778, - "loss": 1.2, - "step": 8054 - }, - { - "epoch": 0.6195677255595723, - "learning_rate": 0.0009497076764044561, - "loss": 1.0742, - "step": 8055 - }, - { - "epoch": 0.6196446427197908, - "learning_rate": 0.0009493705014682757, - "loss": 1.3496, - "step": 8056 - }, - { - "epoch": 0.6197215598800092, - "learning_rate": 0.0009490333586839266, - "loss": 1.0115, - "step": 8057 - }, - { - "epoch": 0.6197984770402277, - "learning_rate": 0.0009486962480710927, - "loss": 1.3346, - "step": 8058 - }, - { - "epoch": 0.6198753942004461, - "learning_rate": 0.0009483591696494606, - "loss": 0.9056, - "step": 8059 - }, - { - "epoch": 0.6199523113606645, - "learning_rate": 0.0009480221234387108, - "loss": 1.3529, - "step": 8060 - }, - { - "epoch": 0.620029228520883, - "learning_rate": 0.0009476851094585246, - "loss": 1.3914, - "step": 8061 - }, - { - 
"epoch": 0.6201061456811015, - "learning_rate": 0.0009473481277285805, - "loss": 1.0591, - "step": 8062 - }, - { - "epoch": 0.6201830628413199, - "learning_rate": 0.0009470111782685554, - "loss": 1.2498, - "step": 8063 - }, - { - "epoch": 0.6202599800015384, - "learning_rate": 0.0009466742610981235, - "loss": 1.0873, - "step": 8064 - }, - { - "epoch": 0.6203368971617568, - "learning_rate": 0.0009463373762369584, - "loss": 1.4817, - "step": 8065 - }, - { - "epoch": 0.6204138143219753, - "learning_rate": 0.0009460005237047306, - "loss": 1.1098, - "step": 8066 - }, - { - "epoch": 0.6204907314821937, - "learning_rate": 0.00094566370352111, - "loss": 1.1081, - "step": 8067 - }, - { - "epoch": 0.6205676486424121, - "learning_rate": 0.0009453269157057637, - "loss": 1.1561, - "step": 8068 - }, - { - "epoch": 0.6206445658026306, - "learning_rate": 0.0009449901602783558, - "loss": 0.9915, - "step": 8069 - }, - { - "epoch": 0.620721482962849, - "learning_rate": 0.000944653437258552, - "loss": 1.3577, - "step": 8070 - }, - { - "epoch": 0.6207984001230674, - "learning_rate": 0.0009443167466660124, - "loss": 1.1501, - "step": 8071 - }, - { - "epoch": 0.6208753172832859, - "learning_rate": 0.0009439800885203968, - "loss": 1.0286, - "step": 8072 - }, - { - "epoch": 0.6209522344435043, - "learning_rate": 0.0009436434628413636, - "loss": 1.3252, - "step": 8073 - }, - { - "epoch": 0.6210291516037227, - "learning_rate": 0.0009433068696485684, - "loss": 1.2765, - "step": 8074 - }, - { - "epoch": 0.6211060687639413, - "learning_rate": 0.0009429703089616649, - "loss": 1.0048, - "step": 8075 - }, - { - "epoch": 0.6211829859241597, - "learning_rate": 0.0009426337808003058, - "loss": 1.0308, - "step": 8076 - }, - { - "epoch": 0.6212599030843782, - "learning_rate": 0.0009422972851841411, - "loss": 1.1436, - "step": 8077 - }, - { - "epoch": 0.6213368202445966, - "learning_rate": 0.0009419608221328191, - "loss": 1.4121, - "step": 8078 - }, - { - "epoch": 0.621413737404815, - "learning_rate": 0.0009416243916659869, - "loss": 1.1917, - "step": 8079 - }, - { - "epoch": 0.6214906545650335, - "learning_rate": 0.0009412879938032872, - "loss": 1.535, - "step": 8080 - }, - { - "epoch": 0.6215675717252519, - "learning_rate": 0.000940951628564365, - "loss": 1.042, - "step": 8081 - }, - { - "epoch": 0.6216444888854703, - "learning_rate": 0.0009406152959688593, - "loss": 0.7926, - "step": 8082 - }, - { - "epoch": 0.6217214060456888, - "learning_rate": 0.0009402789960364093, - "loss": 0.8164, - "step": 8083 - }, - { - "epoch": 0.6217983232059072, - "learning_rate": 0.0009399427287866524, - "loss": 1.0363, - "step": 8084 - }, - { - "epoch": 0.6218752403661257, - "learning_rate": 0.0009396064942392228, - "loss": 0.9432, - "step": 8085 - }, - { - "epoch": 0.6219521575263441, - "learning_rate": 0.0009392702924137543, - "loss": 1.1427, - "step": 8086 - }, - { - "epoch": 0.6220290746865625, - "learning_rate": 0.0009389341233298779, - "loss": 1.3593, - "step": 8087 - }, - { - "epoch": 0.6221059918467811, - "learning_rate": 0.0009385979870072225, - "loss": 1.2726, - "step": 8088 - }, - { - "epoch": 0.6221829090069995, - "learning_rate": 0.000938261883465416, - "loss": 1.041, - "step": 8089 - }, - { - "epoch": 0.6222598261672179, - "learning_rate": 0.0009379258127240838, - "loss": 0.8908, - "step": 8090 - }, - { - "epoch": 0.6223367433274364, - "learning_rate": 0.0009375897748028481, - "loss": 1.0971, - "step": 8091 - }, - { - "epoch": 0.6224136604876548, - "learning_rate": 0.0009372537697213328, - "loss": 0.9342, - "step": 8092 - }, - { - 
"epoch": 0.6224905776478732, - "learning_rate": 0.0009369177974991558, - "loss": 1.1111, - "step": 8093 - }, - { - "epoch": 0.6225674948080917, - "learning_rate": 0.0009365818581559354, - "loss": 1.1114, - "step": 8094 - }, - { - "epoch": 0.6226444119683101, - "learning_rate": 0.0009362459517112875, - "loss": 1.1003, - "step": 8095 - }, - { - "epoch": 0.6227213291285286, - "learning_rate": 0.0009359100781848259, - "loss": 0.9977, - "step": 8096 - }, - { - "epoch": 0.622798246288747, - "learning_rate": 0.000935574237596163, - "loss": 0.8794, - "step": 8097 - }, - { - "epoch": 0.6228751634489654, - "learning_rate": 0.0009352384299649086, - "loss": 1.4536, - "step": 8098 - }, - { - "epoch": 0.622952080609184, - "learning_rate": 0.0009349026553106707, - "loss": 1.2938, - "step": 8099 - }, - { - "epoch": 0.6230289977694023, - "learning_rate": 0.0009345669136530556, - "loss": 0.8505, - "step": 8100 - }, - { - "epoch": 0.6231059149296208, - "learning_rate": 0.0009342312050116683, - "loss": 0.7771, - "step": 8101 - }, - { - "epoch": 0.6231828320898393, - "learning_rate": 0.0009338955294061095, - "loss": 0.9755, - "step": 8102 - }, - { - "epoch": 0.6232597492500577, - "learning_rate": 0.0009335598868559817, - "loss": 0.7584, - "step": 8103 - }, - { - "epoch": 0.6233366664102762, - "learning_rate": 0.0009332242773808815, - "loss": 1.0345, - "step": 8104 - }, - { - "epoch": 0.6234135835704946, - "learning_rate": 0.0009328887010004075, - "loss": 1.3528, - "step": 8105 - }, - { - "epoch": 0.623490500730713, - "learning_rate": 0.0009325531577341527, - "loss": 1.3813, - "step": 8106 - }, - { - "epoch": 0.6235674178909315, - "learning_rate": 0.0009322176476017103, - "loss": 1.2704, - "step": 8107 - }, - { - "epoch": 0.6236443350511499, - "learning_rate": 0.0009318821706226715, - "loss": 1.2574, - "step": 8108 - }, - { - "epoch": 0.6237212522113683, - "learning_rate": 0.0009315467268166247, - "loss": 0.9548, - "step": 8109 - }, - { - "epoch": 0.6237981693715868, - "learning_rate": 0.0009312113162031566, - "loss": 1.5025, - "step": 8110 - }, - { - "epoch": 0.6238750865318052, - "learning_rate": 0.0009308759388018528, - "loss": 0.971, - "step": 8111 - }, - { - "epoch": 0.6239520036920236, - "learning_rate": 0.0009305405946322966, - "loss": 1.3512, - "step": 8112 - }, - { - "epoch": 0.6240289208522422, - "learning_rate": 0.0009302052837140676, - "loss": 1.215, - "step": 8113 - }, - { - "epoch": 0.6241058380124606, - "learning_rate": 0.0009298700060667468, - "loss": 1.0269, - "step": 8114 - }, - { - "epoch": 0.6241827551726791, - "learning_rate": 0.0009295347617099094, - "loss": 1.1533, - "step": 8115 - }, - { - "epoch": 0.6242596723328975, - "learning_rate": 0.0009291995506631328, - "loss": 1.1212, - "step": 8116 - }, - { - "epoch": 0.6243365894931159, - "learning_rate": 0.0009288643729459891, - "loss": 1.1638, - "step": 8117 - }, - { - "epoch": 0.6244135066533344, - "learning_rate": 0.0009285292285780496, - "loss": 1.1108, - "step": 8118 - }, - { - "epoch": 0.6244904238135528, - "learning_rate": 0.0009281941175788844, - "loss": 1.2896, - "step": 8119 - }, - { - "epoch": 0.6245673409737712, - "learning_rate": 0.0009278590399680605, - "loss": 1.2024, - "step": 8120 - }, - { - "epoch": 0.6246442581339897, - "learning_rate": 0.0009275239957651431, - "loss": 1.2816, - "step": 8121 - }, - { - "epoch": 0.6247211752942081, - "learning_rate": 0.0009271889849896966, - "loss": 1.5103, - "step": 8122 - }, - { - "epoch": 0.6247980924544266, - "learning_rate": 0.000926854007661282, - "loss": 1.2158, - "step": 8123 - }, - 
{ - "epoch": 0.624875009614645, - "learning_rate": 0.0009265190637994594, - "loss": 0.923, - "step": 8124 - }, - { - "epoch": 0.6249519267748634, - "learning_rate": 0.0009261841534237867, - "loss": 0.9093, - "step": 8125 - }, - { - "epoch": 0.625028843935082, - "learning_rate": 0.0009258492765538183, - "loss": 1.359, - "step": 8126 - }, - { - "epoch": 0.6251057610953004, - "learning_rate": 0.0009255144332091101, - "loss": 0.9265, - "step": 8127 - }, - { - "epoch": 0.6251826782555188, - "learning_rate": 0.0009251796234092123, - "loss": 1.3336, - "step": 8128 - }, - { - "epoch": 0.6252595954157373, - "learning_rate": 0.0009248448471736753, - "loss": 1.2168, - "step": 8129 - }, - { - "epoch": 0.6253365125759557, - "learning_rate": 0.0009245101045220472, - "loss": 1.0361, - "step": 8130 - }, - { - "epoch": 0.6254134297361741, - "learning_rate": 0.000924175395473874, - "loss": 0.9382, - "step": 8131 - }, - { - "epoch": 0.6254903468963926, - "learning_rate": 0.0009238407200486992, - "loss": 1.1063, - "step": 8132 - }, - { - "epoch": 0.625567264056611, - "learning_rate": 0.0009235060782660655, - "loss": 1.5559, - "step": 8133 - }, - { - "epoch": 0.6256441812168295, - "learning_rate": 0.0009231714701455123, - "loss": 1.0572, - "step": 8134 - }, - { - "epoch": 0.6257210983770479, - "learning_rate": 0.0009228368957065788, - "loss": 1.2197, - "step": 8135 - }, - { - "epoch": 0.6257980155372663, - "learning_rate": 0.0009225023549688005, - "loss": 1.1472, - "step": 8136 - }, - { - "epoch": 0.6258749326974848, - "learning_rate": 0.0009221678479517106, - "loss": 1.4049, - "step": 8137 - }, - { - "epoch": 0.6259518498577032, - "learning_rate": 0.0009218333746748432, - "loss": 1.0489, - "step": 8138 - }, - { - "epoch": 0.6260287670179217, - "learning_rate": 0.0009214989351577274, - "loss": 1.3332, - "step": 8139 - }, - { - "epoch": 0.6261056841781402, - "learning_rate": 0.0009211645294198912, - "loss": 1.7117, - "step": 8140 - }, - { - "epoch": 0.6261826013383586, - "learning_rate": 0.0009208301574808618, - "loss": 1.2354, - "step": 8141 - }, - { - "epoch": 0.6262595184985771, - "learning_rate": 0.0009204958193601627, - "loss": 1.0934, - "step": 8142 - }, - { - "epoch": 0.6263364356587955, - "learning_rate": 0.0009201615150773169, - "loss": 1.2804, - "step": 8143 - }, - { - "epoch": 0.6264133528190139, - "learning_rate": 0.0009198272446518449, - "loss": 0.9547, - "step": 8144 - }, - { - "epoch": 0.6264902699792324, - "learning_rate": 0.0009194930081032641, - "loss": 0.735, - "step": 8145 - }, - { - "epoch": 0.6265671871394508, - "learning_rate": 0.000919158805451092, - "loss": 1.0617, - "step": 8146 - }, - { - "epoch": 0.6266441042996692, - "learning_rate": 0.0009188246367148426, - "loss": 1.3298, - "step": 8147 - }, - { - "epoch": 0.6267210214598877, - "learning_rate": 0.0009184905019140277, - "loss": 1.2708, - "step": 8148 - }, - { - "epoch": 0.6267979386201061, - "learning_rate": 0.0009181564010681593, - "loss": 0.9733, - "step": 8149 - }, - { - "epoch": 0.6268748557803245, - "learning_rate": 0.0009178223341967447, - "loss": 1.524, - "step": 8150 - }, - { - "epoch": 0.626951772940543, - "learning_rate": 0.0009174883013192904, - "loss": 1.04, - "step": 8151 - }, - { - "epoch": 0.6270286901007615, - "learning_rate": 0.0009171543024553016, - "loss": 0.8358, - "step": 8152 - }, - { - "epoch": 0.62710560726098, - "learning_rate": 0.0009168203376242803, - "loss": 1.2077, - "step": 8153 - }, - { - "epoch": 0.6271825244211984, - "learning_rate": 0.0009164864068457275, - "loss": 1.3779, - "step": 8154 - }, - { - 
"epoch": 0.6272594415814168, - "learning_rate": 0.0009161525101391416, - "loss": 0.8769, - "step": 8155 - }, - { - "epoch": 0.6273363587416353, - "learning_rate": 0.0009158186475240187, - "loss": 1.0898, - "step": 8156 - }, - { - "epoch": 0.6274132759018537, - "learning_rate": 0.0009154848190198539, - "loss": 1.0027, - "step": 8157 - }, - { - "epoch": 0.6274901930620721, - "learning_rate": 0.0009151510246461403, - "loss": 1.0532, - "step": 8158 - }, - { - "epoch": 0.6275671102222906, - "learning_rate": 0.0009148172644223669, - "loss": 1.1443, - "step": 8159 - }, - { - "epoch": 0.627644027382509, - "learning_rate": 0.000914483538368024, - "loss": 1.1674, - "step": 8160 - }, - { - "epoch": 0.6277209445427275, - "learning_rate": 0.0009141498465025968, - "loss": 1.0831, - "step": 8161 - }, - { - "epoch": 0.6277978617029459, - "learning_rate": 0.0009138161888455716, - "loss": 1.1636, - "step": 8162 - }, - { - "epoch": 0.6278747788631643, - "learning_rate": 0.0009134825654164295, - "loss": 0.9123, - "step": 8163 - }, - { - "epoch": 0.6279516960233829, - "learning_rate": 0.0009131489762346515, - "loss": 1.1613, - "step": 8164 - }, - { - "epoch": 0.6280286131836013, - "learning_rate": 0.0009128154213197168, - "loss": 1.4738, - "step": 8165 - }, - { - "epoch": 0.6281055303438197, - "learning_rate": 0.0009124819006911015, - "loss": 1.2864, - "step": 8166 - }, - { - "epoch": 0.6281824475040382, - "learning_rate": 0.0009121484143682799, - "loss": 1.0098, - "step": 8167 - }, - { - "epoch": 0.6282593646642566, - "learning_rate": 0.0009118149623707254, - "loss": 1.2836, - "step": 8168 - }, - { - "epoch": 0.6283362818244751, - "learning_rate": 0.000911481544717908, - "loss": 0.5749, - "step": 8169 - }, - { - "epoch": 0.6284131989846935, - "learning_rate": 0.0009111481614292968, - "loss": 1.1781, - "step": 8170 - }, - { - "epoch": 0.6284901161449119, - "learning_rate": 0.0009108148125243584, - "loss": 0.7363, - "step": 8171 - }, - { - "epoch": 0.6285670333051304, - "learning_rate": 0.0009104814980225562, - "loss": 1.1923, - "step": 8172 - }, - { - "epoch": 0.6286439504653488, - "learning_rate": 0.0009101482179433548, - "loss": 1.1633, - "step": 8173 - }, - { - "epoch": 0.6287208676255672, - "learning_rate": 0.0009098149723062132, - "loss": 1.5104, - "step": 8174 - }, - { - "epoch": 0.6287977847857857, - "learning_rate": 0.0009094817611305902, - "loss": 1.0493, - "step": 8175 - }, - { - "epoch": 0.6288747019460041, - "learning_rate": 0.0009091485844359428, - "loss": 0.8758, - "step": 8176 - }, - { - "epoch": 0.6289516191062225, - "learning_rate": 0.0009088154422417255, - "loss": 1.1665, - "step": 8177 - }, - { - "epoch": 0.6290285362664411, - "learning_rate": 0.0009084823345673903, - "loss": 0.8523, - "step": 8178 - }, - { - "epoch": 0.6291054534266595, - "learning_rate": 0.0009081492614323882, - "loss": 1.1758, - "step": 8179 - }, - { - "epoch": 0.629182370586878, - "learning_rate": 0.0009078162228561674, - "loss": 0.9003, - "step": 8180 - }, - { - "epoch": 0.6292592877470964, - "learning_rate": 0.0009074832188581749, - "loss": 0.8993, - "step": 8181 - }, - { - "epoch": 0.6293362049073148, - "learning_rate": 0.0009071502494578545, - "loss": 1.4683, - "step": 8182 - }, - { - "epoch": 0.6294131220675333, - "learning_rate": 0.0009068173146746488, - "loss": 0.7721, - "step": 8183 - }, - { - "epoch": 0.6294900392277517, - "learning_rate": 0.0009064844145279985, - "loss": 1.0183, - "step": 8184 - }, - { - "epoch": 0.6295669563879701, - "learning_rate": 0.0009061515490373421, - "loss": 1.184, - "step": 8185 - }, 
- { - "epoch": 0.6296438735481886, - "learning_rate": 0.0009058187182221148, - "loss": 1.1737, - "step": 8186 - }, - { - "epoch": 0.629720790708407, - "learning_rate": 0.0009054859221017524, - "loss": 1.2641, - "step": 8187 - }, - { - "epoch": 0.6297977078686255, - "learning_rate": 0.0009051531606956863, - "loss": 1.1508, - "step": 8188 - }, - { - "epoch": 0.629874625028844, - "learning_rate": 0.0009048204340233474, - "loss": 1.3649, - "step": 8189 - }, - { - "epoch": 0.6299515421890624, - "learning_rate": 0.0009044877421041633, - "loss": 1.2373, - "step": 8190 - }, - { - "epoch": 0.6300284593492809, - "learning_rate": 0.0009041550849575605, - "loss": 1.1942, - "step": 8191 - }, - { - "epoch": 0.6301053765094993, - "learning_rate": 0.0009038224626029636, - "loss": 0.8716, - "step": 8192 - }, - { - "epoch": 0.6301822936697177, - "learning_rate": 0.0009034898750597945, - "loss": 1.1884, - "step": 8193 - }, - { - "epoch": 0.6302592108299362, - "learning_rate": 0.0009031573223474729, - "loss": 0.9751, - "step": 8194 - }, - { - "epoch": 0.6303361279901546, - "learning_rate": 0.0009028248044854175, - "loss": 1.0067, - "step": 8195 - }, - { - "epoch": 0.630413045150373, - "learning_rate": 0.0009024923214930443, - "loss": 0.8566, - "step": 8196 - }, - { - "epoch": 0.6304899623105915, - "learning_rate": 0.0009021598733897664, - "loss": 1.1764, - "step": 8197 - }, - { - "epoch": 0.6305668794708099, - "learning_rate": 0.0009018274601949975, - "loss": 1.1409, - "step": 8198 - }, - { - "epoch": 0.6306437966310284, - "learning_rate": 0.0009014950819281459, - "loss": 1.0688, - "step": 8199 - }, - { - "epoch": 0.6307207137912468, - "learning_rate": 0.0009011627386086206, - "loss": 0.8363, - "step": 8200 - }, - { - "epoch": 0.6307976309514652, - "learning_rate": 0.0009008304302558269, - "loss": 0.9203, - "step": 8201 - }, - { - "epoch": 0.6308745481116838, - "learning_rate": 0.0009004981568891688, - "loss": 1.0089, - "step": 8202 - }, - { - "epoch": 0.6309514652719022, - "learning_rate": 0.0009001659185280482, - "loss": 1.5066, - "step": 8203 - }, - { - "epoch": 0.6310283824321206, - "learning_rate": 0.0008998337151918649, - "loss": 1.5113, - "step": 8204 - }, - { - "epoch": 0.6311052995923391, - "learning_rate": 0.000899501546900016, - "loss": 1.0865, - "step": 8205 - }, - { - "epoch": 0.6311822167525575, - "learning_rate": 0.000899169413671898, - "loss": 1.0892, - "step": 8206 - }, - { - "epoch": 0.631259133912776, - "learning_rate": 0.0008988373155269037, - "loss": 1.3598, - "step": 8207 - }, - { - "epoch": 0.6313360510729944, - "learning_rate": 0.0008985052524844254, - "loss": 1.3512, - "step": 8208 - }, - { - "epoch": 0.6314129682332128, - "learning_rate": 0.0008981732245638528, - "loss": 1.119, - "step": 8209 - }, - { - "epoch": 0.6314898853934313, - "learning_rate": 0.0008978412317845717, - "loss": 0.883, - "step": 8210 - }, - { - "epoch": 0.6315668025536497, - "learning_rate": 0.0008975092741659694, - "loss": 1.031, - "step": 8211 - }, - { - "epoch": 0.6316437197138681, - "learning_rate": 0.0008971773517274284, - "loss": 0.9025, - "step": 8212 - }, - { - "epoch": 0.6317206368740866, - "learning_rate": 0.0008968454644883296, - "loss": 1.5185, - "step": 8213 - }, - { - "epoch": 0.631797554034305, - "learning_rate": 0.0008965136124680533, - "loss": 1.0785, - "step": 8214 - }, - { - "epoch": 0.6318744711945234, - "learning_rate": 0.0008961817956859759, - "loss": 1.4106, - "step": 8215 - }, - { - "epoch": 0.631951388354742, - "learning_rate": 0.0008958500141614725, - "loss": 0.8974, - "step": 8216 - }, 
- { - "epoch": 0.6320283055149604, - "learning_rate": 0.0008955182679139165, - "loss": 1.1247, - "step": 8217 - }, - { - "epoch": 0.6321052226751789, - "learning_rate": 0.0008951865569626788, - "loss": 1.3157, - "step": 8218 - }, - { - "epoch": 0.6321821398353973, - "learning_rate": 0.0008948548813271285, - "loss": 0.957, - "step": 8219 - }, - { - "epoch": 0.6322590569956157, - "learning_rate": 0.0008945232410266328, - "loss": 1.1534, - "step": 8220 - }, - { - "epoch": 0.6323359741558342, - "learning_rate": 0.0008941916360805551, - "loss": 1.1409, - "step": 8221 - }, - { - "epoch": 0.6324128913160526, - "learning_rate": 0.00089386006650826, - "loss": 1.2061, - "step": 8222 - }, - { - "epoch": 0.632489808476271, - "learning_rate": 0.0008935285323291069, - "loss": 1.0451, - "step": 8223 - }, - { - "epoch": 0.6325667256364895, - "learning_rate": 0.0008931970335624548, - "loss": 1.1052, - "step": 8224 - }, - { - "epoch": 0.6326436427967079, - "learning_rate": 0.0008928655702276605, - "loss": 1.0595, - "step": 8225 - }, - { - "epoch": 0.6327205599569264, - "learning_rate": 0.0008925341423440779, - "loss": 1.0323, - "step": 8226 - }, - { - "epoch": 0.6327974771171448, - "learning_rate": 0.0008922027499310602, - "loss": 0.9534, - "step": 8227 - }, - { - "epoch": 0.6328743942773633, - "learning_rate": 0.0008918713930079574, - "loss": 0.8965, - "step": 8228 - }, - { - "epoch": 0.6329513114375818, - "learning_rate": 0.0008915400715941174, - "loss": 1.2773, - "step": 8229 - }, - { - "epoch": 0.6330282285978002, - "learning_rate": 0.0008912087857088868, - "loss": 1.133, - "step": 8230 - }, - { - "epoch": 0.6331051457580186, - "learning_rate": 0.0008908775353716103, - "loss": 1.3332, - "step": 8231 - }, - { - "epoch": 0.6331820629182371, - "learning_rate": 0.0008905463206016283, - "loss": 1.1906, - "step": 8232 - }, - { - "epoch": 0.6332589800784555, - "learning_rate": 0.0008902151414182828, - "loss": 1.9048, - "step": 8233 - }, - { - "epoch": 0.6333358972386739, - "learning_rate": 0.0008898839978409103, - "loss": 1.2236, - "step": 8234 - }, - { - "epoch": 0.6334128143988924, - "learning_rate": 0.0008895528898888467, - "loss": 0.9474, - "step": 8235 - }, - { - "epoch": 0.6334897315591108, - "learning_rate": 0.0008892218175814265, - "loss": 1.0123, - "step": 8236 - }, - { - "epoch": 0.6335666487193293, - "learning_rate": 0.0008888907809379804, - "loss": 1.0698, - "step": 8237 - }, - { - "epoch": 0.6336435658795477, - "learning_rate": 0.0008885597799778389, - "loss": 1.343, - "step": 8238 - }, - { - "epoch": 0.6337204830397661, - "learning_rate": 0.0008882288147203289, - "loss": 1.156, - "step": 8239 - }, - { - "epoch": 0.6337974001999847, - "learning_rate": 0.0008878978851847759, - "loss": 1.2981, - "step": 8240 - }, - { - "epoch": 0.633874317360203, - "learning_rate": 0.0008875669913905034, - "loss": 1.0384, - "step": 8241 - }, - { - "epoch": 0.6339512345204215, - "learning_rate": 0.0008872361333568332, - "loss": 0.9956, - "step": 8242 - }, - { - "epoch": 0.63402815168064, - "learning_rate": 0.0008869053111030825, - "loss": 1.084, - "step": 8243 - }, - { - "epoch": 0.6341050688408584, - "learning_rate": 0.0008865745246485708, - "loss": 0.9186, - "step": 8244 - }, - { - "epoch": 0.6341819860010769, - "learning_rate": 0.0008862437740126107, - "loss": 0.9003, - "step": 8245 - }, - { - "epoch": 0.6342589031612953, - "learning_rate": 0.0008859130592145175, - "loss": 1.1746, - "step": 8246 - }, - { - "epoch": 0.6343358203215137, - "learning_rate": 0.0008855823802736001, - "loss": 1.2765, - "step": 8247 - }, 
- { - "epoch": 0.6344127374817322, - "learning_rate": 0.0008852517372091679, - "loss": 1.2865, - "step": 8248 - }, - { - "epoch": 0.6344896546419506, - "learning_rate": 0.0008849211300405278, - "loss": 1.1054, - "step": 8249 - }, - { - "epoch": 0.634566571802169, - "learning_rate": 0.0008845905587869835, - "loss": 1.122, - "step": 8250 - }, - { - "epoch": 0.6346434889623875, - "learning_rate": 0.0008842600234678379, - "loss": 0.8618, - "step": 8251 - }, - { - "epoch": 0.6347204061226059, - "learning_rate": 0.0008839295241023915, - "loss": 1.2543, - "step": 8252 - }, - { - "epoch": 0.6347973232828243, - "learning_rate": 0.0008835990607099423, - "loss": 1.0198, - "step": 8253 - }, - { - "epoch": 0.6348742404430429, - "learning_rate": 0.0008832686333097857, - "loss": 1.2164, - "step": 8254 - }, - { - "epoch": 0.6349511576032613, - "learning_rate": 0.0008829382419212171, - "loss": 1.1041, - "step": 8255 - }, - { - "epoch": 0.6350280747634798, - "learning_rate": 0.0008826078865635267, - "loss": 0.9281, - "step": 8256 - }, - { - "epoch": 0.6351049919236982, - "learning_rate": 0.0008822775672560064, - "loss": 0.991, - "step": 8257 - }, - { - "epoch": 0.6351819090839166, - "learning_rate": 0.0008819472840179422, - "loss": 0.8556, - "step": 8258 - }, - { - "epoch": 0.6352588262441351, - "learning_rate": 0.0008816170368686199, - "loss": 1.1821, - "step": 8259 - }, - { - "epoch": 0.6353357434043535, - "learning_rate": 0.0008812868258273236, - "loss": 1.1493, - "step": 8260 - }, - { - "epoch": 0.6354126605645719, - "learning_rate": 0.0008809566509133343, - "loss": 0.9212, - "step": 8261 - }, - { - "epoch": 0.6354895777247904, - "learning_rate": 0.000880626512145931, - "loss": 1.503, - "step": 8262 - }, - { - "epoch": 0.6355664948850088, - "learning_rate": 0.0008802964095443917, - "loss": 1.1168, - "step": 8263 - }, - { - "epoch": 0.6356434120452273, - "learning_rate": 0.0008799663431279903, - "loss": 1.1888, - "step": 8264 - }, - { - "epoch": 0.6357203292054457, - "learning_rate": 0.0008796363129160009, - "loss": 1.1403, - "step": 8265 - }, - { - "epoch": 0.6357972463656641, - "learning_rate": 0.0008793063189276937, - "loss": 1.4732, - "step": 8266 - }, - { - "epoch": 0.6358741635258827, - "learning_rate": 0.0008789763611823366, - "loss": 1.4032, - "step": 8267 - }, - { - "epoch": 0.6359510806861011, - "learning_rate": 0.000878646439699198, - "loss": 1.2138, - "step": 8268 - }, - { - "epoch": 0.6360279978463195, - "learning_rate": 0.000878316554497541, - "loss": 1.1688, - "step": 8269 - }, - { - "epoch": 0.636104915006538, - "learning_rate": 0.0008779867055966279, - "loss": 1.0932, - "step": 8270 - }, - { - "epoch": 0.6361818321667564, - "learning_rate": 0.0008776568930157198, - "loss": 1.4304, - "step": 8271 - }, - { - "epoch": 0.6362587493269748, - "learning_rate": 0.0008773271167740741, - "loss": 1.0514, - "step": 8272 - }, - { - "epoch": 0.6363356664871933, - "learning_rate": 0.0008769973768909467, - "loss": 0.9654, - "step": 8273 - }, - { - "epoch": 0.6364125836474117, - "learning_rate": 0.0008766676733855921, - "loss": 0.9579, - "step": 8274 - }, - { - "epoch": 0.6364895008076302, - "learning_rate": 0.0008763380062772614, - "loss": 0.7052, - "step": 8275 - }, - { - "epoch": 0.6365664179678486, - "learning_rate": 0.0008760083755852048, - "loss": 0.9583, - "step": 8276 - }, - { - "epoch": 0.636643335128067, - "learning_rate": 0.0008756787813286695, - "loss": 1.5324, - "step": 8277 - }, - { - "epoch": 0.6367202522882855, - "learning_rate": 0.0008753492235269007, - "loss": 1.0465, - "step": 8278 - 
}, - { - "epoch": 0.636797169448504, - "learning_rate": 0.0008750197021991417, - "loss": 0.9068, - "step": 8279 - }, - { - "epoch": 0.6368740866087224, - "learning_rate": 0.0008746902173646337, - "loss": 0.9917, - "step": 8280 - }, - { - "epoch": 0.6369510037689409, - "learning_rate": 0.000874360769042615, - "loss": 0.9032, - "step": 8281 - }, - { - "epoch": 0.6370279209291593, - "learning_rate": 0.0008740313572523236, - "loss": 1.0142, - "step": 8282 - }, - { - "epoch": 0.6371048380893778, - "learning_rate": 0.0008737019820129937, - "loss": 1.4506, - "step": 8283 - }, - { - "epoch": 0.6371817552495962, - "learning_rate": 0.0008733726433438577, - "loss": 1.026, - "step": 8284 - }, - { - "epoch": 0.6372586724098146, - "learning_rate": 0.0008730433412641461, - "loss": 0.8937, - "step": 8285 - }, - { - "epoch": 0.6373355895700331, - "learning_rate": 0.0008727140757930866, - "loss": 1.0528, - "step": 8286 - }, - { - "epoch": 0.6374125067302515, - "learning_rate": 0.0008723848469499072, - "loss": 1.2463, - "step": 8287 - }, - { - "epoch": 0.6374894238904699, - "learning_rate": 0.0008720556547538299, - "loss": 1.297, - "step": 8288 - }, - { - "epoch": 0.6375663410506884, - "learning_rate": 0.000871726499224077, - "loss": 1.1878, - "step": 8289 - }, - { - "epoch": 0.6376432582109068, - "learning_rate": 0.000871397380379869, - "loss": 1.2336, - "step": 8290 - }, - { - "epoch": 0.6377201753711254, - "learning_rate": 0.0008710682982404233, - "loss": 1.1063, - "step": 8291 - }, - { - "epoch": 0.6377970925313438, - "learning_rate": 0.0008707392528249548, - "loss": 0.9645, - "step": 8292 - }, - { - "epoch": 0.6378740096915622, - "learning_rate": 0.0008704102441526772, - "loss": 0.6542, - "step": 8293 - }, - { - "epoch": 0.6379509268517807, - "learning_rate": 0.0008700812722428011, - "loss": 1.0287, - "step": 8294 - }, - { - "epoch": 0.6380278440119991, - "learning_rate": 0.0008697523371145367, - "loss": 1.1559, - "step": 8295 - }, - { - "epoch": 0.6381047611722175, - "learning_rate": 0.0008694234387870906, - "loss": 1.142, - "step": 8296 - }, - { - "epoch": 0.638181678332436, - "learning_rate": 0.0008690945772796657, - "loss": 1.09, - "step": 8297 - }, - { - "epoch": 0.6382585954926544, - "learning_rate": 0.0008687657526114668, - "loss": 1.2456, - "step": 8298 - }, - { - "epoch": 0.6383355126528728, - "learning_rate": 0.0008684369648016935, - "loss": 1.1513, - "step": 8299 - }, - { - "epoch": 0.6384124298130913, - "learning_rate": 0.0008681082138695441, - "loss": 1.1071, - "step": 8300 - }, - { - "epoch": 0.6384893469733097, - "learning_rate": 0.0008677794998342144, - "loss": 0.9978, - "step": 8301 - }, - { - "epoch": 0.6385662641335282, - "learning_rate": 0.0008674508227148981, - "loss": 0.801, - "step": 8302 - }, - { - "epoch": 0.6386431812937466, - "learning_rate": 0.000867122182530788, - "loss": 1.0011, - "step": 8303 - }, - { - "epoch": 0.638720098453965, - "learning_rate": 0.0008667935793010733, - "loss": 1.017, - "step": 8304 - }, - { - "epoch": 0.6387970156141836, - "learning_rate": 0.0008664650130449414, - "loss": 1.3035, - "step": 8305 - }, - { - "epoch": 0.638873932774402, - "learning_rate": 0.0008661364837815777, - "loss": 1.1366, - "step": 8306 - }, - { - "epoch": 0.6389508499346204, - "learning_rate": 0.0008658079915301652, - "loss": 1.0112, - "step": 8307 - }, - { - "epoch": 0.6390277670948389, - "learning_rate": 0.0008654795363098845, - "loss": 1.348, - "step": 8308 - }, - { - "epoch": 0.6391046842550573, - "learning_rate": 0.0008651511181399163, - "loss": 1.2637, - "step": 8309 - }, - 
{ - "epoch": 0.6391816014152758, - "learning_rate": 0.0008648227370394344, - "loss": 1.4518, - "step": 8310 - }, - { - "epoch": 0.6392585185754942, - "learning_rate": 0.0008644943930276157, - "loss": 0.9448, - "step": 8311 - }, - { - "epoch": 0.6393354357357126, - "learning_rate": 0.0008641660861236315, - "loss": 0.8564, - "step": 8312 - }, - { - "epoch": 0.6394123528959311, - "learning_rate": 0.0008638378163466524, - "loss": 1.0912, - "step": 8313 - }, - { - "epoch": 0.6394892700561495, - "learning_rate": 0.0008635095837158461, - "loss": 1.1062, - "step": 8314 - }, - { - "epoch": 0.6395661872163679, - "learning_rate": 0.0008631813882503785, - "loss": 1.1459, - "step": 8315 - }, - { - "epoch": 0.6396431043765864, - "learning_rate": 0.0008628532299694129, - "loss": 1.1181, - "step": 8316 - }, - { - "epoch": 0.6397200215368049, - "learning_rate": 0.0008625251088921114, - "loss": 0.8185, - "step": 8317 - }, - { - "epoch": 0.6397969386970233, - "learning_rate": 0.0008621970250376342, - "loss": 1.2591, - "step": 8318 - }, - { - "epoch": 0.6398738558572418, - "learning_rate": 0.0008618689784251362, - "loss": 1.5552, - "step": 8319 - }, - { - "epoch": 0.6399507730174602, - "learning_rate": 0.0008615409690737739, - "loss": 0.9654, - "step": 8320 - }, - { - "epoch": 0.6400276901776787, - "learning_rate": 0.0008612129970026991, - "loss": 0.9854, - "step": 8321 - }, - { - "epoch": 0.6401046073378971, - "learning_rate": 0.0008608850622310646, - "loss": 1.2055, - "step": 8322 - }, - { - "epoch": 0.6401815244981155, - "learning_rate": 0.0008605571647780168, - "loss": 1.1933, - "step": 8323 - }, - { - "epoch": 0.640258441658334, - "learning_rate": 0.000860229304662702, - "loss": 1.2742, - "step": 8324 - }, - { - "epoch": 0.6403353588185524, - "learning_rate": 0.0008599014819042653, - "loss": 1.0467, - "step": 8325 - }, - { - "epoch": 0.6404122759787708, - "learning_rate": 0.0008595736965218483, - "loss": 1.3372, - "step": 8326 - }, - { - "epoch": 0.6404891931389893, - "learning_rate": 0.0008592459485345905, - "loss": 0.9231, - "step": 8327 - }, - { - "epoch": 0.6405661102992077, - "learning_rate": 0.0008589182379616296, - "loss": 1.0623, - "step": 8328 - }, - { - "epoch": 0.6406430274594263, - "learning_rate": 0.0008585905648221004, - "loss": 0.6794, - "step": 8329 - }, - { - "epoch": 0.6407199446196447, - "learning_rate": 0.0008582629291351369, - "loss": 1.6995, - "step": 8330 - }, - { - "epoch": 0.6407968617798631, - "learning_rate": 0.0008579353309198705, - "loss": 0.9766, - "step": 8331 - }, - { - "epoch": 0.6408737789400816, - "learning_rate": 0.0008576077701954282, - "loss": 1.282, - "step": 8332 - }, - { - "epoch": 0.6409506961003, - "learning_rate": 0.0008572802469809381, - "loss": 1.0178, - "step": 8333 - }, - { - "epoch": 0.6410276132605184, - "learning_rate": 0.0008569527612955245, - "loss": 1.1893, - "step": 8334 - }, - { - "epoch": 0.6411045304207369, - "learning_rate": 0.000856625313158309, - "loss": 1.3076, - "step": 8335 - }, - { - "epoch": 0.6411814475809553, - "learning_rate": 0.0008562979025884121, - "loss": 1.2457, - "step": 8336 - }, - { - "epoch": 0.6412583647411737, - "learning_rate": 0.0008559705296049515, - "loss": 1.2937, - "step": 8337 - }, - { - "epoch": 0.6413352819013922, - "learning_rate": 0.0008556431942270422, - "loss": 1.1312, - "step": 8338 - }, - { - "epoch": 0.6414121990616106, - "learning_rate": 0.000855315896473799, - "loss": 1.4275, - "step": 8339 - }, - { - "epoch": 0.6414891162218291, - "learning_rate": 0.0008549886363643326, - "loss": 0.9869, - "step": 8340 - }, 
- { - "epoch": 0.6415660333820475, - "learning_rate": 0.0008546614139177518, - "loss": 1.3316, - "step": 8341 - }, - { - "epoch": 0.6416429505422659, - "learning_rate": 0.0008543342291531636, - "loss": 0.914, - "step": 8342 - }, - { - "epoch": 0.6417198677024845, - "learning_rate": 0.0008540070820896721, - "loss": 1.2597, - "step": 8343 - }, - { - "epoch": 0.6417967848627029, - "learning_rate": 0.0008536799727463815, - "loss": 1.0967, - "step": 8344 - }, - { - "epoch": 0.6418737020229213, - "learning_rate": 0.0008533529011423902, - "loss": 1.4358, - "step": 8345 - }, - { - "epoch": 0.6419506191831398, - "learning_rate": 0.0008530258672967963, - "loss": 1.2265, - "step": 8346 - }, - { - "epoch": 0.6420275363433582, - "learning_rate": 0.000852698871228697, - "loss": 1.0294, - "step": 8347 - }, - { - "epoch": 0.6421044535035767, - "learning_rate": 0.0008523719129571852, - "loss": 0.8969, - "step": 8348 - }, - { - "epoch": 0.6421813706637951, - "learning_rate": 0.0008520449925013524, - "loss": 1.1363, - "step": 8349 - }, - { - "epoch": 0.6422582878240135, - "learning_rate": 0.0008517181098802875, - "loss": 0.9958, - "step": 8350 - }, - { - "epoch": 0.642335204984232, - "learning_rate": 0.0008513912651130774, - "loss": 0.9373, - "step": 8351 - }, - { - "epoch": 0.6424121221444504, - "learning_rate": 0.0008510644582188078, - "loss": 1.4266, - "step": 8352 - }, - { - "epoch": 0.6424890393046688, - "learning_rate": 0.0008507376892165616, - "loss": 1.178, - "step": 8353 - }, - { - "epoch": 0.6425659564648873, - "learning_rate": 0.000850410958125417, - "loss": 1.2896, - "step": 8354 - }, - { - "epoch": 0.6426428736251057, - "learning_rate": 0.0008500842649644541, - "loss": 1.1034, - "step": 8355 - }, - { - "epoch": 0.6427197907853242, - "learning_rate": 0.0008497576097527483, - "loss": 1.2321, - "step": 8356 - }, - { - "epoch": 0.6427967079455427, - "learning_rate": 0.0008494309925093735, - "loss": 1.1474, - "step": 8357 - }, - { - "epoch": 0.6428736251057611, - "learning_rate": 0.0008491044132534008, - "loss": 1.0116, - "step": 8358 - }, - { - "epoch": 0.6429505422659796, - "learning_rate": 0.0008487778720038993, - "loss": 1.0924, - "step": 8359 - }, - { - "epoch": 0.643027459426198, - "learning_rate": 0.0008484513687799372, - "loss": 1.1276, - "step": 8360 - }, - { - "epoch": 0.6431043765864164, - "learning_rate": 0.0008481249036005786, - "loss": 0.8738, - "step": 8361 - }, - { - "epoch": 0.6431812937466349, - "learning_rate": 0.0008477984764848864, - "loss": 1.0035, - "step": 8362 - }, - { - "epoch": 0.6432582109068533, - "learning_rate": 0.0008474720874519208, - "loss": 1.0124, - "step": 8363 - }, - { - "epoch": 0.6433351280670717, - "learning_rate": 0.00084714573652074, - "loss": 1.0515, - "step": 8364 - }, - { - "epoch": 0.6434120452272902, - "learning_rate": 0.0008468194237103994, - "loss": 1.1871, - "step": 8365 - }, - { - "epoch": 0.6434889623875086, - "learning_rate": 0.0008464931490399546, - "loss": 1.1569, - "step": 8366 - }, - { - "epoch": 0.6435658795477271, - "learning_rate": 0.0008461669125284548, - "loss": 1.3059, - "step": 8367 - }, - { - "epoch": 0.6436427967079456, - "learning_rate": 0.000845840714194951, - "loss": 1.0381, - "step": 8368 - }, - { - "epoch": 0.643719713868164, - "learning_rate": 0.0008455145540584894, - "loss": 1.3407, - "step": 8369 - }, - { - "epoch": 0.6437966310283825, - "learning_rate": 0.0008451884321381151, - "loss": 1.3718, - "step": 8370 - }, - { - "epoch": 0.6438735481886009, - "learning_rate": 0.0008448623484528708, - "loss": 1.267, - "step": 8371 - }, 
- { - "epoch": 0.6439504653488193, - "learning_rate": 0.000844536303021797, - "loss": 0.8983, - "step": 8372 - }, - { - "epoch": 0.6440273825090378, - "learning_rate": 0.0008442102958639305, - "loss": 1.2889, - "step": 8373 - }, - { - "epoch": 0.6441042996692562, - "learning_rate": 0.0008438843269983092, - "loss": 0.8949, - "step": 8374 - }, - { - "epoch": 0.6441812168294746, - "learning_rate": 0.0008435583964439663, - "loss": 0.898, - "step": 8375 - }, - { - "epoch": 0.6442581339896931, - "learning_rate": 0.0008432325042199317, - "loss": 1.0528, - "step": 8376 - }, - { - "epoch": 0.6443350511499115, - "learning_rate": 0.0008429066503452359, - "loss": 0.7661, - "step": 8377 - }, - { - "epoch": 0.64441196831013, - "learning_rate": 0.0008425808348389054, - "loss": 1.3702, - "step": 8378 - }, - { - "epoch": 0.6444888854703484, - "learning_rate": 0.000842255057719966, - "loss": 0.8219, - "step": 8379 - }, - { - "epoch": 0.6445658026305668, - "learning_rate": 0.0008419293190074389, - "loss": 1.3809, - "step": 8380 - }, - { - "epoch": 0.6446427197907854, - "learning_rate": 0.000841603618720344, - "loss": 1.9077, - "step": 8381 - }, - { - "epoch": 0.6447196369510038, - "learning_rate": 0.0008412779568777005, - "loss": 0.8228, - "step": 8382 - }, - { - "epoch": 0.6447965541112222, - "learning_rate": 0.0008409523334985238, - "loss": 1.405, - "step": 8383 - }, - { - "epoch": 0.6448734712714407, - "learning_rate": 0.0008406267486018268, - "loss": 1.0319, - "step": 8384 - }, - { - "epoch": 0.6449503884316591, - "learning_rate": 0.0008403012022066215, - "loss": 1.0342, - "step": 8385 - }, - { - "epoch": 0.6450273055918776, - "learning_rate": 0.0008399756943319157, - "loss": 1.2664, - "step": 8386 - }, - { - "epoch": 0.645104222752096, - "learning_rate": 0.0008396502249967177, - "loss": 0.888, - "step": 8387 - }, - { - "epoch": 0.6451811399123144, - "learning_rate": 0.000839324794220032, - "loss": 1.3294, - "step": 8388 - }, - { - "epoch": 0.6452580570725329, - "learning_rate": 0.0008389994020208587, - "loss": 1.1977, - "step": 8389 - }, - { - "epoch": 0.6453349742327513, - "learning_rate": 0.0008386740484181998, - "loss": 1.0861, - "step": 8390 - }, - { - "epoch": 0.6454118913929697, - "learning_rate": 0.0008383487334310523, - "loss": 1.0666, - "step": 8391 - }, - { - "epoch": 0.6454888085531882, - "learning_rate": 0.000838023457078412, - "loss": 0.9194, - "step": 8392 - }, - { - "epoch": 0.6455657257134066, - "learning_rate": 0.0008376982193792718, - "loss": 1.4254, - "step": 8393 - }, - { - "epoch": 0.6456426428736252, - "learning_rate": 0.0008373730203526226, - "loss": 1.1746, - "step": 8394 - }, - { - "epoch": 0.6457195600338436, - "learning_rate": 0.0008370478600174535, - "loss": 0.8879, - "step": 8395 - }, - { - "epoch": 0.645796477194062, - "learning_rate": 0.0008367227383927508, - "loss": 1.0596, - "step": 8396 - }, - { - "epoch": 0.6458733943542805, - "learning_rate": 0.0008363976554974987, - "loss": 1.005, - "step": 8397 - }, - { - "epoch": 0.6459503115144989, - "learning_rate": 0.0008360726113506792, - "loss": 1.2018, - "step": 8398 - }, - { - "epoch": 0.6460272286747173, - "learning_rate": 0.0008357476059712718, - "loss": 1.1786, - "step": 8399 - }, - { - "epoch": 0.6461041458349358, - "learning_rate": 0.0008354226393782534, - "loss": 1.2608, - "step": 8400 - }, - { - "epoch": 0.6461810629951542, - "learning_rate": 0.0008350977115906007, - "loss": 1.0666, - "step": 8401 - }, - { - "epoch": 0.6462579801553726, - "learning_rate": 0.0008347728226272851, - "loss": 1.3131, - "step": 8402 - }, - 
{ - "epoch": 0.6463348973155911, - "learning_rate": 0.000834447972507277, - "loss": 1.5519, - "step": 8403 - }, - { - "epoch": 0.6464118144758095, - "learning_rate": 0.0008341231612495461, - "loss": 1.1245, - "step": 8404 - }, - { - "epoch": 0.646488731636028, - "learning_rate": 0.000833798388873058, - "loss": 0.9058, - "step": 8405 - }, - { - "epoch": 0.6465656487962465, - "learning_rate": 0.0008334736553967759, - "loss": 1.3004, - "step": 8406 - }, - { - "epoch": 0.6466425659564649, - "learning_rate": 0.0008331489608396618, - "loss": 1.427, - "step": 8407 - }, - { - "epoch": 0.6467194831166834, - "learning_rate": 0.0008328243052206743, - "loss": 1.0121, - "step": 8408 - }, - { - "epoch": 0.6467964002769018, - "learning_rate": 0.0008324996885587714, - "loss": 1.2156, - "step": 8409 - }, - { - "epoch": 0.6468733174371202, - "learning_rate": 0.0008321751108729075, - "loss": 1.3845, - "step": 8410 - }, - { - "epoch": 0.6469502345973387, - "learning_rate": 0.0008318505721820348, - "loss": 0.8668, - "step": 8411 - }, - { - "epoch": 0.6470271517575571, - "learning_rate": 0.0008315260725051036, - "loss": 1.0659, - "step": 8412 - }, - { - "epoch": 0.6471040689177756, - "learning_rate": 0.000831201611861061, - "loss": 1.1882, - "step": 8413 - }, - { - "epoch": 0.647180986077994, - "learning_rate": 0.0008308771902688544, - "loss": 0.8096, - "step": 8414 - }, - { - "epoch": 0.6472579032382124, - "learning_rate": 0.0008305528077474255, - "loss": 0.9452, - "step": 8415 - }, - { - "epoch": 0.6473348203984309, - "learning_rate": 0.0008302284643157153, - "loss": 1.2173, - "step": 8416 - }, - { - "epoch": 0.6474117375586493, - "learning_rate": 0.0008299041599926636, - "loss": 0.9444, - "step": 8417 - }, - { - "epoch": 0.6474886547188677, - "learning_rate": 0.0008295798947972063, - "loss": 0.9073, - "step": 8418 - }, - { - "epoch": 0.6475655718790863, - "learning_rate": 0.0008292556687482778, - "loss": 1.0288, - "step": 8419 - }, - { - "epoch": 0.6476424890393047, - "learning_rate": 0.0008289314818648098, - "loss": 1.2546, - "step": 8420 - }, - { - "epoch": 0.6477194061995231, - "learning_rate": 0.0008286073341657317, - "loss": 1.1336, - "step": 8421 - }, - { - "epoch": 0.6477963233597416, - "learning_rate": 0.0008282832256699709, - "loss": 0.7539, - "step": 8422 - }, - { - "epoch": 0.64787324051996, - "learning_rate": 0.0008279591563964528, - "loss": 0.8988, - "step": 8423 - }, - { - "epoch": 0.6479501576801785, - "learning_rate": 0.0008276351263641002, - "loss": 1.029, - "step": 8424 - }, - { - "epoch": 0.6480270748403969, - "learning_rate": 0.0008273111355918332, - "loss": 0.9604, - "step": 8425 - }, - { - "epoch": 0.6481039920006153, - "learning_rate": 0.0008269871840985701, - "loss": 1.4591, - "step": 8426 - }, - { - "epoch": 0.6481809091608338, - "learning_rate": 0.000826663271903226, - "loss": 1.145, - "step": 8427 - }, - { - "epoch": 0.6482578263210522, - "learning_rate": 0.0008263393990247165, - "loss": 1.0779, - "step": 8428 - }, - { - "epoch": 0.6483347434812706, - "learning_rate": 0.000826015565481951, - "loss": 1.0784, - "step": 8429 - }, - { - "epoch": 0.6484116606414891, - "learning_rate": 0.0008256917712938382, - "loss": 1.0924, - "step": 8430 - }, - { - "epoch": 0.6484885778017075, - "learning_rate": 0.0008253680164792865, - "loss": 1.3725, - "step": 8431 - }, - { - "epoch": 0.6485654949619261, - "learning_rate": 0.0008250443010571994, - "loss": 1.3314, - "step": 8432 - }, - { - "epoch": 0.6486424121221445, - "learning_rate": 0.0008247206250464788, - "loss": 0.8478, - "step": 8433 - }, - { 
- "epoch": 0.6487193292823629, - "learning_rate": 0.0008243969884660247, - "loss": 1.0771, - "step": 8434 - }, - { - "epoch": 0.6487962464425814, - "learning_rate": 0.0008240733913347341, - "loss": 0.9964, - "step": 8435 - }, - { - "epoch": 0.6488731636027998, - "learning_rate": 0.000823749833671503, - "loss": 1.3977, - "step": 8436 - }, - { - "epoch": 0.6489500807630182, - "learning_rate": 0.0008234263154952245, - "loss": 0.9625, - "step": 8437 - }, - { - "epoch": 0.6490269979232367, - "learning_rate": 0.0008231028368247876, - "loss": 0.9187, - "step": 8438 - }, - { - "epoch": 0.6491039150834551, - "learning_rate": 0.0008227793976790818, - "loss": 1.3996, - "step": 8439 - }, - { - "epoch": 0.6491808322436735, - "learning_rate": 0.0008224559980769928, - "loss": 0.9809, - "step": 8440 - }, - { - "epoch": 0.649257749403892, - "learning_rate": 0.0008221326380374044, - "loss": 1.2785, - "step": 8441 - }, - { - "epoch": 0.6493346665641104, - "learning_rate": 0.0008218093175791976, - "loss": 1.2468, - "step": 8442 - }, - { - "epoch": 0.649411583724329, - "learning_rate": 0.0008214860367212509, - "loss": 1.1019, - "step": 8443 - }, - { - "epoch": 0.6494885008845473, - "learning_rate": 0.0008211627954824422, - "loss": 1.2127, - "step": 8444 - }, - { - "epoch": 0.6495654180447658, - "learning_rate": 0.0008208395938816455, - "loss": 1.3368, - "step": 8445 - }, - { - "epoch": 0.6496423352049843, - "learning_rate": 0.0008205164319377327, - "loss": 1.2483, - "step": 8446 - }, - { - "epoch": 0.6497192523652027, - "learning_rate": 0.0008201933096695735, - "loss": 1.2783, - "step": 8447 - }, - { - "epoch": 0.6497961695254211, - "learning_rate": 0.0008198702270960354, - "loss": 1.3545, - "step": 8448 - }, - { - "epoch": 0.6498730866856396, - "learning_rate": 0.000819547184235983, - "loss": 1.2456, - "step": 8449 - }, - { - "epoch": 0.649950003845858, - "learning_rate": 0.0008192241811082808, - "loss": 1.492, - "step": 8450 - }, - { - "epoch": 0.6500269210060765, - "learning_rate": 0.0008189012177317869, - "loss": 1.0569, - "step": 8451 - }, - { - "epoch": 0.6501038381662949, - "learning_rate": 0.0008185782941253615, - "loss": 1.1524, - "step": 8452 - }, - { - "epoch": 0.6501807553265133, - "learning_rate": 0.0008182554103078596, - "loss": 1.4448, - "step": 8453 - }, - { - "epoch": 0.6502576724867318, - "learning_rate": 0.0008179325662981347, - "loss": 1.024, - "step": 8454 - }, - { - "epoch": 0.6503345896469502, - "learning_rate": 0.0008176097621150382, - "loss": 1.0922, - "step": 8455 - }, - { - "epoch": 0.6504115068071686, - "learning_rate": 0.000817286997777419, - "loss": 2.074, - "step": 8456 - }, - { - "epoch": 0.6504884239673872, - "learning_rate": 0.0008169642733041226, - "loss": 1.0375, - "step": 8457 - }, - { - "epoch": 0.6505653411276056, - "learning_rate": 0.000816641588713995, - "loss": 1.3542, - "step": 8458 - }, - { - "epoch": 0.650642258287824, - "learning_rate": 0.0008163189440258777, - "loss": 0.7715, - "step": 8459 - }, - { - "epoch": 0.6507191754480425, - "learning_rate": 0.0008159963392586084, - "loss": 1.0151, - "step": 8460 - }, - { - "epoch": 0.6507960926082609, - "learning_rate": 0.0008156737744310266, - "loss": 1.2908, - "step": 8461 - }, - { - "epoch": 0.6508730097684794, - "learning_rate": 0.0008153512495619654, - "loss": 1.152, - "step": 8462 - }, - { - "epoch": 0.6509499269286978, - "learning_rate": 0.0008150287646702597, - "loss": 0.6677, - "step": 8463 - }, - { - "epoch": 0.6510268440889162, - "learning_rate": 0.0008147063197747376, - "loss": 1.2038, - "step": 8464 - }, - { - 
"epoch": 0.6511037612491347, - "learning_rate": 0.0008143839148942271, - "loss": 0.8156, - "step": 8465 - }, - { - "epoch": 0.6511806784093531, - "learning_rate": 0.000814061550047555, - "loss": 1.3697, - "step": 8466 - }, - { - "epoch": 0.6512575955695715, - "learning_rate": 0.0008137392252535436, - "loss": 0.9923, - "step": 8467 - }, - { - "epoch": 0.65133451272979, - "learning_rate": 0.0008134169405310144, - "loss": 1.8218, - "step": 8468 - }, - { - "epoch": 0.6514114298900084, - "learning_rate": 0.0008130946958987855, - "loss": 1.0249, - "step": 8469 - }, - { - "epoch": 0.651488347050227, - "learning_rate": 0.0008127724913756724, - "loss": 1.2627, - "step": 8470 - }, - { - "epoch": 0.6515652642104454, - "learning_rate": 0.0008124503269804905, - "loss": 0.9481, - "step": 8471 - }, - { - "epoch": 0.6516421813706638, - "learning_rate": 0.0008121282027320509, - "loss": 0.8972, - "step": 8472 - }, - { - "epoch": 0.6517190985308823, - "learning_rate": 0.0008118061186491616, - "loss": 1.292, - "step": 8473 - }, - { - "epoch": 0.6517960156911007, - "learning_rate": 0.0008114840747506307, - "loss": 1.2915, - "step": 8474 - }, - { - "epoch": 0.6518729328513191, - "learning_rate": 0.000811162071055262, - "loss": 1.2656, - "step": 8475 - }, - { - "epoch": 0.6519498500115376, - "learning_rate": 0.0008108401075818582, - "loss": 1.2514, - "step": 8476 - }, - { - "epoch": 0.652026767171756, - "learning_rate": 0.0008105181843492184, - "loss": 1.3778, - "step": 8477 - }, - { - "epoch": 0.6521036843319744, - "learning_rate": 0.0008101963013761407, - "loss": 1.355, - "step": 8478 - }, - { - "epoch": 0.6521806014921929, - "learning_rate": 0.0008098744586814192, - "loss": 1.2155, - "step": 8479 - }, - { - "epoch": 0.6522575186524113, - "learning_rate": 0.0008095526562838476, - "loss": 0.9347, - "step": 8480 - }, - { - "epoch": 0.6523344358126298, - "learning_rate": 0.0008092308942022163, - "loss": 1.192, - "step": 8481 - }, - { - "epoch": 0.6524113529728482, - "learning_rate": 0.000808909172455313, - "loss": 1.2006, - "step": 8482 - }, - { - "epoch": 0.6524882701330667, - "learning_rate": 0.0008085874910619232, - "loss": 0.7732, - "step": 8483 - }, - { - "epoch": 0.6525651872932852, - "learning_rate": 0.0008082658500408298, - "loss": 0.8882, - "step": 8484 - }, - { - "epoch": 0.6526421044535036, - "learning_rate": 0.0008079442494108155, - "loss": 0.8389, - "step": 8485 - }, - { - "epoch": 0.652719021613722, - "learning_rate": 0.0008076226891906573, - "loss": 0.9276, - "step": 8486 - }, - { - "epoch": 0.6527959387739405, - "learning_rate": 0.0008073011693991309, - "loss": 1.6867, - "step": 8487 - }, - { - "epoch": 0.6528728559341589, - "learning_rate": 0.0008069796900550119, - "loss": 1.0525, - "step": 8488 - }, - { - "epoch": 0.6529497730943774, - "learning_rate": 0.0008066582511770712, - "loss": 1.1896, - "step": 8489 - }, - { - "epoch": 0.6530266902545958, - "learning_rate": 0.0008063368527840777, - "loss": 1.534, - "step": 8490 - }, - { - "epoch": 0.6531036074148142, - "learning_rate": 0.0008060154948947982, - "loss": 1.292, - "step": 8491 - }, - { - "epoch": 0.6531805245750327, - "learning_rate": 0.0008056941775279967, - "loss": 1.1956, - "step": 8492 - }, - { - "epoch": 0.6532574417352511, - "learning_rate": 0.0008053729007024363, - "loss": 1.2427, - "step": 8493 - }, - { - "epoch": 0.6533343588954695, - "learning_rate": 0.0008050516644368769, - "loss": 1.2715, - "step": 8494 - }, - { - "epoch": 0.653411276055688, - "learning_rate": 0.0008047304687500737, - "loss": 1.4022, - "step": 8495 - }, - { - 
"epoch": 0.6534881932159065, - "learning_rate": 0.0008044093136607836, - "loss": 1.4004, - "step": 8496 - }, - { - "epoch": 0.6535651103761249, - "learning_rate": 0.0008040881991877588, - "loss": 1.005, - "step": 8497 - }, - { - "epoch": 0.6536420275363434, - "learning_rate": 0.0008037671253497492, - "loss": 0.9758, - "step": 8498 - }, - { - "epoch": 0.6537189446965618, - "learning_rate": 0.0008034460921655027, - "loss": 1.2197, - "step": 8499 - }, - { - "epoch": 0.6537958618567803, - "learning_rate": 0.0008031250996537642, - "loss": 1.2212, - "step": 8500 - }, - { - "epoch": 0.6538727790169987, - "learning_rate": 0.000802804147833278, - "loss": 1.3095, - "step": 8501 - }, - { - "epoch": 0.6539496961772171, - "learning_rate": 0.0008024832367227844, - "loss": 0.8511, - "step": 8502 - }, - { - "epoch": 0.6540266133374356, - "learning_rate": 0.0008021623663410215, - "loss": 0.9117, - "step": 8503 - }, - { - "epoch": 0.654103530497654, - "learning_rate": 0.0008018415367067251, - "loss": 1.19, - "step": 8504 - }, - { - "epoch": 0.6541804476578724, - "learning_rate": 0.0008015207478386291, - "loss": 1.0384, - "step": 8505 - }, - { - "epoch": 0.6542573648180909, - "learning_rate": 0.000801199999755464, - "loss": 1.12, - "step": 8506 - }, - { - "epoch": 0.6543342819783093, - "learning_rate": 0.0008008792924759602, - "loss": 0.9334, - "step": 8507 - }, - { - "epoch": 0.6544111991385279, - "learning_rate": 0.0008005586260188422, - "loss": 1.1652, - "step": 8508 - }, - { - "epoch": 0.6544881162987463, - "learning_rate": 0.0008002380004028352, - "loss": 0.9915, - "step": 8509 - }, - { - "epoch": 0.6545650334589647, - "learning_rate": 0.000799917415646661, - "loss": 1.2801, - "step": 8510 - }, - { - "epoch": 0.6546419506191832, - "learning_rate": 0.0007995968717690383, - "loss": 1.2619, - "step": 8511 - }, - { - "epoch": 0.6547188677794016, - "learning_rate": 0.0007992763687886843, - "loss": 0.9215, - "step": 8512 - }, - { - "epoch": 0.65479578493962, - "learning_rate": 0.0007989559067243133, - "loss": 1.0984, - "step": 8513 - }, - { - "epoch": 0.6548727020998385, - "learning_rate": 0.0007986354855946371, - "loss": 1.2901, - "step": 8514 - }, - { - "epoch": 0.6549496192600569, - "learning_rate": 0.0007983151054183662, - "loss": 1.2948, - "step": 8515 - }, - { - "epoch": 0.6550265364202754, - "learning_rate": 0.0007979947662142076, - "loss": 1.1017, - "step": 8516 - }, - { - "epoch": 0.6551034535804938, - "learning_rate": 0.0007976744680008665, - "loss": 1.2741, - "step": 8517 - }, - { - "epoch": 0.6551803707407122, - "learning_rate": 0.0007973542107970449, - "loss": 1.1714, - "step": 8518 - }, - { - "epoch": 0.6552572879009307, - "learning_rate": 0.0007970339946214427, - "loss": 1.087, - "step": 8519 - }, - { - "epoch": 0.6553342050611491, - "learning_rate": 0.0007967138194927594, - "loss": 1.3374, - "step": 8520 - }, - { - "epoch": 0.6554111222213675, - "learning_rate": 0.0007963936854296884, - "loss": 0.9367, - "step": 8521 - }, - { - "epoch": 0.6554880393815861, - "learning_rate": 0.0007960735924509229, - "loss": 1.2661, - "step": 8522 - }, - { - "epoch": 0.6555649565418045, - "learning_rate": 0.0007957535405751544, - "loss": 1.1345, - "step": 8523 - }, - { - "epoch": 0.6556418737020229, - "learning_rate": 0.0007954335298210708, - "loss": 1.5253, - "step": 8524 - }, - { - "epoch": 0.6557187908622414, - "learning_rate": 0.0007951135602073577, - "loss": 0.8936, - "step": 8525 - }, - { - "epoch": 0.6557957080224598, - "learning_rate": 0.0007947936317526982, - "loss": 0.944, - "step": 8526 - }, - { - 
"epoch": 0.6558726251826783, - "learning_rate": 0.000794473744475773, - "loss": 1.3554, - "step": 8527 - }, - { - "epoch": 0.6559495423428967, - "learning_rate": 0.0007941538983952617, - "loss": 1.6405, - "step": 8528 - }, - { - "epoch": 0.6560264595031151, - "learning_rate": 0.0007938340935298407, - "loss": 1.3003, - "step": 8529 - }, - { - "epoch": 0.6561033766633336, - "learning_rate": 0.0007935143298981816, - "loss": 1.0776, - "step": 8530 - }, - { - "epoch": 0.656180293823552, - "learning_rate": 0.0007931946075189576, - "loss": 0.9144, - "step": 8531 - }, - { - "epoch": 0.6562572109837704, - "learning_rate": 0.000792874926410837, - "loss": 1.6682, - "step": 8532 - }, - { - "epoch": 0.656334128143989, - "learning_rate": 0.0007925552865924867, - "loss": 1.0407, - "step": 8533 - }, - { - "epoch": 0.6564110453042074, - "learning_rate": 0.0007922356880825702, - "loss": 1.3451, - "step": 8534 - }, - { - "epoch": 0.6564879624644259, - "learning_rate": 0.000791916130899749, - "loss": 1.165, - "step": 8535 - }, - { - "epoch": 0.6565648796246443, - "learning_rate": 0.0007915966150626836, - "loss": 0.8988, - "step": 8536 - }, - { - "epoch": 0.6566417967848627, - "learning_rate": 0.0007912771405900299, - "loss": 1.065, - "step": 8537 - }, - { - "epoch": 0.6567187139450812, - "learning_rate": 0.0007909577075004427, - "loss": 1.1627, - "step": 8538 - }, - { - "epoch": 0.6567956311052996, - "learning_rate": 0.000790638315812574, - "loss": 1.3178, - "step": 8539 - }, - { - "epoch": 0.656872548265518, - "learning_rate": 0.000790318965545073, - "loss": 0.6202, - "step": 8540 - }, - { - "epoch": 0.6569494654257365, - "learning_rate": 0.0007899996567165869, - "loss": 1.1995, - "step": 8541 - }, - { - "epoch": 0.6570263825859549, - "learning_rate": 0.0007896803893457618, - "loss": 1.2792, - "step": 8542 - }, - { - "epoch": 0.6571032997461733, - "learning_rate": 0.0007893611634512386, - "loss": 1.3113, - "step": 8543 - }, - { - "epoch": 0.6571802169063918, - "learning_rate": 0.000789041979051657, - "loss": 1.0086, - "step": 8544 - }, - { - "epoch": 0.6572571340666102, - "learning_rate": 0.0007887228361656559, - "loss": 1.2499, - "step": 8545 - }, - { - "epoch": 0.6573340512268288, - "learning_rate": 0.0007884037348118696, - "loss": 1.4176, - "step": 8546 - }, - { - "epoch": 0.6574109683870472, - "learning_rate": 0.0007880846750089307, - "loss": 0.8643, - "step": 8547 - }, - { - "epoch": 0.6574878855472656, - "learning_rate": 0.0007877656567754697, - "loss": 1.1159, - "step": 8548 - }, - { - "epoch": 0.6575648027074841, - "learning_rate": 0.0007874466801301136, - "loss": 1.2298, - "step": 8549 - }, - { - "epoch": 0.6576417198677025, - "learning_rate": 0.000787127745091489, - "loss": 1.4595, - "step": 8550 - }, - { - "epoch": 0.6577186370279209, - "learning_rate": 0.0007868088516782184, - "loss": 1.1378, - "step": 8551 - }, - { - "epoch": 0.6577955541881394, - "learning_rate": 0.0007864899999089223, - "loss": 1.0557, - "step": 8552 - }, - { - "epoch": 0.6578724713483578, - "learning_rate": 0.0007861711898022185, - "loss": 1.337, - "step": 8553 - }, - { - "epoch": 0.6579493885085763, - "learning_rate": 0.0007858524213767225, - "loss": 1.1447, - "step": 8554 - }, - { - "epoch": 0.6580263056687947, - "learning_rate": 0.0007855336946510491, - "loss": 1.3924, - "step": 8555 - }, - { - "epoch": 0.6581032228290131, - "learning_rate": 0.0007852150096438069, - "loss": 1.3481, - "step": 8556 - }, - { - "epoch": 0.6581801399892316, - "learning_rate": 0.000784896366373605, - "loss": 0.9431, - "step": 8557 - }, - { - 
"epoch": 0.65825705714945, - "learning_rate": 0.00078457776485905, - "loss": 1.3165, - "step": 8558 - }, - { - "epoch": 0.6583339743096684, - "learning_rate": 0.0007842592051187448, - "loss": 1.1005, - "step": 8559 - }, - { - "epoch": 0.658410891469887, - "learning_rate": 0.0007839406871712908, - "loss": 0.8452, - "step": 8560 - }, - { - "epoch": 0.6584878086301054, - "learning_rate": 0.000783622211035286, - "loss": 0.9317, - "step": 8561 - }, - { - "epoch": 0.6585647257903238, - "learning_rate": 0.0007833037767293271, - "loss": 1.0621, - "step": 8562 - }, - { - "epoch": 0.6586416429505423, - "learning_rate": 0.000782985384272007, - "loss": 1.3057, - "step": 8563 - }, - { - "epoch": 0.6587185601107607, - "learning_rate": 0.0007826670336819181, - "loss": 0.9818, - "step": 8564 - }, - { - "epoch": 0.6587954772709792, - "learning_rate": 0.0007823487249776488, - "loss": 1.0354, - "step": 8565 - }, - { - "epoch": 0.6588723944311976, - "learning_rate": 0.0007820304581777851, - "loss": 0.7924, - "step": 8566 - }, - { - "epoch": 0.658949311591416, - "learning_rate": 0.0007817122333009114, - "loss": 1.5939, - "step": 8567 - }, - { - "epoch": 0.6590262287516345, - "learning_rate": 0.0007813940503656091, - "loss": 1.0248, - "step": 8568 - }, - { - "epoch": 0.6591031459118529, - "learning_rate": 0.0007810759093904568, - "loss": 1.3644, - "step": 8569 - }, - { - "epoch": 0.6591800630720713, - "learning_rate": 0.0007807578103940317, - "loss": 1.3638, - "step": 8570 - }, - { - "epoch": 0.6592569802322898, - "learning_rate": 0.000780439753394907, - "loss": 1.1535, - "step": 8571 - }, - { - "epoch": 0.6593338973925083, - "learning_rate": 0.0007801217384116553, - "loss": 0.7405, - "step": 8572 - }, - { - "epoch": 0.6594108145527268, - "learning_rate": 0.0007798037654628458, - "loss": 1.2405, - "step": 8573 - }, - { - "epoch": 0.6594877317129452, - "learning_rate": 0.0007794858345670448, - "loss": 1.1254, - "step": 8574 - }, - { - "epoch": 0.6595646488731636, - "learning_rate": 0.0007791679457428171, - "loss": 1.2448, - "step": 8575 - }, - { - "epoch": 0.6596415660333821, - "learning_rate": 0.0007788500990087234, - "loss": 1.2192, - "step": 8576 - }, - { - "epoch": 0.6597184831936005, - "learning_rate": 0.0007785322943833248, - "loss": 1.0965, - "step": 8577 - }, - { - "epoch": 0.6597954003538189, - "learning_rate": 0.0007782145318851779, - "loss": 1.0793, - "step": 8578 - }, - { - "epoch": 0.6598723175140374, - "learning_rate": 0.0007778968115328356, - "loss": 1.3679, - "step": 8579 - }, - { - "epoch": 0.6599492346742558, - "learning_rate": 0.0007775791333448516, - "loss": 1.194, - "step": 8580 - }, - { - "epoch": 0.6600261518344742, - "learning_rate": 0.000777261497339775, - "loss": 1.0274, - "step": 8581 - }, - { - "epoch": 0.6601030689946927, - "learning_rate": 0.0007769439035361527, - "loss": 1.1253, - "step": 8582 - }, - { - "epoch": 0.6601799861549111, - "learning_rate": 0.0007766263519525292, - "loss": 1.2471, - "step": 8583 - }, - { - "epoch": 0.6602569033151297, - "learning_rate": 0.0007763088426074466, - "loss": 1.1612, - "step": 8584 - }, - { - "epoch": 0.660333820475348, - "learning_rate": 0.0007759913755194455, - "loss": 1.2863, - "step": 8585 - }, - { - "epoch": 0.6604107376355665, - "learning_rate": 0.0007756739507070622, - "loss": 0.919, - "step": 8586 - }, - { - "epoch": 0.660487654795785, - "learning_rate": 0.0007753565681888321, - "loss": 1.322, - "step": 8587 - }, - { - "epoch": 0.6605645719560034, - "learning_rate": 0.0007750392279832869, - "loss": 0.8747, - "step": 8588 - }, - { - 
"epoch": 0.6606414891162218, - "learning_rate": 0.0007747219301089571, - "loss": 1.2222, - "step": 8589 - }, - { - "epoch": 0.6607184062764403, - "learning_rate": 0.000774404674584369, - "loss": 1.0856, - "step": 8590 - }, - { - "epoch": 0.6607953234366587, - "learning_rate": 0.000774087461428049, - "loss": 1.197, - "step": 8591 - }, - { - "epoch": 0.6608722405968772, - "learning_rate": 0.0007737702906585178, - "loss": 1.3886, - "step": 8592 - }, - { - "epoch": 0.6609491577570956, - "learning_rate": 0.0007734531622942965, - "loss": 1.0896, - "step": 8593 - }, - { - "epoch": 0.661026074917314, - "learning_rate": 0.0007731360763539024, - "loss": 1.1717, - "step": 8594 - }, - { - "epoch": 0.6611029920775325, - "learning_rate": 0.0007728190328558504, - "loss": 1.1633, - "step": 8595 - }, - { - "epoch": 0.6611799092377509, - "learning_rate": 0.0007725020318186529, - "loss": 1.4007, - "step": 8596 - }, - { - "epoch": 0.6612568263979693, - "learning_rate": 0.0007721850732608199, - "loss": 0.8127, - "step": 8597 - }, - { - "epoch": 0.6613337435581879, - "learning_rate": 0.0007718681572008583, - "loss": 1.1896, - "step": 8598 - }, - { - "epoch": 0.6614106607184063, - "learning_rate": 0.0007715512836572744, - "loss": 1.0697, - "step": 8599 - }, - { - "epoch": 0.6614875778786247, - "learning_rate": 0.0007712344526485711, - "loss": 1.4127, - "step": 8600 - }, - { - "epoch": 0.6615644950388432, - "learning_rate": 0.0007709176641932462, - "loss": 1.2476, - "step": 8601 - }, - { - "epoch": 0.6616414121990616, - "learning_rate": 0.0007706009183097993, - "loss": 1.0648, - "step": 8602 - }, - { - "epoch": 0.6617183293592801, - "learning_rate": 0.0007702842150167245, - "loss": 1.2622, - "step": 8603 - }, - { - "epoch": 0.6617952465194985, - "learning_rate": 0.000769967554332516, - "loss": 0.8255, - "step": 8604 - }, - { - "epoch": 0.6618721636797169, - "learning_rate": 0.0007696509362756622, - "loss": 1.1466, - "step": 8605 - }, - { - "epoch": 0.6619490808399354, - "learning_rate": 0.0007693343608646508, - "loss": 1.3083, - "step": 8606 - }, - { - "epoch": 0.6620259980001538, - "learning_rate": 0.0007690178281179682, - "loss": 1.3135, - "step": 8607 - }, - { - "epoch": 0.6621029151603722, - "learning_rate": 0.0007687013380540961, - "loss": 1.053, - "step": 8608 - }, - { - "epoch": 0.6621798323205907, - "learning_rate": 0.0007683848906915153, - "loss": 0.8834, - "step": 8609 - }, - { - "epoch": 0.6622567494808091, - "learning_rate": 0.0007680684860487034, - "loss": 1.067, - "step": 8610 - }, - { - "epoch": 0.6623336666410277, - "learning_rate": 0.0007677521241441344, - "loss": 1.2828, - "step": 8611 - }, - { - "epoch": 0.6624105838012461, - "learning_rate": 0.000767435804996283, - "loss": 1.2955, - "step": 8612 - }, - { - "epoch": 0.6624875009614645, - "learning_rate": 0.0007671195286236186, - "loss": 1.196, - "step": 8613 - }, - { - "epoch": 0.662564418121683, - "learning_rate": 0.0007668032950446077, - "loss": 1.2471, - "step": 8614 - }, - { - "epoch": 0.6626413352819014, - "learning_rate": 0.0007664871042777172, - "loss": 1.2154, - "step": 8615 - }, - { - "epoch": 0.6627182524421198, - "learning_rate": 0.0007661709563414091, - "loss": 1.3028, - "step": 8616 - }, - { - "epoch": 0.6627951696023383, - "learning_rate": 0.0007658548512541435, - "loss": 1.4589, - "step": 8617 - }, - { - "epoch": 0.6628720867625567, - "learning_rate": 0.0007655387890343785, - "loss": 1.0832, - "step": 8618 - }, - { - "epoch": 0.6629490039227752, - "learning_rate": 0.000765222769700568, - "loss": 1.2397, - "step": 8619 - }, - { - 
"epoch": 0.6630259210829936, - "learning_rate": 0.0007649067932711668, - "loss": 1.276, - "step": 8620 - }, - { - "epoch": 0.663102838243212, - "learning_rate": 0.0007645908597646239, - "loss": 1.0063, - "step": 8621 - }, - { - "epoch": 0.6631797554034305, - "learning_rate": 0.0007642749691993871, - "loss": 0.8444, - "step": 8622 - }, - { - "epoch": 0.663256672563649, - "learning_rate": 0.0007639591215939015, - "loss": 1.1295, - "step": 8623 - }, - { - "epoch": 0.6633335897238674, - "learning_rate": 0.00076364331696661, - "loss": 0.9346, - "step": 8624 - }, - { - "epoch": 0.6634105068840859, - "learning_rate": 0.0007633275553359519, - "loss": 1.3005, - "step": 8625 - }, - { - "epoch": 0.6634874240443043, - "learning_rate": 0.000763011836720367, - "loss": 1.2666, - "step": 8626 - }, - { - "epoch": 0.6635643412045227, - "learning_rate": 0.0007626961611382882, - "loss": 1.3495, - "step": 8627 - }, - { - "epoch": 0.6636412583647412, - "learning_rate": 0.0007623805286081485, - "loss": 1.2139, - "step": 8628 - }, - { - "epoch": 0.6637181755249596, - "learning_rate": 0.0007620649391483791, - "loss": 1.0073, - "step": 8629 - }, - { - "epoch": 0.6637950926851781, - "learning_rate": 0.0007617493927774069, - "loss": 1.8874, - "step": 8630 - }, - { - "epoch": 0.6638720098453965, - "learning_rate": 0.0007614338895136569, - "loss": 1.246, - "step": 8631 - }, - { - "epoch": 0.6639489270056149, - "learning_rate": 0.0007611184293755519, - "loss": 1.1016, - "step": 8632 - }, - { - "epoch": 0.6640258441658334, - "learning_rate": 0.0007608030123815114, - "loss": 0.963, - "step": 8633 - }, - { - "epoch": 0.6641027613260518, - "learning_rate": 0.0007604876385499538, - "loss": 0.8887, - "step": 8634 - }, - { - "epoch": 0.6641796784862702, - "learning_rate": 0.0007601723078992943, - "loss": 1.2444, - "step": 8635 - }, - { - "epoch": 0.6642565956464888, - "learning_rate": 0.0007598570204479435, - "loss": 1.2917, - "step": 8636 - }, - { - "epoch": 0.6643335128067072, - "learning_rate": 0.0007595417762143135, - "loss": 1.231, - "step": 8637 - }, - { - "epoch": 0.6644104299669257, - "learning_rate": 0.0007592265752168102, - "loss": 1.2965, - "step": 8638 - }, - { - "epoch": 0.6644873471271441, - "learning_rate": 0.0007589114174738402, - "loss": 0.9876, - "step": 8639 - }, - { - "epoch": 0.6645642642873625, - "learning_rate": 0.0007585963030038046, - "loss": 1.1763, - "step": 8640 - }, - { - "epoch": 0.664641181447581, - "learning_rate": 0.0007582812318251026, - "loss": 1.6656, - "step": 8641 - }, - { - "epoch": 0.6647180986077994, - "learning_rate": 0.0007579662039561332, - "loss": 1.1732, - "step": 8642 - }, - { - "epoch": 0.6647950157680178, - "learning_rate": 0.0007576512194152905, - "loss": 1.2624, - "step": 8643 - }, - { - "epoch": 0.6648719329282363, - "learning_rate": 0.0007573362782209668, - "loss": 0.695, - "step": 8644 - }, - { - "epoch": 0.6649488500884547, - "learning_rate": 0.0007570213803915518, - "loss": 1.1702, - "step": 8645 - }, - { - "epoch": 0.6650257672486731, - "learning_rate": 0.0007567065259454325, - "loss": 1.5539, - "step": 8646 - }, - { - "epoch": 0.6651026844088916, - "learning_rate": 0.0007563917149009935, - "loss": 0.9346, - "step": 8647 - }, - { - "epoch": 0.66517960156911, - "learning_rate": 0.0007560769472766181, - "loss": 1.0809, - "step": 8648 - }, - { - "epoch": 0.6652565187293286, - "learning_rate": 0.0007557622230906839, - "loss": 0.9848, - "step": 8649 - }, - { - "epoch": 0.665333435889547, - "learning_rate": 0.00075544754236157, - "loss": 1.0547, - "step": 8650 - }, - { - 
"epoch": 0.6654103530497654, - "learning_rate": 0.0007551329051076499, - "loss": 0.9818, - "step": 8651 - }, - { - "epoch": 0.6654872702099839, - "learning_rate": 0.0007548183113472958, - "loss": 1.2576, - "step": 8652 - }, - { - "epoch": 0.6655641873702023, - "learning_rate": 0.0007545037610988772, - "loss": 1.4696, - "step": 8653 - }, - { - "epoch": 0.6656411045304207, - "learning_rate": 0.0007541892543807606, - "loss": 1.1655, - "step": 8654 - }, - { - "epoch": 0.6657180216906392, - "learning_rate": 0.0007538747912113104, - "loss": 1.3117, - "step": 8655 - }, - { - "epoch": 0.6657949388508576, - "learning_rate": 0.0007535603716088892, - "loss": 1.2257, - "step": 8656 - }, - { - "epoch": 0.6658718560110761, - "learning_rate": 0.000753245995591856, - "loss": 0.9942, - "step": 8657 - }, - { - "epoch": 0.6659487731712945, - "learning_rate": 0.000752931663178567, - "loss": 1.17, - "step": 8658 - }, - { - "epoch": 0.6660256903315129, - "learning_rate": 0.000752617374387377, - "loss": 1.0842, - "step": 8659 - }, - { - "epoch": 0.6661026074917314, - "learning_rate": 0.0007523031292366369, - "loss": 1.1858, - "step": 8660 - }, - { - "epoch": 0.6661795246519499, - "learning_rate": 0.0007519889277446973, - "loss": 1.1992, - "step": 8661 - }, - { - "epoch": 0.6662564418121683, - "learning_rate": 0.0007516747699299034, - "loss": 1.3494, - "step": 8662 - }, - { - "epoch": 0.6663333589723868, - "learning_rate": 0.0007513606558105987, - "loss": 1.2976, - "step": 8663 - }, - { - "epoch": 0.6664102761326052, - "learning_rate": 0.0007510465854051264, - "loss": 0.881, - "step": 8664 - }, - { - "epoch": 0.6664871932928236, - "learning_rate": 0.0007507325587318244, - "loss": 1.3611, - "step": 8665 - }, - { - "epoch": 0.6665641104530421, - "learning_rate": 0.0007504185758090291, - "loss": 1.1902, - "step": 8666 - }, - { - "epoch": 0.6666410276132605, - "learning_rate": 0.0007501046366550746, - "loss": 1.2523, - "step": 8667 - }, - { - "epoch": 0.666717944773479, - "learning_rate": 0.0007497907412882909, - "loss": 1.0071, - "step": 8668 - }, - { - "epoch": 0.6667948619336974, - "learning_rate": 0.0007494768897270084, - "loss": 1.1752, - "step": 8669 - }, - { - "epoch": 0.6668717790939158, - "learning_rate": 0.0007491630819895532, - "loss": 1.0591, - "step": 8670 - }, - { - "epoch": 0.6669486962541343, - "learning_rate": 0.0007488493180942469, - "loss": 0.9658, - "step": 8671 - }, - { - "epoch": 0.6670256134143527, - "learning_rate": 0.0007485355980594124, - "loss": 1.0712, - "step": 8672 - }, - { - "epoch": 0.6671025305745711, - "learning_rate": 0.0007482219219033674, - "loss": 1.1305, - "step": 8673 - }, - { - "epoch": 0.6671794477347897, - "learning_rate": 0.0007479082896444278, - "loss": 1.2732, - "step": 8674 - }, - { - "epoch": 0.6672563648950081, - "learning_rate": 0.0007475947013009071, - "loss": 1.1556, - "step": 8675 - }, - { - "epoch": 0.6673332820552266, - "learning_rate": 0.0007472811568911155, - "loss": 1.1323, - "step": 8676 - }, - { - "epoch": 0.667410199215445, - "learning_rate": 0.000746967656433362, - "loss": 1.2265, - "step": 8677 - }, - { - "epoch": 0.6674871163756634, - "learning_rate": 0.0007466541999459517, - "loss": 1.0317, - "step": 8678 - }, - { - "epoch": 0.6675640335358819, - "learning_rate": 0.0007463407874471879, - "loss": 0.9159, - "step": 8679 - }, - { - "epoch": 0.6676409506961003, - "learning_rate": 0.000746027418955371, - "loss": 1.0863, - "step": 8680 - }, - { - "epoch": 0.6677178678563187, - "learning_rate": 0.0007457140944887989, - "loss": 0.951, - "step": 8681 - }, - { - 
"epoch": 0.6677947850165372, - "learning_rate": 0.0007454008140657663, - "loss": 0.9111, - "step": 8682 - }, - { - "epoch": 0.6678717021767556, - "learning_rate": 0.0007450875777045677, - "loss": 0.9591, - "step": 8683 - }, - { - "epoch": 0.667948619336974, - "learning_rate": 0.0007447743854234918, - "loss": 1.2639, - "step": 8684 - }, - { - "epoch": 0.6680255364971925, - "learning_rate": 0.0007444612372408258, - "loss": 0.9367, - "step": 8685 - }, - { - "epoch": 0.6681024536574109, - "learning_rate": 0.0007441481331748562, - "loss": 1.1169, - "step": 8686 - }, - { - "epoch": 0.6681793708176295, - "learning_rate": 0.000743835073243865, - "loss": 1.1504, - "step": 8687 - }, - { - "epoch": 0.6682562879778479, - "learning_rate": 0.0007435220574661316, - "loss": 1.211, - "step": 8688 - }, - { - "epoch": 0.6683332051380663, - "learning_rate": 0.0007432090858599341, - "loss": 1.1638, - "step": 8689 - }, - { - "epoch": 0.6684101222982848, - "learning_rate": 0.0007428961584435459, - "loss": 1.0729, - "step": 8690 - }, - { - "epoch": 0.6684870394585032, - "learning_rate": 0.0007425832752352407, - "loss": 1.3794, - "step": 8691 - }, - { - "epoch": 0.6685639566187216, - "learning_rate": 0.000742270436253288, - "loss": 1.3861, - "step": 8692 - }, - { - "epoch": 0.6686408737789401, - "learning_rate": 0.0007419576415159529, - "loss": 1.3238, - "step": 8693 - }, - { - "epoch": 0.6687177909391585, - "learning_rate": 0.0007416448910415018, - "loss": 1.3932, - "step": 8694 - }, - { - "epoch": 0.668794708099377, - "learning_rate": 0.0007413321848481954, - "loss": 1.407, - "step": 8695 - }, - { - "epoch": 0.6688716252595954, - "learning_rate": 0.0007410195229542945, - "loss": 1.0167, - "step": 8696 - }, - { - "epoch": 0.6689485424198138, - "learning_rate": 0.0007407069053780539, - "loss": 0.8776, - "step": 8697 - }, - { - "epoch": 0.6690254595800323, - "learning_rate": 0.000740394332137728, - "loss": 1.0069, - "step": 8698 - }, - { - "epoch": 0.6691023767402507, - "learning_rate": 0.0007400818032515692, - "loss": 0.7575, - "step": 8699 - }, - { - "epoch": 0.6691792939004692, - "learning_rate": 0.0007397693187378259, - "loss": 1.0696, - "step": 8700 - }, - { - "epoch": 0.6692562110606877, - "learning_rate": 0.0007394568786147444, - "loss": 1.0674, - "step": 8701 - }, - { - "epoch": 0.6693331282209061, - "learning_rate": 0.0007391444829005684, - "loss": 1.4395, - "step": 8702 - }, - { - "epoch": 0.6694100453811245, - "learning_rate": 0.000738832131613539, - "loss": 1.1392, - "step": 8703 - }, - { - "epoch": 0.669486962541343, - "learning_rate": 0.0007385198247718941, - "loss": 0.9501, - "step": 8704 - }, - { - "epoch": 0.6695638797015614, - "learning_rate": 0.0007382075623938712, - "loss": 1.457, - "step": 8705 - }, - { - "epoch": 0.6696407968617799, - "learning_rate": 0.0007378953444977017, - "loss": 1.1341, - "step": 8706 - }, - { - "epoch": 0.6697177140219983, - "learning_rate": 0.0007375831711016179, - "loss": 1.1324, - "step": 8707 - }, - { - "epoch": 0.6697946311822167, - "learning_rate": 0.0007372710422238471, - "loss": 1.078, - "step": 8708 - }, - { - "epoch": 0.6698715483424352, - "learning_rate": 0.0007369589578826151, - "loss": 1.3785, - "step": 8709 - }, - { - "epoch": 0.6699484655026536, - "learning_rate": 0.0007366469180961449, - "loss": 1.2788, - "step": 8710 - }, - { - "epoch": 0.670025382662872, - "learning_rate": 0.0007363349228826565, - "loss": 1.4992, - "step": 8711 - }, - { - "epoch": 0.6701022998230906, - "learning_rate": 0.0007360229722603675, - "loss": 1.3291, - "step": 8712 - }, - { - 
"epoch": 0.670179216983309, - "learning_rate": 0.0007357110662474938, - "loss": 1.2314, - "step": 8713 - }, - { - "epoch": 0.6702561341435275, - "learning_rate": 0.0007353992048622475, - "loss": 1.1453, - "step": 8714 - }, - { - "epoch": 0.6703330513037459, - "learning_rate": 0.0007350873881228384, - "loss": 1.0049, - "step": 8715 - }, - { - "epoch": 0.6704099684639643, - "learning_rate": 0.0007347756160474739, - "loss": 1.227, - "step": 8716 - }, - { - "epoch": 0.6704868856241828, - "learning_rate": 0.0007344638886543582, - "loss": 1.1893, - "step": 8717 - }, - { - "epoch": 0.6705638027844012, - "learning_rate": 0.0007341522059616944, - "loss": 1.3653, - "step": 8718 - }, - { - "epoch": 0.6706407199446196, - "learning_rate": 0.000733840567987682, - "loss": 1.4323, - "step": 8719 - }, - { - "epoch": 0.6707176371048381, - "learning_rate": 0.0007335289747505161, - "loss": 1.0225, - "step": 8720 - }, - { - "epoch": 0.6707945542650565, - "learning_rate": 0.0007332174262683926, - "loss": 0.6422, - "step": 8721 - }, - { - "epoch": 0.6708714714252749, - "learning_rate": 0.000732905922559503, - "loss": 0.9006, - "step": 8722 - }, - { - "epoch": 0.6709483885854934, - "learning_rate": 0.0007325944636420358, - "loss": 1.0171, - "step": 8723 - }, - { - "epoch": 0.6710253057457118, - "learning_rate": 0.0007322830495341779, - "loss": 0.9103, - "step": 8724 - }, - { - "epoch": 0.6711022229059304, - "learning_rate": 0.000731971680254112, - "loss": 1.1405, - "step": 8725 - }, - { - "epoch": 0.6711791400661488, - "learning_rate": 0.0007316603558200209, - "loss": 1.1034, - "step": 8726 - }, - { - "epoch": 0.6712560572263672, - "learning_rate": 0.0007313490762500822, - "loss": 1.3074, - "step": 8727 - }, - { - "epoch": 0.6713329743865857, - "learning_rate": 0.000731037841562472, - "loss": 0.9205, - "step": 8728 - }, - { - "epoch": 0.6714098915468041, - "learning_rate": 0.0007307266517753638, - "loss": 1.4002, - "step": 8729 - }, - { - "epoch": 0.6714868087070225, - "learning_rate": 0.0007304155069069279, - "loss": 1.4464, - "step": 8730 - }, - { - "epoch": 0.671563725867241, - "learning_rate": 0.0007301044069753322, - "loss": 1.2686, - "step": 8731 - }, - { - "epoch": 0.6716406430274594, - "learning_rate": 0.0007297933519987436, - "loss": 0.9774, - "step": 8732 - }, - { - "epoch": 0.6717175601876779, - "learning_rate": 0.0007294823419953226, - "loss": 1.4144, - "step": 8733 - }, - { - "epoch": 0.6717944773478963, - "learning_rate": 0.0007291713769832316, - "loss": 0.6688, - "step": 8734 - }, - { - "epoch": 0.6718713945081147, - "learning_rate": 0.000728860456980627, - "loss": 1.3451, - "step": 8735 - }, - { - "epoch": 0.6719483116683332, - "learning_rate": 0.0007285495820056639, - "loss": 1.0131, - "step": 8736 - }, - { - "epoch": 0.6720252288285516, - "learning_rate": 0.0007282387520764948, - "loss": 1.3469, - "step": 8737 - }, - { - "epoch": 0.67210214598877, - "learning_rate": 0.0007279279672112693, - "loss": 0.9187, - "step": 8738 - }, - { - "epoch": 0.6721790631489886, - "learning_rate": 0.0007276172274281337, - "loss": 1.411, - "step": 8739 - }, - { - "epoch": 0.672255980309207, - "learning_rate": 0.0007273065327452339, - "loss": 1.0844, - "step": 8740 - }, - { - "epoch": 0.6723328974694255, - "learning_rate": 0.0007269958831807109, - "loss": 1.2339, - "step": 8741 - }, - { - "epoch": 0.6724098146296439, - "learning_rate": 0.0007266852787527038, - "loss": 1.1215, - "step": 8742 - }, - { - "epoch": 0.6724867317898623, - "learning_rate": 0.0007263747194793493, - "loss": 0.9118, - "step": 8743 - }, - { - 
"epoch": 0.6725636489500808, - "learning_rate": 0.0007260642053787806, - "loss": 1.1623, - "step": 8744 - }, - { - "epoch": 0.6726405661102992, - "learning_rate": 0.0007257537364691306, - "loss": 1.271, - "step": 8745 - }, - { - "epoch": 0.6727174832705176, - "learning_rate": 0.0007254433127685263, - "loss": 1.3033, - "step": 8746 - }, - { - "epoch": 0.6727944004307361, - "learning_rate": 0.0007251329342950935, - "loss": 1.141, - "step": 8747 - }, - { - "epoch": 0.6728713175909545, - "learning_rate": 0.0007248226010669569, - "loss": 1.0358, - "step": 8748 - }, - { - "epoch": 0.6729482347511729, - "learning_rate": 0.0007245123131022366, - "loss": 1.2178, - "step": 8749 - }, - { - "epoch": 0.6730251519113915, - "learning_rate": 0.0007242020704190504, - "loss": 1.1747, - "step": 8750 - }, - { - "epoch": 0.6731020690716099, - "learning_rate": 0.0007238918730355138, - "loss": 1.2216, - "step": 8751 - }, - { - "epoch": 0.6731789862318284, - "learning_rate": 0.0007235817209697391, - "loss": 1.3866, - "step": 8752 - }, - { - "epoch": 0.6732559033920468, - "learning_rate": 0.0007232716142398374, - "loss": 1.4212, - "step": 8753 - }, - { - "epoch": 0.6733328205522652, - "learning_rate": 0.0007229615528639162, - "loss": 1.1867, - "step": 8754 - }, - { - "epoch": 0.6734097377124837, - "learning_rate": 0.0007226515368600786, - "loss": 1.7131, - "step": 8755 - }, - { - "epoch": 0.6734866548727021, - "learning_rate": 0.0007223415662464286, - "loss": 0.9497, - "step": 8756 - }, - { - "epoch": 0.6735635720329205, - "learning_rate": 0.0007220316410410647, - "loss": 1.1333, - "step": 8757 - }, - { - "epoch": 0.673640489193139, - "learning_rate": 0.0007217217612620842, - "loss": 0.8965, - "step": 8758 - }, - { - "epoch": 0.6737174063533574, - "learning_rate": 0.0007214119269275813, - "loss": 1.562, - "step": 8759 - }, - { - "epoch": 0.6737943235135759, - "learning_rate": 0.0007211021380556467, - "loss": 1.2326, - "step": 8760 - }, - { - "epoch": 0.6738712406737943, - "learning_rate": 0.0007207923946643707, - "loss": 1.2517, - "step": 8761 - }, - { - "epoch": 0.6739481578340127, - "learning_rate": 0.0007204826967718386, - "loss": 1.0405, - "step": 8762 - }, - { - "epoch": 0.6740250749942313, - "learning_rate": 0.0007201730443961345, - "loss": 1.126, - "step": 8763 - }, - { - "epoch": 0.6741019921544497, - "learning_rate": 0.0007198634375553389, - "loss": 1.2274, - "step": 8764 - }, - { - "epoch": 0.6741789093146681, - "learning_rate": 0.0007195538762675302, - "loss": 0.9536, - "step": 8765 - }, - { - "epoch": 0.6742558264748866, - "learning_rate": 0.0007192443605507836, - "loss": 0.9043, - "step": 8766 - }, - { - "epoch": 0.674332743635105, - "learning_rate": 0.0007189348904231734, - "loss": 1.4426, - "step": 8767 - }, - { - "epoch": 0.6744096607953234, - "learning_rate": 0.0007186254659027685, - "loss": 1.2582, - "step": 8768 - }, - { - "epoch": 0.6744865779555419, - "learning_rate": 0.0007183160870076365, - "loss": 0.8202, - "step": 8769 - }, - { - "epoch": 0.6745634951157603, - "learning_rate": 0.0007180067537558433, - "loss": 0.9519, - "step": 8770 - }, - { - "epoch": 0.6746404122759788, - "learning_rate": 0.0007176974661654507, - "loss": 1.1611, - "step": 8771 - }, - { - "epoch": 0.6747173294361972, - "learning_rate": 0.0007173882242545184, - "loss": 1.5816, - "step": 8772 - }, - { - "epoch": 0.6747942465964156, - "learning_rate": 0.0007170790280411033, - "loss": 1.1072, - "step": 8773 - }, - { - "epoch": 0.6748711637566341, - "learning_rate": 0.0007167698775432591, - "loss": 1.2849, - "step": 8774 - }, - 
{ - "epoch": 0.6749480809168525, - "learning_rate": 0.0007164607727790387, - "loss": 1.091, - "step": 8775 - }, - { - "epoch": 0.675024998077071, - "learning_rate": 0.0007161517137664908, - "loss": 1.4821, - "step": 8776 - }, - { - "epoch": 0.6751019152372895, - "learning_rate": 0.0007158427005236601, - "loss": 0.7069, - "step": 8777 - }, - { - "epoch": 0.6751788323975079, - "learning_rate": 0.0007155337330685919, - "loss": 1.1757, - "step": 8778 - }, - { - "epoch": 0.6752557495577264, - "learning_rate": 0.0007152248114193261, - "loss": 0.9925, - "step": 8779 - }, - { - "epoch": 0.6753326667179448, - "learning_rate": 0.0007149159355939025, - "loss": 0.915, - "step": 8780 - }, - { - "epoch": 0.6754095838781632, - "learning_rate": 0.000714607105610355, - "loss": 1.007, - "step": 8781 - }, - { - "epoch": 0.6754865010383817, - "learning_rate": 0.0007142983214867167, - "loss": 1.2613, - "step": 8782 - }, - { - "epoch": 0.6755634181986001, - "learning_rate": 0.0007139895832410188, - "loss": 1.2682, - "step": 8783 - }, - { - "epoch": 0.6756403353588185, - "learning_rate": 0.0007136808908912881, - "loss": 1.1472, - "step": 8784 - }, - { - "epoch": 0.675717252519037, - "learning_rate": 0.00071337224445555, - "loss": 1.0913, - "step": 8785 - }, - { - "epoch": 0.6757941696792554, - "learning_rate": 0.0007130636439518259, - "loss": 1.4439, - "step": 8786 - }, - { - "epoch": 0.6758710868394738, - "learning_rate": 0.0007127550893981361, - "loss": 1.1878, - "step": 8787 - }, - { - "epoch": 0.6759480039996923, - "learning_rate": 0.0007124465808124963, - "loss": 1.1368, - "step": 8788 - }, - { - "epoch": 0.6760249211599108, - "learning_rate": 0.0007121381182129228, - "loss": 0.6754, - "step": 8789 - }, - { - "epoch": 0.6761018383201293, - "learning_rate": 0.0007118297016174244, - "loss": 1.1726, - "step": 8790 - }, - { - "epoch": 0.6761787554803477, - "learning_rate": 0.0007115213310440118, - "loss": 1.3301, - "step": 8791 - }, - { - "epoch": 0.6762556726405661, - "learning_rate": 0.0007112130065106901, - "loss": 1.1458, - "step": 8792 - }, - { - "epoch": 0.6763325898007846, - "learning_rate": 0.0007109047280354633, - "loss": 0.8939, - "step": 8793 - }, - { - "epoch": 0.676409506961003, - "learning_rate": 0.0007105964956363318, - "loss": 0.815, - "step": 8794 - }, - { - "epoch": 0.6764864241212214, - "learning_rate": 0.0007102883093312934, - "loss": 0.9993, - "step": 8795 - }, - { - "epoch": 0.6765633412814399, - "learning_rate": 0.0007099801691383433, - "loss": 1.1996, - "step": 8796 - }, - { - "epoch": 0.6766402584416583, - "learning_rate": 0.0007096720750754749, - "loss": 1.2154, - "step": 8797 - }, - { - "epoch": 0.6767171756018768, - "learning_rate": 0.0007093640271606777, - "loss": 1.2682, - "step": 8798 - }, - { - "epoch": 0.6767940927620952, - "learning_rate": 0.000709056025411939, - "loss": 1.1586, - "step": 8799 - }, - { - "epoch": 0.6768710099223136, - "learning_rate": 0.0007087480698472433, - "loss": 1.3004, - "step": 8800 - }, - { - "epoch": 0.6769479270825322, - "learning_rate": 0.0007084401604845717, - "loss": 1.5114, - "step": 8801 - }, - { - "epoch": 0.6770248442427506, - "learning_rate": 0.0007081322973419054, - "loss": 1.2009, - "step": 8802 - }, - { - "epoch": 0.677101761402969, - "learning_rate": 0.0007078244804372187, - "loss": 1.0807, - "step": 8803 - }, - { - "epoch": 0.6771786785631875, - "learning_rate": 0.0007075167097884857, - "loss": 0.7348, - "step": 8804 - }, - { - "epoch": 0.6772555957234059, - "learning_rate": 0.0007072089854136787, - "loss": 0.7291, - "step": 8805 - }, - { 
- "epoch": 0.6773325128836243, - "learning_rate": 0.0007069013073307651, - "loss": 1.2954, - "step": 8806 - }, - { - "epoch": 0.6774094300438428, - "learning_rate": 0.0007065936755577107, - "loss": 0.7671, - "step": 8807 - }, - { - "epoch": 0.6774863472040612, - "learning_rate": 0.0007062860901124785, - "loss": 1.0619, - "step": 8808 - }, - { - "epoch": 0.6775632643642797, - "learning_rate": 0.0007059785510130282, - "loss": 1.1243, - "step": 8809 - }, - { - "epoch": 0.6776401815244981, - "learning_rate": 0.0007056710582773184, - "loss": 1.1076, - "step": 8810 - }, - { - "epoch": 0.6777170986847165, - "learning_rate": 0.0007053636119233037, - "loss": 1.1666, - "step": 8811 - }, - { - "epoch": 0.677794015844935, - "learning_rate": 0.0007050562119689347, - "loss": 1.1684, - "step": 8812 - }, - { - "epoch": 0.6778709330051534, - "learning_rate": 0.0007047488584321628, - "loss": 1.2276, - "step": 8813 - }, - { - "epoch": 0.6779478501653718, - "learning_rate": 0.0007044415513309335, - "loss": 1.07, - "step": 8814 - }, - { - "epoch": 0.6780247673255904, - "learning_rate": 0.0007041342906831913, - "loss": 1.4927, - "step": 8815 - }, - { - "epoch": 0.6781016844858088, - "learning_rate": 0.0007038270765068774, - "loss": 1.3837, - "step": 8816 - }, - { - "epoch": 0.6781786016460273, - "learning_rate": 0.0007035199088199296, - "loss": 1.326, - "step": 8817 - }, - { - "epoch": 0.6782555188062457, - "learning_rate": 0.000703212787640285, - "loss": 1.3295, - "step": 8818 - }, - { - "epoch": 0.6783324359664641, - "learning_rate": 0.000702905712985876, - "loss": 1.4315, - "step": 8819 - }, - { - "epoch": 0.6784093531266826, - "learning_rate": 0.0007025986848746333, - "loss": 1.2604, - "step": 8820 - }, - { - "epoch": 0.678486270286901, - "learning_rate": 0.0007022917033244843, - "loss": 1.132, - "step": 8821 - }, - { - "epoch": 0.6785631874471194, - "learning_rate": 0.0007019847683533541, - "loss": 1.0873, - "step": 8822 - }, - { - "epoch": 0.6786401046073379, - "learning_rate": 0.0007016778799791643, - "loss": 1.1496, - "step": 8823 - }, - { - "epoch": 0.6787170217675563, - "learning_rate": 0.0007013710382198365, - "loss": 0.6258, - "step": 8824 - }, - { - "epoch": 0.6787939389277747, - "learning_rate": 0.0007010642430932852, - "loss": 0.93, - "step": 8825 - }, - { - "epoch": 0.6788708560879932, - "learning_rate": 0.0007007574946174249, - "loss": 1.0889, - "step": 8826 - }, - { - "epoch": 0.6789477732482117, - "learning_rate": 0.0007004507928101681, - "loss": 1.2397, - "step": 8827 - }, - { - "epoch": 0.6790246904084302, - "learning_rate": 0.0007001441376894229, - "loss": 1.3494, - "step": 8828 - }, - { - "epoch": 0.6791016075686486, - "learning_rate": 0.0006998375292730949, - "loss": 1.0884, - "step": 8829 - }, - { - "epoch": 0.679178524728867, - "learning_rate": 0.0006995309675790875, - "loss": 1.1061, - "step": 8830 - }, - { - "epoch": 0.6792554418890855, - "learning_rate": 0.0006992244526253005, - "loss": 1.322, - "step": 8831 - }, - { - "epoch": 0.6793323590493039, - "learning_rate": 0.0006989179844296328, - "loss": 1.5245, - "step": 8832 - }, - { - "epoch": 0.6794092762095223, - "learning_rate": 0.0006986115630099797, - "loss": 1.1052, - "step": 8833 - }, - { - "epoch": 0.6794861933697408, - "learning_rate": 0.0006983051883842313, - "loss": 0.8898, - "step": 8834 - }, - { - "epoch": 0.6795631105299592, - "learning_rate": 0.000697998860570279, - "loss": 1.2782, - "step": 8835 - }, - { - "epoch": 0.6796400276901777, - "learning_rate": 0.0006976925795860085, - "loss": 1.2881, - "step": 8836 - }, - { - 
"epoch": 0.6797169448503961, - "learning_rate": 0.0006973863454493055, - "loss": 0.9359, - "step": 8837 - }, - { - "epoch": 0.6797938620106145, - "learning_rate": 0.0006970801581780499, - "loss": 0.9424, - "step": 8838 - }, - { - "epoch": 0.679870779170833, - "learning_rate": 0.0006967740177901198, - "loss": 0.7891, - "step": 8839 - }, - { - "epoch": 0.6799476963310515, - "learning_rate": 0.0006964679243033926, - "loss": 1.3828, - "step": 8840 - }, - { - "epoch": 0.6800246134912699, - "learning_rate": 0.0006961618777357408, - "loss": 0.9813, - "step": 8841 - }, - { - "epoch": 0.6801015306514884, - "learning_rate": 0.0006958558781050348, - "loss": 1.0776, - "step": 8842 - }, - { - "epoch": 0.6801784478117068, - "learning_rate": 0.0006955499254291419, - "loss": 1.3601, - "step": 8843 - }, - { - "epoch": 0.6802553649719253, - "learning_rate": 0.0006952440197259266, - "loss": 1.0327, - "step": 8844 - }, - { - "epoch": 0.6803322821321437, - "learning_rate": 0.0006949381610132525, - "loss": 1.1075, - "step": 8845 - }, - { - "epoch": 0.6804091992923621, - "learning_rate": 0.0006946323493089785, - "loss": 1.1752, - "step": 8846 - }, - { - "epoch": 0.6804861164525806, - "learning_rate": 0.0006943265846309601, - "loss": 1.2647, - "step": 8847 - }, - { - "epoch": 0.680563033612799, - "learning_rate": 0.0006940208669970524, - "loss": 1.1476, - "step": 8848 - }, - { - "epoch": 0.6806399507730174, - "learning_rate": 0.0006937151964251065, - "loss": 1.2381, - "step": 8849 - }, - { - "epoch": 0.6807168679332359, - "learning_rate": 0.0006934095729329705, - "loss": 1.0425, - "step": 8850 - }, - { - "epoch": 0.6807937850934543, - "learning_rate": 0.0006931039965384899, - "loss": 1.1445, - "step": 8851 - }, - { - "epoch": 0.6808707022536727, - "learning_rate": 0.0006927984672595081, - "loss": 1.3931, - "step": 8852 - }, - { - "epoch": 0.6809476194138913, - "learning_rate": 0.0006924929851138644, - "loss": 1.1622, - "step": 8853 - }, - { - "epoch": 0.6810245365741097, - "learning_rate": 0.0006921875501193974, - "loss": 0.9951, - "step": 8854 - }, - { - "epoch": 0.6811014537343282, - "learning_rate": 0.0006918821622939413, - "loss": 1.3899, - "step": 8855 - }, - { - "epoch": 0.6811783708945466, - "learning_rate": 0.0006915768216553278, - "loss": 1.1107, - "step": 8856 - }, - { - "epoch": 0.681255288054765, - "learning_rate": 0.0006912715282213866, - "loss": 1.1527, - "step": 8857 - }, - { - "epoch": 0.6813322052149835, - "learning_rate": 0.0006909662820099428, - "loss": 1.335, - "step": 8858 - }, - { - "epoch": 0.6814091223752019, - "learning_rate": 0.0006906610830388221, - "loss": 1.2401, - "step": 8859 - }, - { - "epoch": 0.6814860395354203, - "learning_rate": 0.0006903559313258437, - "loss": 0.9982, - "step": 8860 - }, - { - "epoch": 0.6815629566956388, - "learning_rate": 0.0006900508268888257, - "loss": 1.1287, - "step": 8861 - }, - { - "epoch": 0.6816398738558572, - "learning_rate": 0.0006897457697455845, - "loss": 1.123, - "step": 8862 - }, - { - "epoch": 0.6817167910160757, - "learning_rate": 0.0006894407599139324, - "loss": 1.1541, - "step": 8863 - }, - { - "epoch": 0.6817937081762941, - "learning_rate": 0.000689135797411679, - "loss": 0.884, - "step": 8864 - }, - { - "epoch": 0.6818706253365125, - "learning_rate": 0.0006888308822566316, - "loss": 1.2041, - "step": 8865 - }, - { - "epoch": 0.6819475424967311, - "learning_rate": 0.0006885260144665936, - "loss": 1.3779, - "step": 8866 - }, - { - "epoch": 0.6820244596569495, - "learning_rate": 0.0006882211940593678, - "loss": 1.1393, - "step": 8867 - }, - 
{ - "epoch": 0.6821013768171679, - "learning_rate": 0.0006879164210527528, - "loss": 0.8734, - "step": 8868 - }, - { - "epoch": 0.6821782939773864, - "learning_rate": 0.000687611695464544, - "loss": 1.1687, - "step": 8869 - }, - { - "epoch": 0.6822552111376048, - "learning_rate": 0.0006873070173125352, - "loss": 1.2036, - "step": 8870 - }, - { - "epoch": 0.6823321282978232, - "learning_rate": 0.0006870023866145165, - "loss": 1.1798, - "step": 8871 - }, - { - "epoch": 0.6824090454580417, - "learning_rate": 0.0006866978033882756, - "loss": 1.2519, - "step": 8872 - }, - { - "epoch": 0.6824859626182601, - "learning_rate": 0.0006863932676515978, - "loss": 1.0979, - "step": 8873 - }, - { - "epoch": 0.6825628797784786, - "learning_rate": 0.0006860887794222644, - "loss": 1.2568, - "step": 8874 - }, - { - "epoch": 0.682639796938697, - "learning_rate": 0.0006857843387180559, - "loss": 1.4729, - "step": 8875 - }, - { - "epoch": 0.6827167140989154, - "learning_rate": 0.0006854799455567484, - "loss": 1.1827, - "step": 8876 - }, - { - "epoch": 0.682793631259134, - "learning_rate": 0.0006851755999561159, - "loss": 1.2076, - "step": 8877 - }, - { - "epoch": 0.6828705484193524, - "learning_rate": 0.0006848713019339291, - "loss": 0.7952, - "step": 8878 - }, - { - "epoch": 0.6829474655795708, - "learning_rate": 0.0006845670515079567, - "loss": 1.3458, - "step": 8879 - }, - { - "epoch": 0.6830243827397893, - "learning_rate": 0.0006842628486959633, - "loss": 0.9705, - "step": 8880 - }, - { - "epoch": 0.6831012999000077, - "learning_rate": 0.000683958693515713, - "loss": 0.8039, - "step": 8881 - }, - { - "epoch": 0.6831782170602262, - "learning_rate": 0.000683654585984965, - "loss": 1.0404, - "step": 8882 - }, - { - "epoch": 0.6832551342204446, - "learning_rate": 0.0006833505261214765, - "loss": 1.0742, - "step": 8883 - }, - { - "epoch": 0.683332051380663, - "learning_rate": 0.0006830465139430016, - "loss": 1.5145, - "step": 8884 - }, - { - "epoch": 0.6834089685408815, - "learning_rate": 0.0006827425494672925, - "loss": 1.3263, - "step": 8885 - }, - { - "epoch": 0.6834858857010999, - "learning_rate": 0.0006824386327120975, - "loss": 1.1496, - "step": 8886 - }, - { - "epoch": 0.6835628028613183, - "learning_rate": 0.0006821347636951627, - "loss": 0.8693, - "step": 8887 - }, - { - "epoch": 0.6836397200215368, - "learning_rate": 0.000681830942434231, - "loss": 1.0798, - "step": 8888 - }, - { - "epoch": 0.6837166371817552, - "learning_rate": 0.0006815271689470437, - "loss": 1.3729, - "step": 8889 - }, - { - "epoch": 0.6837935543419736, - "learning_rate": 0.0006812234432513381, - "loss": 0.9765, - "step": 8890 - }, - { - "epoch": 0.6838704715021922, - "learning_rate": 0.000680919765364849, - "loss": 1.0981, - "step": 8891 - }, - { - "epoch": 0.6839473886624106, - "learning_rate": 0.0006806161353053084, - "loss": 1.072, - "step": 8892 - }, - { - "epoch": 0.6840243058226291, - "learning_rate": 0.0006803125530904448, - "loss": 1.1629, - "step": 8893 - }, - { - "epoch": 0.6841012229828475, - "learning_rate": 0.0006800090187379863, - "loss": 1.1544, - "step": 8894 - }, - { - "epoch": 0.6841781401430659, - "learning_rate": 0.0006797055322656562, - "loss": 1.0568, - "step": 8895 - }, - { - "epoch": 0.6842550573032844, - "learning_rate": 0.0006794020936911739, - "loss": 1.0024, - "step": 8896 - }, - { - "epoch": 0.6843319744635028, - "learning_rate": 0.0006790987030322591, - "loss": 1.1567, - "step": 8897 - }, - { - "epoch": 0.6844088916237212, - "learning_rate": 0.0006787953603066267, - "loss": 1.0324, - "step": 8898 - }, 
- { - "epoch": 0.6844858087839397, - "learning_rate": 0.0006784920655319887, - "loss": 1.3817, - "step": 8899 - }, - { - "epoch": 0.6845627259441581, - "learning_rate": 0.0006781888187260555, - "loss": 1.2668, - "step": 8900 - }, - { - "epoch": 0.6846396431043766, - "learning_rate": 0.0006778856199065329, - "loss": 0.9629, - "step": 8901 - }, - { - "epoch": 0.684716560264595, - "learning_rate": 0.0006775824690911264, - "loss": 1.3683, - "step": 8902 - }, - { - "epoch": 0.6847934774248134, - "learning_rate": 0.0006772793662975366, - "loss": 1.2192, - "step": 8903 - }, - { - "epoch": 0.684870394585032, - "learning_rate": 0.0006769763115434621, - "loss": 1.145, - "step": 8904 - }, - { - "epoch": 0.6849473117452504, - "learning_rate": 0.0006766733048465983, - "loss": 1.0979, - "step": 8905 - }, - { - "epoch": 0.6850242289054688, - "learning_rate": 0.0006763703462246384, - "loss": 1.3081, - "step": 8906 - }, - { - "epoch": 0.6851011460656873, - "learning_rate": 0.0006760674356952718, - "loss": 1.1022, - "step": 8907 - }, - { - "epoch": 0.6851780632259057, - "learning_rate": 0.0006757645732761873, - "loss": 0.8378, - "step": 8908 - }, - { - "epoch": 0.6852549803861241, - "learning_rate": 0.0006754617589850681, - "loss": 1.3486, - "step": 8909 - }, - { - "epoch": 0.6853318975463426, - "learning_rate": 0.0006751589928395953, - "loss": 1.3753, - "step": 8910 - }, - { - "epoch": 0.685408814706561, - "learning_rate": 0.0006748562748574491, - "loss": 1.0124, - "step": 8911 - }, - { - "epoch": 0.6854857318667795, - "learning_rate": 0.000674553605056305, - "loss": 1.3546, - "step": 8912 - }, - { - "epoch": 0.6855626490269979, - "learning_rate": 0.0006742509834538362, - "loss": 0.9445, - "step": 8913 - }, - { - "epoch": 0.6856395661872163, - "learning_rate": 0.000673948410067713, - "loss": 0.9991, - "step": 8914 - }, - { - "epoch": 0.6857164833474348, - "learning_rate": 0.0006736458849156026, - "loss": 1.0037, - "step": 8915 - }, - { - "epoch": 0.6857934005076533, - "learning_rate": 0.0006733434080151707, - "loss": 0.9724, - "step": 8916 - }, - { - "epoch": 0.6858703176678717, - "learning_rate": 0.0006730409793840791, - "loss": 1.0462, - "step": 8917 - }, - { - "epoch": 0.6859472348280902, - "learning_rate": 0.0006727385990399857, - "loss": 0.8091, - "step": 8918 - }, - { - "epoch": 0.6860241519883086, - "learning_rate": 0.000672436267000548, - "loss": 1.1152, - "step": 8919 - }, - { - "epoch": 0.6861010691485271, - "learning_rate": 0.0006721339832834186, - "loss": 1.4705, - "step": 8920 - }, - { - "epoch": 0.6861779863087455, - "learning_rate": 0.0006718317479062499, - "loss": 1.2821, - "step": 8921 - }, - { - "epoch": 0.6862549034689639, - "learning_rate": 0.0006715295608866878, - "loss": 1.1767, - "step": 8922 - }, - { - "epoch": 0.6863318206291824, - "learning_rate": 0.0006712274222423777, - "loss": 1.3717, - "step": 8923 - }, - { - "epoch": 0.6864087377894008, - "learning_rate": 0.0006709253319909627, - "loss": 1.3316, - "step": 8924 - }, - { - "epoch": 0.6864856549496192, - "learning_rate": 0.0006706232901500814, - "loss": 1.103, - "step": 8925 - }, - { - "epoch": 0.6865625721098377, - "learning_rate": 0.0006703212967373708, - "loss": 1.4027, - "step": 8926 - }, - { - "epoch": 0.6866394892700561, - "learning_rate": 0.0006700193517704641, - "loss": 1.0511, - "step": 8927 - }, - { - "epoch": 0.6867164064302745, - "learning_rate": 0.0006697174552669926, - "loss": 1.2876, - "step": 8928 - }, - { - "epoch": 0.686793323590493, - "learning_rate": 0.0006694156072445836, - "loss": 0.8889, - "step": 8929 - 
}, - { - "epoch": 0.6868702407507115, - "learning_rate": 0.0006691138077208641, - "loss": 0.9983, - "step": 8930 - }, - { - "epoch": 0.68694715791093, - "learning_rate": 0.0006688120567134541, - "loss": 1.0804, - "step": 8931 - }, - { - "epoch": 0.6870240750711484, - "learning_rate": 0.0006685103542399749, - "loss": 1.1354, - "step": 8932 - }, - { - "epoch": 0.6871009922313668, - "learning_rate": 0.0006682087003180427, - "loss": 1.2051, - "step": 8933 - }, - { - "epoch": 0.6871779093915853, - "learning_rate": 0.0006679070949652714, - "loss": 1.5191, - "step": 8934 - }, - { - "epoch": 0.6872548265518037, - "learning_rate": 0.0006676055381992718, - "loss": 1.4377, - "step": 8935 - }, - { - "epoch": 0.6873317437120221, - "learning_rate": 0.0006673040300376526, - "loss": 1.211, - "step": 8936 - }, - { - "epoch": 0.6874086608722406, - "learning_rate": 0.0006670025704980181, - "loss": 1.0946, - "step": 8937 - }, - { - "epoch": 0.687485578032459, - "learning_rate": 0.0006667011595979723, - "loss": 0.8467, - "step": 8938 - }, - { - "epoch": 0.6875624951926775, - "learning_rate": 0.0006663997973551144, - "loss": 1.3303, - "step": 8939 - }, - { - "epoch": 0.6876394123528959, - "learning_rate": 0.0006660984837870409, - "loss": 1.2592, - "step": 8940 - }, - { - "epoch": 0.6877163295131143, - "learning_rate": 0.000665797218911346, - "loss": 1.1655, - "step": 8941 - }, - { - "epoch": 0.6877932466733329, - "learning_rate": 0.0006654960027456204, - "loss": 1.1318, - "step": 8942 - }, - { - "epoch": 0.6878701638335513, - "learning_rate": 0.0006651948353074539, - "loss": 1.2717, - "step": 8943 - }, - { - "epoch": 0.6879470809937697, - "learning_rate": 0.0006648937166144305, - "loss": 0.7865, - "step": 8944 - }, - { - "epoch": 0.6880239981539882, - "learning_rate": 0.0006645926466841328, - "loss": 1.4333, - "step": 8945 - }, - { - "epoch": 0.6881009153142066, - "learning_rate": 0.0006642916255341415, - "loss": 1.3917, - "step": 8946 - }, - { - "epoch": 0.6881778324744251, - "learning_rate": 0.0006639906531820334, - "loss": 1.0905, - "step": 8947 - }, - { - "epoch": 0.6882547496346435, - "learning_rate": 0.0006636897296453821, - "loss": 1.5172, - "step": 8948 - }, - { - "epoch": 0.6883316667948619, - "learning_rate": 0.0006633888549417591, - "loss": 1.0846, - "step": 8949 - }, - { - "epoch": 0.6884085839550804, - "learning_rate": 0.0006630880290887323, - "loss": 1.1195, - "step": 8950 - }, - { - "epoch": 0.6884855011152988, - "learning_rate": 0.0006627872521038682, - "loss": 1.0971, - "step": 8951 - }, - { - "epoch": 0.6885624182755172, - "learning_rate": 0.0006624865240047295, - "loss": 1.135, - "step": 8952 - }, - { - "epoch": 0.6886393354357357, - "learning_rate": 0.0006621858448088745, - "loss": 1.0745, - "step": 8953 - }, - { - "epoch": 0.6887162525959541, - "learning_rate": 0.0006618852145338617, - "loss": 1.1851, - "step": 8954 - }, - { - "epoch": 0.6887931697561726, - "learning_rate": 0.0006615846331972445, - "loss": 1.2612, - "step": 8955 - }, - { - "epoch": 0.6888700869163911, - "learning_rate": 0.0006612841008165744, - "loss": 1.1767, - "step": 8956 - }, - { - "epoch": 0.6889470040766095, - "learning_rate": 0.0006609836174093998, - "loss": 1.0124, - "step": 8957 - }, - { - "epoch": 0.689023921236828, - "learning_rate": 0.0006606831829932657, - "loss": 1.03, - "step": 8958 - }, - { - "epoch": 0.6891008383970464, - "learning_rate": 0.0006603827975857157, - "loss": 1.1947, - "step": 8959 - }, - { - "epoch": 0.6891777555572648, - "learning_rate": 0.0006600824612042894, - "loss": 1.1655, - "step": 8960 
- }, - { - "epoch": 0.6892546727174833, - "learning_rate": 0.0006597821738665233, - "loss": 1.2997, - "step": 8961 - }, - { - "epoch": 0.6893315898777017, - "learning_rate": 0.0006594819355899517, - "loss": 1.5447, - "step": 8962 - }, - { - "epoch": 0.6894085070379201, - "learning_rate": 0.000659181746392106, - "loss": 1.4082, - "step": 8963 - }, - { - "epoch": 0.6894854241981386, - "learning_rate": 0.0006588816062905137, - "loss": 1.1373, - "step": 8964 - }, - { - "epoch": 0.689562341358357, - "learning_rate": 0.0006585815153027021, - "loss": 1.2028, - "step": 8965 - }, - { - "epoch": 0.6896392585185755, - "learning_rate": 0.0006582814734461917, - "loss": 1.1343, - "step": 8966 - }, - { - "epoch": 0.689716175678794, - "learning_rate": 0.000657981480738504, - "loss": 1.079, - "step": 8967 - }, - { - "epoch": 0.6897930928390124, - "learning_rate": 0.0006576815371971549, - "loss": 1.0593, - "step": 8968 - }, - { - "epoch": 0.6898700099992309, - "learning_rate": 0.0006573816428396589, - "loss": 1.5121, - "step": 8969 - }, - { - "epoch": 0.6899469271594493, - "learning_rate": 0.0006570817976835269, - "loss": 0.6794, - "step": 8970 - }, - { - "epoch": 0.6900238443196677, - "learning_rate": 0.000656782001746267, - "loss": 1.1135, - "step": 8971 - }, - { - "epoch": 0.6901007614798862, - "learning_rate": 0.0006564822550453845, - "loss": 0.9261, - "step": 8972 - }, - { - "epoch": 0.6901776786401046, - "learning_rate": 0.0006561825575983825, - "loss": 1.6339, - "step": 8973 - }, - { - "epoch": 0.690254595800323, - "learning_rate": 0.0006558829094227612, - "loss": 1.1517, - "step": 8974 - }, - { - "epoch": 0.6903315129605415, - "learning_rate": 0.0006555833105360153, - "loss": 1.0657, - "step": 8975 - }, - { - "epoch": 0.6904084301207599, - "learning_rate": 0.0006552837609556404, - "loss": 1.1853, - "step": 8976 - }, - { - "epoch": 0.6904853472809784, - "learning_rate": 0.0006549842606991265, - "loss": 0.9982, - "step": 8977 - }, - { - "epoch": 0.6905622644411968, - "learning_rate": 0.0006546848097839635, - "loss": 0.9886, - "step": 8978 - }, - { - "epoch": 0.6906391816014152, - "learning_rate": 0.0006543854082276348, - "loss": 1.267, - "step": 8979 - }, - { - "epoch": 0.6907160987616338, - "learning_rate": 0.0006540860560476228, - "loss": 1.11, - "step": 8980 - }, - { - "epoch": 0.6907930159218522, - "learning_rate": 0.0006537867532614083, - "loss": 1.1821, - "step": 8981 - }, - { - "epoch": 0.6908699330820706, - "learning_rate": 0.0006534874998864671, - "loss": 1.3343, - "step": 8982 - }, - { - "epoch": 0.6909468502422891, - "learning_rate": 0.0006531882959402729, - "loss": 0.9674, - "step": 8983 - }, - { - "epoch": 0.6910237674025075, - "learning_rate": 0.0006528891414402967, - "loss": 1.2381, - "step": 8984 - }, - { - "epoch": 0.691100684562726, - "learning_rate": 0.0006525900364040059, - "loss": 1.0584, - "step": 8985 - }, - { - "epoch": 0.6911776017229444, - "learning_rate": 0.0006522909808488664, - "loss": 1.3482, - "step": 8986 - }, - { - "epoch": 0.6912545188831628, - "learning_rate": 0.0006519919747923406, - "loss": 1.2494, - "step": 8987 - }, - { - "epoch": 0.6913314360433813, - "learning_rate": 0.0006516930182518861, - "loss": 1.1126, - "step": 8988 - }, - { - "epoch": 0.6914083532035997, - "learning_rate": 0.000651394111244961, - "loss": 1.2531, - "step": 8989 - }, - { - "epoch": 0.6914852703638181, - "learning_rate": 0.0006510952537890179, - "loss": 1.2582, - "step": 8990 - }, - { - "epoch": 0.6915621875240366, - "learning_rate": 0.0006507964459015078, - "loss": 1.3781, - "step": 8991 - 
}, - { - "epoch": 0.691639104684255, - "learning_rate": 0.0006504976875998783, - "loss": 1.3166, - "step": 8992 - }, - { - "epoch": 0.6917160218444734, - "learning_rate": 0.0006501989789015739, - "loss": 1.2769, - "step": 8993 - }, - { - "epoch": 0.691792939004692, - "learning_rate": 0.0006499003198240364, - "loss": 1.0458, - "step": 8994 - }, - { - "epoch": 0.6918698561649104, - "learning_rate": 0.0006496017103847058, - "loss": 1.1119, - "step": 8995 - }, - { - "epoch": 0.6919467733251289, - "learning_rate": 0.0006493031506010174, - "loss": 1.2924, - "step": 8996 - }, - { - "epoch": 0.6920236904853473, - "learning_rate": 0.0006490046404904048, - "loss": 1.3174, - "step": 8997 - }, - { - "epoch": 0.6921006076455657, - "learning_rate": 0.0006487061800702981, - "loss": 0.9987, - "step": 8998 - }, - { - "epoch": 0.6921775248057842, - "learning_rate": 0.0006484077693581239, - "loss": 0.9899, - "step": 8999 - }, - { - "epoch": 0.6922544419660026, - "learning_rate": 0.0006481094083713089, - "loss": 1.1364, - "step": 9000 - }, - { - "epoch": 0.692331359126221, - "learning_rate": 0.0006478110971272729, - "loss": 1.058, - "step": 9001 - }, - { - "epoch": 0.6924082762864395, - "learning_rate": 0.0006475128356434345, - "loss": 1.2429, - "step": 9002 - }, - { - "epoch": 0.6924851934466579, - "learning_rate": 0.0006472146239372104, - "loss": 1.052, - "step": 9003 - }, - { - "epoch": 0.6925621106068764, - "learning_rate": 0.0006469164620260132, - "loss": 0.8947, - "step": 9004 - }, - { - "epoch": 0.6926390277670949, - "learning_rate": 0.0006466183499272529, - "loss": 1.4427, - "step": 9005 - }, - { - "epoch": 0.6927159449273133, - "learning_rate": 0.0006463202876583367, - "loss": 1.3991, - "step": 9006 - }, - { - "epoch": 0.6927928620875318, - "learning_rate": 0.0006460222752366679, - "loss": 0.8358, - "step": 9007 - }, - { - "epoch": 0.6928697792477502, - "learning_rate": 0.0006457243126796489, - "loss": 1.2745, - "step": 9008 - }, - { - "epoch": 0.6929466964079686, - "learning_rate": 0.0006454264000046778, - "loss": 1.1079, - "step": 9009 - }, - { - "epoch": 0.6930236135681871, - "learning_rate": 0.0006451285372291496, - "loss": 0.9257, - "step": 9010 - }, - { - "epoch": 0.6931005307284055, - "learning_rate": 0.000644830724370457, - "loss": 1.387, - "step": 9011 - }, - { - "epoch": 0.6931774478886239, - "learning_rate": 0.00064453296144599, - "loss": 1.0353, - "step": 9012 - }, - { - "epoch": 0.6932543650488424, - "learning_rate": 0.0006442352484731346, - "loss": 1.0631, - "step": 9013 - }, - { - "epoch": 0.6933312822090608, - "learning_rate": 0.000643937585469275, - "loss": 1.3631, - "step": 9014 - }, - { - "epoch": 0.6934081993692793, - "learning_rate": 0.0006436399724517915, - "loss": 0.8408, - "step": 9015 - }, - { - "epoch": 0.6934851165294977, - "learning_rate": 0.0006433424094380628, - "loss": 1.1477, - "step": 9016 - }, - { - "epoch": 0.6935620336897161, - "learning_rate": 0.0006430448964454638, - "loss": 1.5772, - "step": 9017 - }, - { - "epoch": 0.6936389508499347, - "learning_rate": 0.0006427474334913664, - "loss": 1.3088, - "step": 9018 - }, - { - "epoch": 0.6937158680101531, - "learning_rate": 0.0006424500205931395, - "loss": 1.3001, - "step": 9019 - }, - { - "epoch": 0.6937927851703715, - "learning_rate": 0.00064215265776815, - "loss": 1.0358, - "step": 9020 - }, - { - "epoch": 0.69386970233059, - "learning_rate": 0.00064185534503376, - "loss": 1.4033, - "step": 9021 - }, - { - "epoch": 0.6939466194908084, - "learning_rate": 0.0006415580824073314, - "loss": 1.3163, - "step": 9022 - }, - 
{ - "epoch": 0.6940235366510269, - "learning_rate": 0.000641260869906221, - "loss": 1.162, - "step": 9023 - }, - { - "epoch": 0.6941004538112453, - "learning_rate": 0.0006409637075477834, - "loss": 0.9415, - "step": 9024 - }, - { - "epoch": 0.6941773709714637, - "learning_rate": 0.0006406665953493701, - "loss": 1.0079, - "step": 9025 - }, - { - "epoch": 0.6942542881316822, - "learning_rate": 0.00064036953332833, - "loss": 1.2808, - "step": 9026 - }, - { - "epoch": 0.6943312052919006, - "learning_rate": 0.0006400725215020087, - "loss": 0.6978, - "step": 9027 - }, - { - "epoch": 0.694408122452119, - "learning_rate": 0.0006397755598877488, - "loss": 0.9175, - "step": 9028 - }, - { - "epoch": 0.6944850396123375, - "learning_rate": 0.0006394786485028901, - "loss": 0.9635, - "step": 9029 - }, - { - "epoch": 0.6945619567725559, - "learning_rate": 0.0006391817873647707, - "loss": 1.3319, - "step": 9030 - }, - { - "epoch": 0.6946388739327743, - "learning_rate": 0.0006388849764907236, - "loss": 1.0064, - "step": 9031 - }, - { - "epoch": 0.6947157910929929, - "learning_rate": 0.0006385882158980802, - "loss": 1.2103, - "step": 9032 - }, - { - "epoch": 0.6947927082532113, - "learning_rate": 0.0006382915056041686, - "loss": 1.1953, - "step": 9033 - }, - { - "epoch": 0.6948696254134298, - "learning_rate": 0.0006379948456263137, - "loss": 1.0738, - "step": 9034 - }, - { - "epoch": 0.6949465425736482, - "learning_rate": 0.0006376982359818384, - "loss": 0.757, - "step": 9035 - }, - { - "epoch": 0.6950234597338666, - "learning_rate": 0.0006374016766880625, - "loss": 1.3447, - "step": 9036 - }, - { - "epoch": 0.6951003768940851, - "learning_rate": 0.0006371051677623004, - "loss": 1.0917, - "step": 9037 - }, - { - "epoch": 0.6951772940543035, - "learning_rate": 0.0006368087092218675, - "loss": 1.1666, - "step": 9038 - }, - { - "epoch": 0.6952542112145219, - "learning_rate": 0.0006365123010840737, - "loss": 1.1845, - "step": 9039 - }, - { - "epoch": 0.6953311283747404, - "learning_rate": 0.0006362159433662265, - "loss": 1.0628, - "step": 9040 - }, - { - "epoch": 0.6954080455349588, - "learning_rate": 0.0006359196360856305, - "loss": 0.8804, - "step": 9041 - }, - { - "epoch": 0.6954849626951773, - "learning_rate": 0.000635623379259587, - "loss": 1.5282, - "step": 9042 - }, - { - "epoch": 0.6955618798553957, - "learning_rate": 0.0006353271729053957, - "loss": 1.197, - "step": 9043 - }, - { - "epoch": 0.6956387970156142, - "learning_rate": 0.0006350310170403522, - "loss": 1.1779, - "step": 9044 - }, - { - "epoch": 0.6957157141758327, - "learning_rate": 0.0006347349116817487, - "loss": 1.1069, - "step": 9045 - }, - { - "epoch": 0.6957926313360511, - "learning_rate": 0.0006344388568468757, - "loss": 0.9396, - "step": 9046 - }, - { - "epoch": 0.6958695484962695, - "learning_rate": 0.0006341428525530198, - "loss": 1.3949, - "step": 9047 - }, - { - "epoch": 0.695946465656488, - "learning_rate": 0.0006338468988174646, - "loss": 1.337, - "step": 9048 - }, - { - "epoch": 0.6960233828167064, - "learning_rate": 0.0006335509956574928, - "loss": 0.8997, - "step": 9049 - }, - { - "epoch": 0.6961002999769248, - "learning_rate": 0.0006332551430903808, - "loss": 0.7967, - "step": 9050 - }, - { - "epoch": 0.6961772171371433, - "learning_rate": 0.0006329593411334037, - "loss": 1.2711, - "step": 9051 - }, - { - "epoch": 0.6962541342973617, - "learning_rate": 0.0006326635898038347, - "loss": 1.3538, - "step": 9052 - }, - { - "epoch": 0.6963310514575802, - "learning_rate": 0.0006323678891189429, - "loss": 1.4564, - "step": 9053 - }, - 
{ - "epoch": 0.6964079686177986, - "learning_rate": 0.0006320722390959941, - "loss": 1.0628, - "step": 9054 - }, - { - "epoch": 0.696484885778017, - "learning_rate": 0.0006317766397522517, - "loss": 0.4444, - "step": 9055 - }, - { - "epoch": 0.6965618029382356, - "learning_rate": 0.0006314810911049758, - "loss": 1.34, - "step": 9056 - }, - { - "epoch": 0.696638720098454, - "learning_rate": 0.0006311855931714244, - "loss": 1.4358, - "step": 9057 - }, - { - "epoch": 0.6967156372586724, - "learning_rate": 0.0006308901459688526, - "loss": 1.3789, - "step": 9058 - }, - { - "epoch": 0.6967925544188909, - "learning_rate": 0.0006305947495145096, - "loss": 1.0355, - "step": 9059 - }, - { - "epoch": 0.6968694715791093, - "learning_rate": 0.0006302994038256458, - "loss": 1.0603, - "step": 9060 - }, - { - "epoch": 0.6969463887393278, - "learning_rate": 0.0006300041089195057, - "loss": 1.0929, - "step": 9061 - }, - { - "epoch": 0.6970233058995462, - "learning_rate": 0.0006297088648133336, - "loss": 0.8972, - "step": 9062 - }, - { - "epoch": 0.6971002230597646, - "learning_rate": 0.000629413671524367, - "loss": 1.1272, - "step": 9063 - }, - { - "epoch": 0.6971771402199831, - "learning_rate": 0.0006291185290698433, - "loss": 1.2359, - "step": 9064 - }, - { - "epoch": 0.6972540573802015, - "learning_rate": 0.0006288234374669966, - "loss": 1.0291, - "step": 9065 - }, - { - "epoch": 0.6973309745404199, - "learning_rate": 0.0006285283967330573, - "loss": 1.2536, - "step": 9066 - }, - { - "epoch": 0.6974078917006384, - "learning_rate": 0.0006282334068852531, - "loss": 1.2681, - "step": 9067 - }, - { - "epoch": 0.6974848088608568, - "learning_rate": 0.0006279384679408089, - "loss": 1.262, - "step": 9068 - }, - { - "epoch": 0.6975617260210754, - "learning_rate": 0.0006276435799169458, - "loss": 1.1998, - "step": 9069 - }, - { - "epoch": 0.6976386431812938, - "learning_rate": 0.0006273487428308837, - "loss": 1.0804, - "step": 9070 - }, - { - "epoch": 0.6977155603415122, - "learning_rate": 0.0006270539566998386, - "loss": 1.2336, - "step": 9071 - }, - { - "epoch": 0.6977924775017307, - "learning_rate": 0.0006267592215410217, - "loss": 1.2207, - "step": 9072 - }, - { - "epoch": 0.6978693946619491, - "learning_rate": 0.0006264645373716444, - "loss": 1.2066, - "step": 9073 - }, - { - "epoch": 0.6979463118221675, - "learning_rate": 0.000626169904208913, - "loss": 1.2549, - "step": 9074 - }, - { - "epoch": 0.698023228982386, - "learning_rate": 0.0006258753220700318, - "loss": 1.4719, - "step": 9075 - }, - { - "epoch": 0.6981001461426044, - "learning_rate": 0.0006255807909722015, - "loss": 1.0409, - "step": 9076 - }, - { - "epoch": 0.6981770633028228, - "learning_rate": 0.0006252863109326201, - "loss": 1.3939, - "step": 9077 - }, - { - "epoch": 0.6982539804630413, - "learning_rate": 0.000624991881968482, - "loss": 1.1164, - "step": 9078 - }, - { - "epoch": 0.6983308976232597, - "learning_rate": 0.0006246975040969804, - "loss": 1.4007, - "step": 9079 - }, - { - "epoch": 0.6984078147834782, - "learning_rate": 0.0006244031773353037, - "loss": 1.0795, - "step": 9080 - }, - { - "epoch": 0.6984847319436966, - "learning_rate": 0.0006241089017006382, - "loss": 1.3028, - "step": 9081 - }, - { - "epoch": 0.698561649103915, - "learning_rate": 0.0006238146772101665, - "loss": 1.1655, - "step": 9082 - }, - { - "epoch": 0.6986385662641336, - "learning_rate": 0.0006235205038810685, - "loss": 1.1177, - "step": 9083 - }, - { - "epoch": 0.698715483424352, - "learning_rate": 0.0006232263817305228, - "loss": 1.3024, - "step": 9084 - }, - 
{ - "epoch": 0.6987924005845704, - "learning_rate": 0.000622932310775702, - "loss": 1.2065, - "step": 9085 - }, - { - "epoch": 0.6988693177447889, - "learning_rate": 0.000622638291033777, - "loss": 1.229, - "step": 9086 - }, - { - "epoch": 0.6989462349050073, - "learning_rate": 0.0006223443225219169, - "loss": 1.0655, - "step": 9087 - }, - { - "epoch": 0.6990231520652258, - "learning_rate": 0.0006220504052572867, - "loss": 0.8928, - "step": 9088 - }, - { - "epoch": 0.6991000692254442, - "learning_rate": 0.0006217565392570483, - "loss": 0.8444, - "step": 9089 - }, - { - "epoch": 0.6991769863856626, - "learning_rate": 0.0006214627245383609, - "loss": 1.1482, - "step": 9090 - }, - { - "epoch": 0.6992539035458811, - "learning_rate": 0.0006211689611183799, - "loss": 1.3874, - "step": 9091 - }, - { - "epoch": 0.6993308207060995, - "learning_rate": 0.0006208752490142597, - "loss": 1.3886, - "step": 9092 - }, - { - "epoch": 0.6994077378663179, - "learning_rate": 0.0006205815882431506, - "loss": 1.3982, - "step": 9093 - }, - { - "epoch": 0.6994846550265365, - "learning_rate": 0.000620287978822198, - "loss": 0.9534, - "step": 9094 - }, - { - "epoch": 0.6995615721867549, - "learning_rate": 0.0006199944207685475, - "loss": 1.1033, - "step": 9095 - }, - { - "epoch": 0.6996384893469733, - "learning_rate": 0.0006197009140993401, - "loss": 1.2295, - "step": 9096 - }, - { - "epoch": 0.6997154065071918, - "learning_rate": 0.0006194074588317137, - "loss": 0.8809, - "step": 9097 - }, - { - "epoch": 0.6997923236674102, - "learning_rate": 0.0006191140549828034, - "loss": 0.9768, - "step": 9098 - }, - { - "epoch": 0.6998692408276287, - "learning_rate": 0.000618820702569741, - "loss": 1.5388, - "step": 9099 - }, - { - "epoch": 0.6999461579878471, - "learning_rate": 0.0006185274016096567, - "loss": 1.1944, - "step": 9100 - }, - { - "epoch": 0.7000230751480655, - "learning_rate": 0.0006182341521196759, - "loss": 1.3062, - "step": 9101 - }, - { - "epoch": 0.700099992308284, - "learning_rate": 0.000617940954116922, - "loss": 1.0186, - "step": 9102 - }, - { - "epoch": 0.7001769094685024, - "learning_rate": 0.0006176478076185151, - "loss": 1.1119, - "step": 9103 - }, - { - "epoch": 0.7002538266287208, - "learning_rate": 0.0006173547126415722, - "loss": 0.9446, - "step": 9104 - }, - { - "epoch": 0.7003307437889393, - "learning_rate": 0.0006170616692032071, - "loss": 1.0974, - "step": 9105 - }, - { - "epoch": 0.7004076609491577, - "learning_rate": 0.0006167686773205322, - "loss": 0.9449, - "step": 9106 - }, - { - "epoch": 0.7004845781093763, - "learning_rate": 0.0006164757370106537, - "loss": 1.1489, - "step": 9107 - }, - { - "epoch": 0.7005614952695947, - "learning_rate": 0.0006161828482906783, - "loss": 1.2107, - "step": 9108 - }, - { - "epoch": 0.7006384124298131, - "learning_rate": 0.0006158900111777075, - "loss": 1.2626, - "step": 9109 - }, - { - "epoch": 0.7007153295900316, - "learning_rate": 0.0006155972256888402, - "loss": 1.3257, - "step": 9110 - }, - { - "epoch": 0.70079224675025, - "learning_rate": 0.0006153044918411727, - "loss": 1.0702, - "step": 9111 - }, - { - "epoch": 0.7008691639104684, - "learning_rate": 0.000615011809651798, - "loss": 0.8849, - "step": 9112 - }, - { - "epoch": 0.7009460810706869, - "learning_rate": 0.0006147191791378053, - "loss": 0.9322, - "step": 9113 - }, - { - "epoch": 0.7010229982309053, - "learning_rate": 0.0006144266003162831, - "loss": 1.2252, - "step": 9114 - }, - { - "epoch": 0.7010999153911237, - "learning_rate": 0.0006141340732043152, - "loss": 1.1744, - "step": 9115 - }, - 
{ - "epoch": 0.7011768325513422, - "learning_rate": 0.0006138415978189808, - "loss": 1.2032, - "step": 9116 - }, - { - "epoch": 0.7012537497115606, - "learning_rate": 0.0006135491741773594, - "loss": 0.8612, - "step": 9117 - }, - { - "epoch": 0.7013306668717791, - "learning_rate": 0.0006132568022965253, - "loss": 1.3158, - "step": 9118 - }, - { - "epoch": 0.7014075840319975, - "learning_rate": 0.0006129644821935517, - "loss": 1.1177, - "step": 9119 - }, - { - "epoch": 0.701484501192216, - "learning_rate": 0.0006126722138855057, - "loss": 1.1121, - "step": 9120 - }, - { - "epoch": 0.7015614183524345, - "learning_rate": 0.0006123799973894534, - "loss": 1.1532, - "step": 9121 - }, - { - "epoch": 0.7016383355126529, - "learning_rate": 0.0006120878327224587, - "loss": 1.048, - "step": 9122 - }, - { - "epoch": 0.7017152526728713, - "learning_rate": 0.0006117957199015809, - "loss": 1.0144, - "step": 9123 - }, - { - "epoch": 0.7017921698330898, - "learning_rate": 0.0006115036589438767, - "loss": 1.1654, - "step": 9124 - }, - { - "epoch": 0.7018690869933082, - "learning_rate": 0.0006112116498663996, - "loss": 1.3741, - "step": 9125 - }, - { - "epoch": 0.7019460041535267, - "learning_rate": 0.0006109196926862001, - "loss": 0.7862, - "step": 9126 - }, - { - "epoch": 0.7020229213137451, - "learning_rate": 0.000610627787420327, - "loss": 1.3426, - "step": 9127 - }, - { - "epoch": 0.7020998384739635, - "learning_rate": 0.0006103359340858247, - "loss": 1.1919, - "step": 9128 - }, - { - "epoch": 0.702176755634182, - "learning_rate": 0.0006100441326997332, - "loss": 1.0977, - "step": 9129 - }, - { - "epoch": 0.7022536727944004, - "learning_rate": 0.0006097523832790929, - "loss": 1.0682, - "step": 9130 - }, - { - "epoch": 0.7023305899546188, - "learning_rate": 0.0006094606858409388, - "loss": 1.3242, - "step": 9131 - }, - { - "epoch": 0.7024075071148373, - "learning_rate": 0.0006091690404023034, - "loss": 1.0938, - "step": 9132 - }, - { - "epoch": 0.7024844242750558, - "learning_rate": 0.000608877446980216, - "loss": 0.9688, - "step": 9133 - }, - { - "epoch": 0.7025613414352742, - "learning_rate": 0.0006085859055917031, - "loss": 1.1817, - "step": 9134 - }, - { - "epoch": 0.7026382585954927, - "learning_rate": 0.000608294416253788, - "loss": 0.8119, - "step": 9135 - }, - { - "epoch": 0.7027151757557111, - "learning_rate": 0.0006080029789834913, - "loss": 1.3178, - "step": 9136 - }, - { - "epoch": 0.7027920929159296, - "learning_rate": 0.0006077115937978306, - "loss": 0.8258, - "step": 9137 - }, - { - "epoch": 0.702869010076148, - "learning_rate": 0.0006074202607138197, - "loss": 0.9626, - "step": 9138 - }, - { - "epoch": 0.7029459272363664, - "learning_rate": 0.0006071289797484703, - "loss": 1.0446, - "step": 9139 - }, - { - "epoch": 0.7030228443965849, - "learning_rate": 0.0006068377509187894, - "loss": 1.1184, - "step": 9140 - }, - { - "epoch": 0.7030997615568033, - "learning_rate": 0.0006065465742417842, - "loss": 1.6268, - "step": 9141 - }, - { - "epoch": 0.7031766787170217, - "learning_rate": 0.0006062554497344553, - "loss": 0.9224, - "step": 9142 - }, - { - "epoch": 0.7032535958772402, - "learning_rate": 0.0006059643774138016, - "loss": 0.7972, - "step": 9143 - }, - { - "epoch": 0.7033305130374586, - "learning_rate": 0.00060567335729682, - "loss": 0.965, - "step": 9144 - }, - { - "epoch": 0.7034074301976772, - "learning_rate": 0.0006053823894005031, - "loss": 1.191, - "step": 9145 - }, - { - "epoch": 0.7034843473578956, - "learning_rate": 0.000605091473741841, - "loss": 1.2312, - "step": 9146 - }, - { 
- "epoch": 0.703561264518114, - "learning_rate": 0.0006048006103378203, - "loss": 0.6918, - "step": 9147 - }, - { - "epoch": 0.7036381816783325, - "learning_rate": 0.0006045097992054245, - "loss": 0.7676, - "step": 9148 - }, - { - "epoch": 0.7037150988385509, - "learning_rate": 0.0006042190403616351, - "loss": 1.4219, - "step": 9149 - }, - { - "epoch": 0.7037920159987693, - "learning_rate": 0.0006039283338234303, - "loss": 0.998, - "step": 9150 - }, - { - "epoch": 0.7038689331589878, - "learning_rate": 0.0006036376796077826, - "loss": 1.0559, - "step": 9151 - }, - { - "epoch": 0.7039458503192062, - "learning_rate": 0.0006033470777316657, - "loss": 1.4876, - "step": 9152 - }, - { - "epoch": 0.7040227674794246, - "learning_rate": 0.0006030565282120473, - "loss": 1.4646, - "step": 9153 - }, - { - "epoch": 0.7040996846396431, - "learning_rate": 0.000602766031065893, - "loss": 1.0207, - "step": 9154 - }, - { - "epoch": 0.7041766017998615, - "learning_rate": 0.0006024755863101652, - "loss": 1.0601, - "step": 9155 - }, - { - "epoch": 0.70425351896008, - "learning_rate": 0.0006021851939618228, - "loss": 1.3219, - "step": 9156 - }, - { - "epoch": 0.7043304361202984, - "learning_rate": 0.0006018948540378232, - "loss": 1.1121, - "step": 9157 - }, - { - "epoch": 0.7044073532805168, - "learning_rate": 0.000601604566555119, - "loss": 1.3604, - "step": 9158 - }, - { - "epoch": 0.7044842704407354, - "learning_rate": 0.0006013143315306603, - "loss": 1.2011, - "step": 9159 - }, - { - "epoch": 0.7045611876009538, - "learning_rate": 0.0006010241489813945, - "loss": 1.1483, - "step": 9160 - }, - { - "epoch": 0.7046381047611722, - "learning_rate": 0.0006007340189242656, - "loss": 1.1276, - "step": 9161 - }, - { - "epoch": 0.7047150219213907, - "learning_rate": 0.0006004439413762138, - "loss": 1.2896, - "step": 9162 - }, - { - "epoch": 0.7047919390816091, - "learning_rate": 0.000600153916354179, - "loss": 0.8891, - "step": 9163 - }, - { - "epoch": 0.7048688562418276, - "learning_rate": 0.0005998639438750936, - "loss": 0.9449, - "step": 9164 - }, - { - "epoch": 0.704945773402046, - "learning_rate": 0.0005995740239558914, - "loss": 1.0639, - "step": 9165 - }, - { - "epoch": 0.7050226905622644, - "learning_rate": 0.0005992841566135002, - "loss": 1.0862, - "step": 9166 - }, - { - "epoch": 0.7050996077224829, - "learning_rate": 0.0005989943418648457, - "loss": 1.1619, - "step": 9167 - }, - { - "epoch": 0.7051765248827013, - "learning_rate": 0.0005987045797268508, - "loss": 1.0624, - "step": 9168 - }, - { - "epoch": 0.7052534420429197, - "learning_rate": 0.0005984148702164346, - "loss": 1.1822, - "step": 9169 - }, - { - "epoch": 0.7053303592031382, - "learning_rate": 0.0005981252133505134, - "loss": 1.2507, - "step": 9170 - }, - { - "epoch": 0.7054072763633567, - "learning_rate": 0.0005978356091460011, - "loss": 1.0611, - "step": 9171 - }, - { - "epoch": 0.7054841935235752, - "learning_rate": 0.0005975460576198081, - "loss": 1.2883, - "step": 9172 - }, - { - "epoch": 0.7055611106837936, - "learning_rate": 0.0005972565587888413, - "loss": 1.1024, - "step": 9173 - }, - { - "epoch": 0.705638027844012, - "learning_rate": 0.0005969671126700047, - "loss": 0.968, - "step": 9174 - }, - { - "epoch": 0.7057149450042305, - "learning_rate": 0.0005966777192801991, - "loss": 1.2252, - "step": 9175 - }, - { - "epoch": 0.7057918621644489, - "learning_rate": 0.0005963883786363239, - "loss": 1.1141, - "step": 9176 - }, - { - "epoch": 0.7058687793246673, - "learning_rate": 0.0005960990907552724, - "loss": 1.105, - "step": 9177 - }, - { 
- "epoch": 0.7059456964848858, - "learning_rate": 0.0005958098556539365, - "loss": 1.1531, - "step": 9178 - }, - { - "epoch": 0.7060226136451042, - "learning_rate": 0.000595520673349206, - "loss": 1.1202, - "step": 9179 - }, - { - "epoch": 0.7060995308053226, - "learning_rate": 0.000595231543857966, - "loss": 1.0094, - "step": 9180 - }, - { - "epoch": 0.7061764479655411, - "learning_rate": 0.0005949424671970991, - "loss": 1.0983, - "step": 9181 - }, - { - "epoch": 0.7062533651257595, - "learning_rate": 0.000594653443383485, - "loss": 1.1388, - "step": 9182 - }, - { - "epoch": 0.706330282285978, - "learning_rate": 0.000594364472433999, - "loss": 1.2372, - "step": 9183 - }, - { - "epoch": 0.7064071994461965, - "learning_rate": 0.0005940755543655161, - "loss": 1.3763, - "step": 9184 - }, - { - "epoch": 0.7064841166064149, - "learning_rate": 0.0005937866891949054, - "loss": 1.2527, - "step": 9185 - }, - { - "epoch": 0.7065610337666334, - "learning_rate": 0.0005934978769390345, - "loss": 0.9266, - "step": 9186 - }, - { - "epoch": 0.7066379509268518, - "learning_rate": 0.0005932091176147674, - "loss": 1.3706, - "step": 9187 - }, - { - "epoch": 0.7067148680870702, - "learning_rate": 0.0005929204112389648, - "loss": 1.2825, - "step": 9188 - }, - { - "epoch": 0.7067917852472887, - "learning_rate": 0.0005926317578284844, - "loss": 1.0201, - "step": 9189 - }, - { - "epoch": 0.7068687024075071, - "learning_rate": 0.0005923431574001821, - "loss": 0.7452, - "step": 9190 - }, - { - "epoch": 0.7069456195677256, - "learning_rate": 0.0005920546099709077, - "loss": 1.5096, - "step": 9191 - }, - { - "epoch": 0.707022536727944, - "learning_rate": 0.0005917661155575116, - "loss": 1.0856, - "step": 9192 - }, - { - "epoch": 0.7070994538881624, - "learning_rate": 0.0005914776741768385, - "loss": 0.4657, - "step": 9193 - }, - { - "epoch": 0.7071763710483809, - "learning_rate": 0.0005911892858457309, - "loss": 1.2545, - "step": 9194 - }, - { - "epoch": 0.7072532882085993, - "learning_rate": 0.000590900950581028, - "loss": 1.0548, - "step": 9195 - }, - { - "epoch": 0.7073302053688177, - "learning_rate": 0.000590612668399566, - "loss": 1.1633, - "step": 9196 - }, - { - "epoch": 0.7074071225290363, - "learning_rate": 0.0005903244393181774, - "loss": 1.0331, - "step": 9197 - }, - { - "epoch": 0.7074840396892547, - "learning_rate": 0.0005900362633536934, - "loss": 1.3973, - "step": 9198 - }, - { - "epoch": 0.7075609568494731, - "learning_rate": 0.000589748140522941, - "loss": 1.3848, - "step": 9199 - }, - { - "epoch": 0.7076378740096916, - "learning_rate": 0.0005894600708427421, - "loss": 1.1831, - "step": 9200 - }, - { - "epoch": 0.70771479116991, - "learning_rate": 0.0005891720543299193, - "loss": 1.0076, - "step": 9201 - }, - { - "epoch": 0.7077917083301285, - "learning_rate": 0.0005888840910012887, - "loss": 1.3635, - "step": 9202 - }, - { - "epoch": 0.7078686254903469, - "learning_rate": 0.0005885961808736668, - "loss": 1.1163, - "step": 9203 - }, - { - "epoch": 0.7079455426505653, - "learning_rate": 0.0005883083239638632, - "loss": 1.1741, - "step": 9204 - }, - { - "epoch": 0.7080224598107838, - "learning_rate": 0.0005880205202886863, - "loss": 1.1505, - "step": 9205 - }, - { - "epoch": 0.7080993769710022, - "learning_rate": 0.000587732769864942, - "loss": 1.0987, - "step": 9206 - }, - { - "epoch": 0.7081762941312206, - "learning_rate": 0.000587445072709432, - "loss": 0.9515, - "step": 9207 - }, - { - "epoch": 0.7082532112914391, - "learning_rate": 0.0005871574288389555, - "loss": 1.1378, - "step": 9208 - }, - { - 
"epoch": 0.7083301284516575, - "learning_rate": 0.0005868698382703078, - "loss": 1.099, - "step": 9209 - }, - { - "epoch": 0.7084070456118761, - "learning_rate": 0.0005865823010202815, - "loss": 1.2283, - "step": 9210 - }, - { - "epoch": 0.7084839627720945, - "learning_rate": 0.000586294817105667, - "loss": 1.3589, - "step": 9211 - }, - { - "epoch": 0.7085608799323129, - "learning_rate": 0.000586007386543251, - "loss": 1.1269, - "step": 9212 - }, - { - "epoch": 0.7086377970925314, - "learning_rate": 0.0005857200093498152, - "loss": 1.2875, - "step": 9213 - }, - { - "epoch": 0.7087147142527498, - "learning_rate": 0.0005854326855421414, - "loss": 1.111, - "step": 9214 - }, - { - "epoch": 0.7087916314129682, - "learning_rate": 0.0005851454151370061, - "loss": 1.3645, - "step": 9215 - }, - { - "epoch": 0.7088685485731867, - "learning_rate": 0.0005848581981511837, - "loss": 1.1044, - "step": 9216 - }, - { - "epoch": 0.7089454657334051, - "learning_rate": 0.0005845710346014445, - "loss": 1.2185, - "step": 9217 - }, - { - "epoch": 0.7090223828936235, - "learning_rate": 0.000584283924504557, - "loss": 1.1358, - "step": 9218 - }, - { - "epoch": 0.709099300053842, - "learning_rate": 0.0005839968678772846, - "loss": 1.1562, - "step": 9219 - }, - { - "epoch": 0.7091762172140604, - "learning_rate": 0.0005837098647363904, - "loss": 0.9352, - "step": 9220 - }, - { - "epoch": 0.709253134374279, - "learning_rate": 0.0005834229150986323, - "loss": 0.9012, - "step": 9221 - }, - { - "epoch": 0.7093300515344974, - "learning_rate": 0.0005831360189807653, - "loss": 0.9845, - "step": 9222 - }, - { - "epoch": 0.7094069686947158, - "learning_rate": 0.0005828491763995417, - "loss": 0.8973, - "step": 9223 - }, - { - "epoch": 0.7094838858549343, - "learning_rate": 0.0005825623873717098, - "loss": 1.1538, - "step": 9224 - }, - { - "epoch": 0.7095608030151527, - "learning_rate": 0.0005822756519140176, - "loss": 1.1007, - "step": 9225 - }, - { - "epoch": 0.7096377201753711, - "learning_rate": 0.0005819889700432058, - "loss": 1.1432, - "step": 9226 - }, - { - "epoch": 0.7097146373355896, - "learning_rate": 0.0005817023417760142, - "loss": 1.287, - "step": 9227 - }, - { - "epoch": 0.709791554495808, - "learning_rate": 0.0005814157671291805, - "loss": 1.0971, - "step": 9228 - }, - { - "epoch": 0.7098684716560265, - "learning_rate": 0.0005811292461194375, - "loss": 1.3493, - "step": 9229 - }, - { - "epoch": 0.7099453888162449, - "learning_rate": 0.0005808427787635154, - "loss": 0.7992, - "step": 9230 - }, - { - "epoch": 0.7100223059764633, - "learning_rate": 0.0005805563650781414, - "loss": 1.1275, - "step": 9231 - }, - { - "epoch": 0.7100992231366818, - "learning_rate": 0.000580270005080039, - "loss": 1.2756, - "step": 9232 - }, - { - "epoch": 0.7101761402969002, - "learning_rate": 0.00057998369878593, - "loss": 0.9373, - "step": 9233 - }, - { - "epoch": 0.7102530574571186, - "learning_rate": 0.0005796974462125321, - "loss": 1.2198, - "step": 9234 - }, - { - "epoch": 0.7103299746173372, - "learning_rate": 0.0005794112473765584, - "loss": 0.8973, - "step": 9235 - }, - { - "epoch": 0.7104068917775556, - "learning_rate": 0.0005791251022947218, - "loss": 0.9384, - "step": 9236 - }, - { - "epoch": 0.710483808937774, - "learning_rate": 0.0005788390109837305, - "loss": 1.0546, - "step": 9237 - }, - { - "epoch": 0.7105607260979925, - "learning_rate": 0.000578552973460289, - "loss": 1.2324, - "step": 9238 - }, - { - "epoch": 0.7106376432582109, - "learning_rate": 0.0005782669897411001, - "loss": 1.47, - "step": 9239 - }, - { - 
"epoch": 0.7107145604184294, - "learning_rate": 0.0005779810598428614, - "loss": 1.1954, - "step": 9240 - }, - { - "epoch": 0.7107914775786478, - "learning_rate": 0.0005776951837822702, - "loss": 0.8435, - "step": 9241 - }, - { - "epoch": 0.7108683947388662, - "learning_rate": 0.0005774093615760184, - "loss": 0.9694, - "step": 9242 - }, - { - "epoch": 0.7109453118990847, - "learning_rate": 0.0005771235932407955, - "loss": 1.1737, - "step": 9243 - }, - { - "epoch": 0.7110222290593031, - "learning_rate": 0.0005768378787932878, - "loss": 1.1425, - "step": 9244 - }, - { - "epoch": 0.7110991462195215, - "learning_rate": 0.0005765522182501784, - "loss": 1.056, - "step": 9245 - }, - { - "epoch": 0.71117606337974, - "learning_rate": 0.0005762666116281469, - "loss": 0.7269, - "step": 9246 - }, - { - "epoch": 0.7112529805399584, - "learning_rate": 0.0005759810589438715, - "loss": 1.2509, - "step": 9247 - }, - { - "epoch": 0.711329897700177, - "learning_rate": 0.0005756955602140242, - "loss": 1.0475, - "step": 9248 - }, - { - "epoch": 0.7114068148603954, - "learning_rate": 0.0005754101154552768, - "loss": 1.4002, - "step": 9249 - }, - { - "epoch": 0.7114837320206138, - "learning_rate": 0.0005751247246842965, - "loss": 1.3878, - "step": 9250 - }, - { - "epoch": 0.7115606491808323, - "learning_rate": 0.0005748393879177474, - "loss": 0.8607, - "step": 9251 - }, - { - "epoch": 0.7116375663410507, - "learning_rate": 0.0005745541051722906, - "loss": 0.9819, - "step": 9252 - }, - { - "epoch": 0.7117144835012691, - "learning_rate": 0.000574268876464584, - "loss": 0.9967, - "step": 9253 - }, - { - "epoch": 0.7117914006614876, - "learning_rate": 0.0005739837018112821, - "loss": 1.0851, - "step": 9254 - }, - { - "epoch": 0.711868317821706, - "learning_rate": 0.0005736985812290372, - "loss": 1.2694, - "step": 9255 - }, - { - "epoch": 0.7119452349819244, - "learning_rate": 0.0005734135147344983, - "loss": 1.0488, - "step": 9256 - }, - { - "epoch": 0.7120221521421429, - "learning_rate": 0.0005731285023443087, - "loss": 1.1041, - "step": 9257 - }, - { - "epoch": 0.7120990693023613, - "learning_rate": 0.0005728435440751125, - "loss": 1.4289, - "step": 9258 - }, - { - "epoch": 0.7121759864625798, - "learning_rate": 0.0005725586399435473, - "loss": 1.0462, - "step": 9259 - }, - { - "epoch": 0.7122529036227983, - "learning_rate": 0.0005722737899662509, - "loss": 0.9973, - "step": 9260 - }, - { - "epoch": 0.7123298207830167, - "learning_rate": 0.0005719889941598542, - "loss": 1.0765, - "step": 9261 - }, - { - "epoch": 0.7124067379432352, - "learning_rate": 0.0005717042525409866, - "loss": 1.0844, - "step": 9262 - }, - { - "epoch": 0.7124836551034536, - "learning_rate": 0.0005714195651262758, - "loss": 1.1386, - "step": 9263 - }, - { - "epoch": 0.712560572263672, - "learning_rate": 0.0005711349319323445, - "loss": 0.929, - "step": 9264 - }, - { - "epoch": 0.7126374894238905, - "learning_rate": 0.0005708503529758123, - "loss": 1.2356, - "step": 9265 - }, - { - "epoch": 0.7127144065841089, - "learning_rate": 0.0005705658282732963, - "loss": 0.8556, - "step": 9266 - }, - { - "epoch": 0.7127913237443274, - "learning_rate": 0.0005702813578414098, - "loss": 0.928, - "step": 9267 - }, - { - "epoch": 0.7128682409045458, - "learning_rate": 0.0005699969416967643, - "loss": 0.98, - "step": 9268 - }, - { - "epoch": 0.7129451580647642, - "learning_rate": 0.0005697125798559672, - "loss": 1.6185, - "step": 9269 - }, - { - "epoch": 0.7130220752249827, - "learning_rate": 0.0005694282723356209, - "loss": 1.1985, - "step": 9270 - }, - { - 
"epoch": 0.7130989923852011, - "learning_rate": 0.0005691440191523282, - "loss": 1.3479, - "step": 9271 - }, - { - "epoch": 0.7131759095454195, - "learning_rate": 0.0005688598203226862, - "loss": 1.0522, - "step": 9272 - }, - { - "epoch": 0.713252826705638, - "learning_rate": 0.0005685756758632898, - "loss": 1.0467, - "step": 9273 - }, - { - "epoch": 0.7133297438658565, - "learning_rate": 0.0005682915857907303, - "loss": 1.2088, - "step": 9274 - }, - { - "epoch": 0.7134066610260749, - "learning_rate": 0.0005680075501215963, - "loss": 1.1078, - "step": 9275 - }, - { - "epoch": 0.7134835781862934, - "learning_rate": 0.0005677235688724721, - "loss": 1.1156, - "step": 9276 - }, - { - "epoch": 0.7135604953465118, - "learning_rate": 0.0005674396420599408, - "loss": 1.2745, - "step": 9277 - }, - { - "epoch": 0.7136374125067303, - "learning_rate": 0.0005671557697005808, - "loss": 1.3483, - "step": 9278 - }, - { - "epoch": 0.7137143296669487, - "learning_rate": 0.0005668719518109675, - "loss": 1.489, - "step": 9279 - }, - { - "epoch": 0.7137912468271671, - "learning_rate": 0.0005665881884076735, - "loss": 1.2943, - "step": 9280 - }, - { - "epoch": 0.7138681639873856, - "learning_rate": 0.0005663044795072673, - "loss": 1.2696, - "step": 9281 - }, - { - "epoch": 0.713945081147604, - "learning_rate": 0.0005660208251263169, - "loss": 1.3986, - "step": 9282 - }, - { - "epoch": 0.7140219983078224, - "learning_rate": 0.0005657372252813833, - "loss": 1.054, - "step": 9283 - }, - { - "epoch": 0.7140989154680409, - "learning_rate": 0.0005654536799890261, - "loss": 1.4222, - "step": 9284 - }, - { - "epoch": 0.7141758326282593, - "learning_rate": 0.000565170189265803, - "loss": 0.6641, - "step": 9285 - }, - { - "epoch": 0.7142527497884779, - "learning_rate": 0.0005648867531282669, - "loss": 0.8956, - "step": 9286 - }, - { - "epoch": 0.7143296669486963, - "learning_rate": 0.0005646033715929678, - "loss": 0.9189, - "step": 9287 - }, - { - "epoch": 0.7144065841089147, - "learning_rate": 0.0005643200446764526, - "loss": 1.2879, - "step": 9288 - }, - { - "epoch": 0.7144835012691332, - "learning_rate": 0.0005640367723952648, - "loss": 1.2763, - "step": 9289 - }, - { - "epoch": 0.7145604184293516, - "learning_rate": 0.0005637535547659458, - "loss": 1.0203, - "step": 9290 - }, - { - "epoch": 0.71463733558957, - "learning_rate": 0.0005634703918050329, - "loss": 1.4123, - "step": 9291 - }, - { - "epoch": 0.7147142527497885, - "learning_rate": 0.0005631872835290589, - "loss": 0.8268, - "step": 9292 - }, - { - "epoch": 0.7147911699100069, - "learning_rate": 0.0005629042299545561, - "loss": 1.198, - "step": 9293 - }, - { - "epoch": 0.7148680870702254, - "learning_rate": 0.0005626212310980514, - "loss": 1.0468, - "step": 9294 - }, - { - "epoch": 0.7149450042304438, - "learning_rate": 0.0005623382869760713, - "loss": 1.1087, - "step": 9295 - }, - { - "epoch": 0.7150219213906622, - "learning_rate": 0.0005620553976051351, - "loss": 1.0472, - "step": 9296 - }, - { - "epoch": 0.7150988385508807, - "learning_rate": 0.0005617725630017611, - "loss": 1.1679, - "step": 9297 - }, - { - "epoch": 0.7151757557110991, - "learning_rate": 0.0005614897831824656, - "loss": 1.5574, - "step": 9298 - }, - { - "epoch": 0.7152526728713176, - "learning_rate": 0.0005612070581637596, - "loss": 1.0795, - "step": 9299 - }, - { - "epoch": 0.7153295900315361, - "learning_rate": 0.0005609243879621521, - "loss": 1.1924, - "step": 9300 - }, - { - "epoch": 0.7154065071917545, - "learning_rate": 0.0005606417725941481, - "loss": 1.1555, - "step": 9301 - }, - { 
- "epoch": 0.7154834243519729, - "learning_rate": 0.0005603592120762501, - "loss": 1.1698, - "step": 9302 - }, - { - "epoch": 0.7155603415121914, - "learning_rate": 0.0005600767064249562, - "loss": 0.9323, - "step": 9303 - }, - { - "epoch": 0.7156372586724098, - "learning_rate": 0.0005597942556567643, - "loss": 1.3894, - "step": 9304 - }, - { - "epoch": 0.7157141758326283, - "learning_rate": 0.0005595118597881646, - "loss": 1.0572, - "step": 9305 - }, - { - "epoch": 0.7157910929928467, - "learning_rate": 0.0005592295188356479, - "loss": 1.1573, - "step": 9306 - }, - { - "epoch": 0.7158680101530651, - "learning_rate": 0.0005589472328157002, - "loss": 1.0623, - "step": 9307 - }, - { - "epoch": 0.7159449273132836, - "learning_rate": 0.0005586650017448041, - "loss": 1.0767, - "step": 9308 - }, - { - "epoch": 0.716021844473502, - "learning_rate": 0.0005583828256394399, - "loss": 0.9451, - "step": 9309 - }, - { - "epoch": 0.7160987616337204, - "learning_rate": 0.0005581007045160837, - "loss": 1.0433, - "step": 9310 - }, - { - "epoch": 0.716175678793939, - "learning_rate": 0.0005578186383912085, - "loss": 0.8603, - "step": 9311 - }, - { - "epoch": 0.7162525959541574, - "learning_rate": 0.0005575366272812854, - "loss": 1.2741, - "step": 9312 - }, - { - "epoch": 0.7163295131143759, - "learning_rate": 0.0005572546712027808, - "loss": 1.3708, - "step": 9313 - }, - { - "epoch": 0.7164064302745943, - "learning_rate": 0.0005569727701721584, - "loss": 1.356, - "step": 9314 - }, - { - "epoch": 0.7164833474348127, - "learning_rate": 0.0005566909242058789, - "loss": 1.1124, - "step": 9315 - }, - { - "epoch": 0.7165602645950312, - "learning_rate": 0.0005564091333203988, - "loss": 1.0062, - "step": 9316 - }, - { - "epoch": 0.7166371817552496, - "learning_rate": 0.000556127397532174, - "loss": 1.0108, - "step": 9317 - }, - { - "epoch": 0.716714098915468, - "learning_rate": 0.0005558457168576534, - "loss": 1.0154, - "step": 9318 - }, - { - "epoch": 0.7167910160756865, - "learning_rate": 0.000555564091313285, - "loss": 1.1735, - "step": 9319 - }, - { - "epoch": 0.7168679332359049, - "learning_rate": 0.000555282520915514, - "loss": 0.9607, - "step": 9320 - }, - { - "epoch": 0.7169448503961233, - "learning_rate": 0.0005550010056807813, - "loss": 1.3898, - "step": 9321 - }, - { - "epoch": 0.7170217675563418, - "learning_rate": 0.0005547195456255246, - "loss": 1.3058, - "step": 9322 - }, - { - "epoch": 0.7170986847165602, - "learning_rate": 0.0005544381407661787, - "loss": 1.3691, - "step": 9323 - }, - { - "epoch": 0.7171756018767788, - "learning_rate": 0.0005541567911191749, - "loss": 1.0796, - "step": 9324 - }, - { - "epoch": 0.7172525190369972, - "learning_rate": 0.0005538754967009423, - "loss": 1.2049, - "step": 9325 - }, - { - "epoch": 0.7173294361972156, - "learning_rate": 0.0005535942575279053, - "loss": 1.0439, - "step": 9326 - }, - { - "epoch": 0.7174063533574341, - "learning_rate": 0.0005533130736164861, - "loss": 1.515, - "step": 9327 - }, - { - "epoch": 0.7174832705176525, - "learning_rate": 0.000553031944983103, - "loss": 0.829, - "step": 9328 - }, - { - "epoch": 0.7175601876778709, - "learning_rate": 0.0005527508716441716, - "loss": 1.0455, - "step": 9329 - }, - { - "epoch": 0.7176371048380894, - "learning_rate": 0.0005524698536161041, - "loss": 1.173, - "step": 9330 - }, - { - "epoch": 0.7177140219983078, - "learning_rate": 0.0005521888909153092, - "loss": 0.8212, - "step": 9331 - }, - { - "epoch": 0.7177909391585263, - "learning_rate": 0.0005519079835581924, - "loss": 1.1177, - "step": 9332 - }, - { 
- "epoch": 0.7178678563187447, - "learning_rate": 0.0005516271315611568, - "loss": 1.0467, - "step": 9333 - }, - { - "epoch": 0.7179447734789631, - "learning_rate": 0.0005513463349406015, - "loss": 1.4146, - "step": 9334 - }, - { - "epoch": 0.7180216906391816, - "learning_rate": 0.0005510655937129224, - "loss": 0.9482, - "step": 9335 - }, - { - "epoch": 0.7180986077994, - "learning_rate": 0.0005507849078945121, - "loss": 1.023, - "step": 9336 - }, - { - "epoch": 0.7181755249596184, - "learning_rate": 0.0005505042775017603, - "loss": 0.8764, - "step": 9337 - }, - { - "epoch": 0.718252442119837, - "learning_rate": 0.0005502237025510529, - "loss": 1.1324, - "step": 9338 - }, - { - "epoch": 0.7183293592800554, - "learning_rate": 0.0005499431830587738, - "loss": 0.9934, - "step": 9339 - }, - { - "epoch": 0.7184062764402738, - "learning_rate": 0.0005496627190413028, - "loss": 1.209, - "step": 9340 - }, - { - "epoch": 0.7184831936004923, - "learning_rate": 0.0005493823105150151, - "loss": 1.2592, - "step": 9341 - }, - { - "epoch": 0.7185601107607107, - "learning_rate": 0.0005491019574962856, - "loss": 1.0501, - "step": 9342 - }, - { - "epoch": 0.7186370279209292, - "learning_rate": 0.0005488216600014836, - "loss": 1.2749, - "step": 9343 - }, - { - "epoch": 0.7187139450811476, - "learning_rate": 0.0005485414180469762, - "loss": 1.1647, - "step": 9344 - }, - { - "epoch": 0.718790862241366, - "learning_rate": 0.000548261231649127, - "loss": 0.8373, - "step": 9345 - }, - { - "epoch": 0.7188677794015845, - "learning_rate": 0.0005479811008242961, - "loss": 0.9246, - "step": 9346 - }, - { - "epoch": 0.7189446965618029, - "learning_rate": 0.0005477010255888412, - "loss": 1.1222, - "step": 9347 - }, - { - "epoch": 0.7190216137220213, - "learning_rate": 0.0005474210059591158, - "loss": 1.0607, - "step": 9348 - }, - { - "epoch": 0.7190985308822399, - "learning_rate": 0.0005471410419514707, - "loss": 1.3698, - "step": 9349 - }, - { - "epoch": 0.7191754480424583, - "learning_rate": 0.0005468611335822532, - "loss": 1.5374, - "step": 9350 - }, - { - "epoch": 0.7192523652026768, - "learning_rate": 0.000546581280867807, - "loss": 1.2769, - "step": 9351 - }, - { - "epoch": 0.7193292823628952, - "learning_rate": 0.0005463014838244738, - "loss": 1.0591, - "step": 9352 - }, - { - "epoch": 0.7194061995231136, - "learning_rate": 0.0005460217424685915, - "loss": 1.668, - "step": 9353 - }, - { - "epoch": 0.7194831166833321, - "learning_rate": 0.0005457420568164928, - "loss": 1.0583, - "step": 9354 - }, - { - "epoch": 0.7195600338435505, - "learning_rate": 0.0005454624268845105, - "loss": 0.8462, - "step": 9355 - }, - { - "epoch": 0.7196369510037689, - "learning_rate": 0.0005451828526889718, - "loss": 1.3111, - "step": 9356 - }, - { - "epoch": 0.7197138681639874, - "learning_rate": 0.0005449033342462015, - "loss": 1.2163, - "step": 9357 - }, - { - "epoch": 0.7197907853242058, - "learning_rate": 0.0005446238715725209, - "loss": 1.119, - "step": 9358 - }, - { - "epoch": 0.7198677024844242, - "learning_rate": 0.0005443444646842479, - "loss": 1.292, - "step": 9359 - }, - { - "epoch": 0.7199446196446427, - "learning_rate": 0.0005440651135976973, - "loss": 1.3125, - "step": 9360 - }, - { - "epoch": 0.7200215368048611, - "learning_rate": 0.0005437858183291814, - "loss": 1.2528, - "step": 9361 - }, - { - "epoch": 0.7200984539650797, - "learning_rate": 0.000543506578895008, - "loss": 0.8609, - "step": 9362 - }, - { - "epoch": 0.7201753711252981, - "learning_rate": 0.0005432273953114825, - "loss": 1.1988, - "step": 9363 - }, - { - 
"epoch": 0.7202522882855165, - "learning_rate": 0.0005429482675949063, - "loss": 1.0856, - "step": 9364 - }, - { - "epoch": 0.720329205445735, - "learning_rate": 0.0005426691957615778, - "loss": 1.2129, - "step": 9365 - }, - { - "epoch": 0.7204061226059534, - "learning_rate": 0.0005423901798277936, - "loss": 1.3119, - "step": 9366 - }, - { - "epoch": 0.7204830397661718, - "learning_rate": 0.0005421112198098444, - "loss": 1.2377, - "step": 9367 - }, - { - "epoch": 0.7205599569263903, - "learning_rate": 0.0005418323157240187, - "loss": 1.0323, - "step": 9368 - }, - { - "epoch": 0.7206368740866087, - "learning_rate": 0.0005415534675866033, - "loss": 1.3434, - "step": 9369 - }, - { - "epoch": 0.7207137912468272, - "learning_rate": 0.0005412746754138799, - "loss": 1.2895, - "step": 9370 - }, - { - "epoch": 0.7207907084070456, - "learning_rate": 0.0005409959392221271, - "loss": 0.9031, - "step": 9371 - }, - { - "epoch": 0.720867625567264, - "learning_rate": 0.000540717259027621, - "loss": 1.114, - "step": 9372 - }, - { - "epoch": 0.7209445427274825, - "learning_rate": 0.0005404386348466333, - "loss": 1.2747, - "step": 9373 - }, - { - "epoch": 0.7210214598877009, - "learning_rate": 0.0005401600666954344, - "loss": 1.0093, - "step": 9374 - }, - { - "epoch": 0.7210983770479193, - "learning_rate": 0.0005398815545902899, - "loss": 1.6238, - "step": 9375 - }, - { - "epoch": 0.7211752942081379, - "learning_rate": 0.0005396030985474611, - "loss": 1.0713, - "step": 9376 - }, - { - "epoch": 0.7212522113683563, - "learning_rate": 0.0005393246985832087, - "loss": 0.9556, - "step": 9377 - }, - { - "epoch": 0.7213291285285747, - "learning_rate": 0.0005390463547137883, - "loss": 0.9452, - "step": 9378 - }, - { - "epoch": 0.7214060456887932, - "learning_rate": 0.000538768066955453, - "loss": 1.1993, - "step": 9379 - }, - { - "epoch": 0.7214829628490116, - "learning_rate": 0.0005384898353244517, - "loss": 1.2313, - "step": 9380 - }, - { - "epoch": 0.7215598800092301, - "learning_rate": 0.0005382116598370307, - "loss": 1.1112, - "step": 9381 - }, - { - "epoch": 0.7216367971694485, - "learning_rate": 0.0005379335405094337, - "loss": 1.2084, - "step": 9382 - }, - { - "epoch": 0.7217137143296669, - "learning_rate": 0.0005376554773578998, - "loss": 0.8355, - "step": 9383 - }, - { - "epoch": 0.7217906314898854, - "learning_rate": 0.0005373774703986658, - "loss": 1.0795, - "step": 9384 - }, - { - "epoch": 0.7218675486501038, - "learning_rate": 0.0005370995196479642, - "loss": 1.1485, - "step": 9385 - }, - { - "epoch": 0.7219444658103222, - "learning_rate": 0.0005368216251220253, - "loss": 1.2273, - "step": 9386 - }, - { - "epoch": 0.7220213829705407, - "learning_rate": 0.0005365437868370751, - "loss": 1.2547, - "step": 9387 - }, - { - "epoch": 0.7220983001307592, - "learning_rate": 0.000536266004809338, - "loss": 1.2919, - "step": 9388 - }, - { - "epoch": 0.7221752172909777, - "learning_rate": 0.0005359882790550325, - "loss": 1.0553, - "step": 9389 - }, - { - "epoch": 0.7222521344511961, - "learning_rate": 0.0005357106095903763, - "loss": 1.1381, - "step": 9390 - }, - { - "epoch": 0.7223290516114145, - "learning_rate": 0.0005354329964315829, - "loss": 1.1194, - "step": 9391 - }, - { - "epoch": 0.722405968771633, - "learning_rate": 0.0005351554395948619, - "loss": 1.0312, - "step": 9392 - }, - { - "epoch": 0.7224828859318514, - "learning_rate": 0.0005348779390964203, - "loss": 0.7245, - "step": 9393 - }, - { - "epoch": 0.7225598030920698, - "learning_rate": 0.0005346004949524618, - "loss": 1.0681, - "step": 9394 - }, - 
{ - "epoch": 0.7226367202522883, - "learning_rate": 0.0005343231071791858, - "loss": 1.0064, - "step": 9395 - }, - { - "epoch": 0.7227136374125067, - "learning_rate": 0.0005340457757927906, - "loss": 1.0391, - "step": 9396 - }, - { - "epoch": 0.7227905545727252, - "learning_rate": 0.0005337685008094694, - "loss": 0.9042, - "step": 9397 - }, - { - "epoch": 0.7228674717329436, - "learning_rate": 0.0005334912822454121, - "loss": 1.4803, - "step": 9398 - }, - { - "epoch": 0.722944388893162, - "learning_rate": 0.0005332141201168063, - "loss": 1.2519, - "step": 9399 - }, - { - "epoch": 0.7230213060533806, - "learning_rate": 0.0005329370144398351, - "loss": 1.1931, - "step": 9400 - }, - { - "epoch": 0.723098223213599, - "learning_rate": 0.0005326599652306804, - "loss": 1.1531, - "step": 9401 - }, - { - "epoch": 0.7231751403738174, - "learning_rate": 0.0005323829725055181, - "loss": 1.4472, - "step": 9402 - }, - { - "epoch": 0.7232520575340359, - "learning_rate": 0.0005321060362805217, - "loss": 0.9079, - "step": 9403 - }, - { - "epoch": 0.7233289746942543, - "learning_rate": 0.0005318291565718633, - "loss": 1.2469, - "step": 9404 - }, - { - "epoch": 0.7234058918544727, - "learning_rate": 0.0005315523333957094, - "loss": 1.1492, - "step": 9405 - }, - { - "epoch": 0.7234828090146912, - "learning_rate": 0.0005312755667682239, - "loss": 0.8142, - "step": 9406 - }, - { - "epoch": 0.7235597261749096, - "learning_rate": 0.0005309988567055679, - "loss": 1.0987, - "step": 9407 - }, - { - "epoch": 0.7236366433351281, - "learning_rate": 0.0005307222032238979, - "loss": 1.2596, - "step": 9408 - }, - { - "epoch": 0.7237135604953465, - "learning_rate": 0.0005304456063393691, - "loss": 1.2135, - "step": 9409 - }, - { - "epoch": 0.7237904776555649, - "learning_rate": 0.0005301690660681322, - "loss": 1.2055, - "step": 9410 - }, - { - "epoch": 0.7238673948157834, - "learning_rate": 0.0005298925824263333, - "loss": 1.2809, - "step": 9411 - }, - { - "epoch": 0.7239443119760018, - "learning_rate": 0.0005296161554301181, - "loss": 1.1521, - "step": 9412 - }, - { - "epoch": 0.7240212291362202, - "learning_rate": 0.0005293397850956268, - "loss": 1.1096, - "step": 9413 - }, - { - "epoch": 0.7240981462964388, - "learning_rate": 0.0005290634714389972, - "loss": 1.1432, - "step": 9414 - }, - { - "epoch": 0.7241750634566572, - "learning_rate": 0.0005287872144763633, - "loss": 1.1967, - "step": 9415 - }, - { - "epoch": 0.7242519806168757, - "learning_rate": 0.0005285110142238556, - "loss": 1.3089, - "step": 9416 - }, - { - "epoch": 0.7243288977770941, - "learning_rate": 0.0005282348706976027, - "loss": 0.9607, - "step": 9417 - }, - { - "epoch": 0.7244058149373125, - "learning_rate": 0.0005279587839137286, - "loss": 1.2065, - "step": 9418 - }, - { - "epoch": 0.724482732097531, - "learning_rate": 0.0005276827538883541, - "loss": 0.6844, - "step": 9419 - }, - { - "epoch": 0.7245596492577494, - "learning_rate": 0.0005274067806375968, - "loss": 1.1616, - "step": 9420 - }, - { - "epoch": 0.7246365664179678, - "learning_rate": 0.0005271308641775713, - "loss": 1.3268, - "step": 9421 - }, - { - "epoch": 0.7247134835781863, - "learning_rate": 0.000526855004524388, - "loss": 1.3412, - "step": 9422 - }, - { - "epoch": 0.7247904007384047, - "learning_rate": 0.0005265792016941563, - "loss": 1.2578, - "step": 9423 - }, - { - "epoch": 0.7248673178986231, - "learning_rate": 0.0005263034557029788, - "loss": 1.1353, - "step": 9424 - }, - { - "epoch": 0.7249442350588416, - "learning_rate": 0.0005260277665669569, - "loss": 1.0317, - "step": 9425 
- }, - { - "epoch": 0.72502115221906, - "learning_rate": 0.0005257521343021893, - "loss": 1.2119, - "step": 9426 - }, - { - "epoch": 0.7250980693792786, - "learning_rate": 0.0005254765589247697, - "loss": 1.2605, - "step": 9427 - }, - { - "epoch": 0.725174986539497, - "learning_rate": 0.0005252010404507896, - "loss": 1.4108, - "step": 9428 - }, - { - "epoch": 0.7252519036997154, - "learning_rate": 0.0005249255788963367, - "loss": 1.1841, - "step": 9429 - }, - { - "epoch": 0.7253288208599339, - "learning_rate": 0.0005246501742774948, - "loss": 1.1969, - "step": 9430 - }, - { - "epoch": 0.7254057380201523, - "learning_rate": 0.0005243748266103462, - "loss": 0.8568, - "step": 9431 - }, - { - "epoch": 0.7254826551803707, - "learning_rate": 0.000524099535910969, - "loss": 0.8923, - "step": 9432 - }, - { - "epoch": 0.7255595723405892, - "learning_rate": 0.0005238243021954358, - "loss": 1.7811, - "step": 9433 - }, - { - "epoch": 0.7256364895008076, - "learning_rate": 0.0005235491254798193, - "loss": 1.0812, - "step": 9434 - }, - { - "epoch": 0.7257134066610261, - "learning_rate": 0.0005232740057801867, - "loss": 0.8363, - "step": 9435 - }, - { - "epoch": 0.7257903238212445, - "learning_rate": 0.0005229989431126037, - "loss": 1.0442, - "step": 9436 - }, - { - "epoch": 0.7258672409814629, - "learning_rate": 0.0005227239374931302, - "loss": 1.5356, - "step": 9437 - }, - { - "epoch": 0.7259441581416815, - "learning_rate": 0.0005224489889378241, - "loss": 1.0046, - "step": 9438 - }, - { - "epoch": 0.7260210753018999, - "learning_rate": 0.0005221740974627407, - "loss": 1.0041, - "step": 9439 - }, - { - "epoch": 0.7260979924621183, - "learning_rate": 0.0005218992630839307, - "loss": 0.9842, - "step": 9440 - }, - { - "epoch": 0.7261749096223368, - "learning_rate": 0.0005216244858174422, - "loss": 1.2465, - "step": 9441 - }, - { - "epoch": 0.7262518267825552, - "learning_rate": 0.0005213497656793195, - "loss": 1.1504, - "step": 9442 - }, - { - "epoch": 0.7263287439427736, - "learning_rate": 0.0005210751026856041, - "loss": 0.7373, - "step": 9443 - }, - { - "epoch": 0.7264056611029921, - "learning_rate": 0.000520800496852333, - "loss": 1.1089, - "step": 9444 - }, - { - "epoch": 0.7264825782632105, - "learning_rate": 0.0005205259481955424, - "loss": 1.1643, - "step": 9445 - }, - { - "epoch": 0.726559495423429, - "learning_rate": 0.0005202514567312616, - "loss": 1.0695, - "step": 9446 - }, - { - "epoch": 0.7266364125836474, - "learning_rate": 0.0005199770224755197, - "loss": 1.7981, - "step": 9447 - }, - { - "epoch": 0.7267133297438658, - "learning_rate": 0.0005197026454443408, - "loss": 0.8053, - "step": 9448 - }, - { - "epoch": 0.7267902469040843, - "learning_rate": 0.0005194283256537461, - "loss": 0.9412, - "step": 9449 - }, - { - "epoch": 0.7268671640643027, - "learning_rate": 0.0005191540631197534, - "loss": 0.9117, - "step": 9450 - }, - { - "epoch": 0.7269440812245211, - "learning_rate": 0.0005188798578583772, - "loss": 1.1715, - "step": 9451 - }, - { - "epoch": 0.7270209983847397, - "learning_rate": 0.000518605709885628, - "loss": 0.8628, - "step": 9452 - }, - { - "epoch": 0.7270979155449581, - "learning_rate": 0.0005183316192175149, - "loss": 1.2291, - "step": 9453 - }, - { - "epoch": 0.7271748327051766, - "learning_rate": 0.0005180575858700413, - "loss": 1.2169, - "step": 9454 - }, - { - "epoch": 0.727251749865395, - "learning_rate": 0.0005177836098592089, - "loss": 1.1948, - "step": 9455 - }, - { - "epoch": 0.7273286670256134, - "learning_rate": 0.000517509691201015, - "loss": 1.0877, - "step": 
9456 - }, - { - "epoch": 0.7274055841858319, - "learning_rate": 0.0005172358299114538, - "loss": 1.093, - "step": 9457 - }, - { - "epoch": 0.7274825013460503, - "learning_rate": 0.0005169620260065175, - "loss": 0.9695, - "step": 9458 - }, - { - "epoch": 0.7275594185062687, - "learning_rate": 0.0005166882795021926, - "loss": 0.9559, - "step": 9459 - }, - { - "epoch": 0.7276363356664872, - "learning_rate": 0.0005164145904144634, - "loss": 1.4075, - "step": 9460 - }, - { - "epoch": 0.7277132528267056, - "learning_rate": 0.0005161409587593119, - "loss": 1.3804, - "step": 9461 - }, - { - "epoch": 0.727790169986924, - "learning_rate": 0.000515867384552715, - "loss": 0.9768, - "step": 9462 - }, - { - "epoch": 0.7278670871471425, - "learning_rate": 0.0005155938678106472, - "loss": 1.2198, - "step": 9463 - }, - { - "epoch": 0.727944004307361, - "learning_rate": 0.0005153204085490794, - "loss": 1.1318, - "step": 9464 - }, - { - "epoch": 0.7280209214675795, - "learning_rate": 0.0005150470067839787, - "loss": 1.1305, - "step": 9465 - }, - { - "epoch": 0.7280978386277979, - "learning_rate": 0.0005147736625313101, - "loss": 1.1635, - "step": 9466 - }, - { - "epoch": 0.7281747557880163, - "learning_rate": 0.0005145003758070348, - "loss": 1.1236, - "step": 9467 - }, - { - "epoch": 0.7282516729482348, - "learning_rate": 0.0005142271466271086, - "loss": 1.026, - "step": 9468 - }, - { - "epoch": 0.7283285901084532, - "learning_rate": 0.000513953975007487, - "loss": 0.9184, - "step": 9469 - }, - { - "epoch": 0.7284055072686716, - "learning_rate": 0.0005136808609641206, - "loss": 1.19, - "step": 9470 - }, - { - "epoch": 0.7284824244288901, - "learning_rate": 0.0005134078045129567, - "loss": 1.1412, - "step": 9471 - }, - { - "epoch": 0.7285593415891085, - "learning_rate": 0.0005131348056699391, - "loss": 1.1946, - "step": 9472 - }, - { - "epoch": 0.728636258749327, - "learning_rate": 0.0005128618644510083, - "loss": 1.1496, - "step": 9473 - }, - { - "epoch": 0.7287131759095454, - "learning_rate": 0.0005125889808721025, - "loss": 1.2636, - "step": 9474 - }, - { - "epoch": 0.7287900930697638, - "learning_rate": 0.0005123161549491551, - "loss": 1.34, - "step": 9475 - }, - { - "epoch": 0.7288670102299823, - "learning_rate": 0.0005120433866980968, - "loss": 1.1529, - "step": 9476 - }, - { - "epoch": 0.7289439273902008, - "learning_rate": 0.0005117706761348549, - "loss": 0.9917, - "step": 9477 - }, - { - "epoch": 0.7290208445504192, - "learning_rate": 0.0005114980232753529, - "loss": 1.0118, - "step": 9478 - }, - { - "epoch": 0.7290977617106377, - "learning_rate": 0.000511225428135511, - "loss": 1.16, - "step": 9479 - }, - { - "epoch": 0.7291746788708561, - "learning_rate": 0.0005109528907312476, - "loss": 1.4642, - "step": 9480 - }, - { - "epoch": 0.7292515960310745, - "learning_rate": 0.0005106804110784758, - "loss": 1.1429, - "step": 9481 - }, - { - "epoch": 0.729328513191293, - "learning_rate": 0.0005104079891931052, - "loss": 1.1273, - "step": 9482 - }, - { - "epoch": 0.7294054303515114, - "learning_rate": 0.0005101356250910438, - "loss": 1.2409, - "step": 9483 - }, - { - "epoch": 0.7294823475117299, - "learning_rate": 0.0005098633187881948, - "loss": 0.9331, - "step": 9484 - }, - { - "epoch": 0.7295592646719483, - "learning_rate": 0.0005095910703004586, - "loss": 1.1855, - "step": 9485 - }, - { - "epoch": 0.7296361818321667, - "learning_rate": 0.0005093188796437319, - "loss": 1.0239, - "step": 9486 - }, - { - "epoch": 0.7297130989923852, - "learning_rate": 0.0005090467468339078, - "loss": 1.1713, - "step": 9487 
- }, - { - "epoch": 0.7297900161526036, - "learning_rate": 0.0005087746718868774, - "loss": 1.3517, - "step": 9488 - }, - { - "epoch": 0.729866933312822, - "learning_rate": 0.000508502654818527, - "loss": 1.2247, - "step": 9489 - }, - { - "epoch": 0.7299438504730406, - "learning_rate": 0.0005082306956447399, - "loss": 1.1024, - "step": 9490 - }, - { - "epoch": 0.730020767633259, - "learning_rate": 0.0005079587943813959, - "loss": 1.0691, - "step": 9491 - }, - { - "epoch": 0.7300976847934775, - "learning_rate": 0.0005076869510443715, - "loss": 1.4135, - "step": 9492 - }, - { - "epoch": 0.7301746019536959, - "learning_rate": 0.0005074151656495406, - "loss": 1.1331, - "step": 9493 - }, - { - "epoch": 0.7302515191139143, - "learning_rate": 0.0005071434382127732, - "loss": 0.9224, - "step": 9494 - }, - { - "epoch": 0.7303284362741328, - "learning_rate": 0.0005068717687499341, - "loss": 0.9198, - "step": 9495 - }, - { - "epoch": 0.7304053534343512, - "learning_rate": 0.000506600157276888, - "loss": 0.995, - "step": 9496 - }, - { - "epoch": 0.7304822705945696, - "learning_rate": 0.0005063286038094938, - "loss": 1.0685, - "step": 9497 - }, - { - "epoch": 0.7305591877547881, - "learning_rate": 0.0005060571083636083, - "loss": 1.2064, - "step": 9498 - }, - { - "epoch": 0.7306361049150065, - "learning_rate": 0.0005057856709550839, - "loss": 0.8912, - "step": 9499 - }, - { - "epoch": 0.7307130220752249, - "learning_rate": 0.0005055142915997705, - "loss": 1.7754, - "step": 9500 - }, - { - "epoch": 0.7307899392354434, - "learning_rate": 0.0005052429703135134, - "loss": 1.4261, - "step": 9501 - }, - { - "epoch": 0.7308668563956618, - "learning_rate": 0.0005049717071121565, - "loss": 1.5207, - "step": 9502 - }, - { - "epoch": 0.7309437735558804, - "learning_rate": 0.0005047005020115385, - "loss": 1.2914, - "step": 9503 - }, - { - "epoch": 0.7310206907160988, - "learning_rate": 0.0005044293550274957, - "loss": 1.3741, - "step": 9504 - }, - { - "epoch": 0.7310976078763172, - "learning_rate": 0.0005041582661758602, - "loss": 1.3324, - "step": 9505 - }, - { - "epoch": 0.7311745250365357, - "learning_rate": 0.000503887235472461, - "loss": 1.411, - "step": 9506 - }, - { - "epoch": 0.7312514421967541, - "learning_rate": 0.0005036162629331252, - "loss": 1.2625, - "step": 9507 - }, - { - "epoch": 0.7313283593569725, - "learning_rate": 0.0005033453485736737, - "loss": 0.9098, - "step": 9508 - }, - { - "epoch": 0.731405276517191, - "learning_rate": 0.0005030744924099256, - "loss": 0.9647, - "step": 9509 - }, - { - "epoch": 0.7314821936774094, - "learning_rate": 0.0005028036944576973, - "loss": 0.6814, - "step": 9510 - }, - { - "epoch": 0.7315591108376279, - "learning_rate": 0.0005025329547328006, - "loss": 0.8911, - "step": 9511 - }, - { - "epoch": 0.7316360279978463, - "learning_rate": 0.0005022622732510439, - "loss": 0.8692, - "step": 9512 - }, - { - "epoch": 0.7317129451580647, - "learning_rate": 0.0005019916500282332, - "loss": 1.288, - "step": 9513 - }, - { - "epoch": 0.7317898623182832, - "learning_rate": 0.0005017210850801696, - "loss": 1.1947, - "step": 9514 - }, - { - "epoch": 0.7318667794785017, - "learning_rate": 0.0005014505784226527, - "loss": 1.1111, - "step": 9515 - }, - { - "epoch": 0.73194369663872, - "learning_rate": 0.0005011801300714777, - "loss": 1.0163, - "step": 9516 - }, - { - "epoch": 0.7320206137989386, - "learning_rate": 0.0005009097400424347, - "loss": 1.1756, - "step": 9517 - }, - { - "epoch": 0.732097530959157, - "learning_rate": 0.0005006394083513139, - "loss": 1.3218, - "step": 9518 - 
}, - { - "epoch": 0.7321744481193755, - "learning_rate": 0.0005003691350138989, - "loss": 1.1557, - "step": 9519 - }, - { - "epoch": 0.7322513652795939, - "learning_rate": 0.000500098920045973, - "loss": 1.1851, - "step": 9520 - }, - { - "epoch": 0.7323282824398123, - "learning_rate": 0.0004998287634633128, - "loss": 1.1823, - "step": 9521 - }, - { - "epoch": 0.7324051996000308, - "learning_rate": 0.0004995586652816928, - "loss": 1.0033, - "step": 9522 - }, - { - "epoch": 0.7324821167602492, - "learning_rate": 0.0004992886255168855, - "loss": 1.279, - "step": 9523 - }, - { - "epoch": 0.7325590339204676, - "learning_rate": 0.0004990186441846584, - "loss": 0.9729, - "step": 9524 - }, - { - "epoch": 0.7326359510806861, - "learning_rate": 0.0004987487213007756, - "loss": 0.7133, - "step": 9525 - }, - { - "epoch": 0.7327128682409045, - "learning_rate": 0.0004984788568809988, - "loss": 1.163, - "step": 9526 - }, - { - "epoch": 0.7327897854011229, - "learning_rate": 0.000498209050941085, - "loss": 1.2016, - "step": 9527 - }, - { - "epoch": 0.7328667025613415, - "learning_rate": 0.0004979393034967884, - "loss": 1.334, - "step": 9528 - }, - { - "epoch": 0.7329436197215599, - "learning_rate": 0.0004976696145638612, - "loss": 0.9937, - "step": 9529 - }, - { - "epoch": 0.7330205368817784, - "learning_rate": 0.0004973999841580487, - "loss": 1.1001, - "step": 9530 - }, - { - "epoch": 0.7330974540419968, - "learning_rate": 0.0004971304122950966, - "loss": 1.2798, - "step": 9531 - }, - { - "epoch": 0.7331743712022152, - "learning_rate": 0.000496860898990745, - "loss": 0.9932, - "step": 9532 - }, - { - "epoch": 0.7332512883624337, - "learning_rate": 0.0004965914442607307, - "loss": 0.8744, - "step": 9533 - }, - { - "epoch": 0.7333282055226521, - "learning_rate": 0.000496322048120788, - "loss": 1.2469, - "step": 9534 - }, - { - "epoch": 0.7334051226828705, - "learning_rate": 0.0004960527105866467, - "loss": 0.3908, - "step": 9535 - }, - { - "epoch": 0.733482039843089, - "learning_rate": 0.0004957834316740335, - "loss": 1.1258, - "step": 9536 - }, - { - "epoch": 0.7335589570033074, - "learning_rate": 0.0004955142113986727, - "loss": 0.8403, - "step": 9537 - }, - { - "epoch": 0.7336358741635259, - "learning_rate": 0.000495245049776284, - "loss": 1.2507, - "step": 9538 - }, - { - "epoch": 0.7337127913237443, - "learning_rate": 0.0004949759468225839, - "loss": 1.1635, - "step": 9539 - }, - { - "epoch": 0.7337897084839627, - "learning_rate": 0.0004947069025532859, - "loss": 1.0787, - "step": 9540 - }, - { - "epoch": 0.7338666256441813, - "learning_rate": 0.0004944379169840988, - "loss": 1.2461, - "step": 9541 - }, - { - "epoch": 0.7339435428043997, - "learning_rate": 0.0004941689901307308, - "loss": 1.0702, - "step": 9542 - }, - { - "epoch": 0.7340204599646181, - "learning_rate": 0.0004939001220088833, - "loss": 1.0311, - "step": 9543 - }, - { - "epoch": 0.7340973771248366, - "learning_rate": 0.0004936313126342556, - "loss": 1.0072, - "step": 9544 - }, - { - "epoch": 0.734174294285055, - "learning_rate": 0.0004933625620225449, - "loss": 1.4752, - "step": 9545 - }, - { - "epoch": 0.7342512114452734, - "learning_rate": 0.0004930938701894434, - "loss": 1.0207, - "step": 9546 - }, - { - "epoch": 0.7343281286054919, - "learning_rate": 0.00049282523715064, - "loss": 1.3235, - "step": 9547 - }, - { - "epoch": 0.7344050457657103, - "learning_rate": 0.0004925566629218208, - "loss": 1.133, - "step": 9548 - }, - { - "epoch": 0.7344819629259288, - "learning_rate": 0.0004922881475186673, - "loss": 1.1452, - "step": 9549 - }, 
- { - "epoch": 0.7345588800861472, - "learning_rate": 0.0004920196909568599, - "loss": 1.055, - "step": 9550 - }, - { - "epoch": 0.7346357972463656, - "learning_rate": 0.0004917512932520734, - "loss": 0.8454, - "step": 9551 - }, - { - "epoch": 0.7347127144065841, - "learning_rate": 0.0004914829544199788, - "loss": 1.3494, - "step": 9552 - }, - { - "epoch": 0.7347896315668025, - "learning_rate": 0.000491214674476246, - "loss": 1.3645, - "step": 9553 - }, - { - "epoch": 0.734866548727021, - "learning_rate": 0.0004909464534365399, - "loss": 1.1012, - "step": 9554 - }, - { - "epoch": 0.7349434658872395, - "learning_rate": 0.0004906782913165217, - "loss": 1.4507, - "step": 9555 - }, - { - "epoch": 0.7350203830474579, - "learning_rate": 0.0004904101881318501, - "loss": 1.1934, - "step": 9556 - }, - { - "epoch": 0.7350973002076764, - "learning_rate": 0.0004901421438981792, - "loss": 1.6996, - "step": 9557 - }, - { - "epoch": 0.7351742173678948, - "learning_rate": 0.0004898741586311615, - "loss": 1.2667, - "step": 9558 - }, - { - "epoch": 0.7352511345281132, - "learning_rate": 0.0004896062323464447, - "loss": 1.1764, - "step": 9559 - }, - { - "epoch": 0.7353280516883317, - "learning_rate": 0.0004893383650596726, - "loss": 0.8002, - "step": 9560 - }, - { - "epoch": 0.7354049688485501, - "learning_rate": 0.0004890705567864868, - "loss": 0.9024, - "step": 9561 - }, - { - "epoch": 0.7354818860087685, - "learning_rate": 0.0004888028075425248, - "loss": 1.0302, - "step": 9562 - }, - { - "epoch": 0.735558803168987, - "learning_rate": 0.0004885351173434203, - "loss": 1.1395, - "step": 9563 - }, - { - "epoch": 0.7356357203292054, - "learning_rate": 0.00048826748620480517, - "loss": 1.3611, - "step": 9564 - }, - { - "epoch": 0.7357126374894238, - "learning_rate": 0.00048799991414230545, - "loss": 1.1677, - "step": 9565 - }, - { - "epoch": 0.7357895546496424, - "learning_rate": 0.00048773240117154504, - "loss": 1.1451, - "step": 9566 - }, - { - "epoch": 0.7358664718098608, - "learning_rate": 0.0004874649473081452, - "loss": 1.0619, - "step": 9567 - }, - { - "epoch": 0.7359433889700793, - "learning_rate": 0.0004871975525677223, - "loss": 1.2173, - "step": 9568 - }, - { - "epoch": 0.7360203061302977, - "learning_rate": 0.0004869302169658898, - "loss": 0.9331, - "step": 9569 - }, - { - "epoch": 0.7360972232905161, - "learning_rate": 0.00048666294051825763, - "loss": 1.1044, - "step": 9570 - }, - { - "epoch": 0.7361741404507346, - "learning_rate": 0.0004863957232404321, - "loss": 1.0849, - "step": 9571 - }, - { - "epoch": 0.736251057610953, - "learning_rate": 0.00048612856514801704, - "loss": 1.3422, - "step": 9572 - }, - { - "epoch": 0.7363279747711714, - "learning_rate": 0.00048586146625661234, - "loss": 1.2571, - "step": 9573 - }, - { - "epoch": 0.7364048919313899, - "learning_rate": 0.00048559442658181235, - "loss": 0.9953, - "step": 9574 - }, - { - "epoch": 0.7364818090916083, - "learning_rate": 0.00048532744613921154, - "loss": 1.3342, - "step": 9575 - }, - { - "epoch": 0.7365587262518268, - "learning_rate": 0.0004850605249443982, - "loss": 0.8317, - "step": 9576 - }, - { - "epoch": 0.7366356434120452, - "learning_rate": 0.00048479366301295944, - "loss": 1.3491, - "step": 9577 - }, - { - "epoch": 0.7367125605722636, - "learning_rate": 0.00048452686036047626, - "loss": 0.9034, - "step": 9578 - }, - { - "epoch": 0.7367894777324822, - "learning_rate": 0.00048426011700252754, - "loss": 1.0369, - "step": 9579 - }, - { - "epoch": 0.7368663948927006, - "learning_rate": 0.0004839934329546896, - "loss": 1.2174, - 
"step": 9580 - }, - { - "epoch": 0.736943312052919, - "learning_rate": 0.0004837268082325341, - "loss": 1.0057, - "step": 9581 - }, - { - "epoch": 0.7370202292131375, - "learning_rate": 0.0004834602428516293, - "loss": 1.2124, - "step": 9582 - }, - { - "epoch": 0.7370971463733559, - "learning_rate": 0.00048319373682754026, - "loss": 1.3084, - "step": 9583 - }, - { - "epoch": 0.7371740635335743, - "learning_rate": 0.00048292729017582875, - "loss": 1.3049, - "step": 9584 - }, - { - "epoch": 0.7372509806937928, - "learning_rate": 0.0004826609029120522, - "loss": 1.1122, - "step": 9585 - }, - { - "epoch": 0.7373278978540112, - "learning_rate": 0.0004823945750517667, - "loss": 1.2862, - "step": 9586 - }, - { - "epoch": 0.7374048150142297, - "learning_rate": 0.0004821283066105216, - "loss": 1.1289, - "step": 9587 - }, - { - "epoch": 0.7374817321744481, - "learning_rate": 0.0004818620976038659, - "loss": 1.1788, - "step": 9588 - }, - { - "epoch": 0.7375586493346665, - "learning_rate": 0.00048159594804734335, - "loss": 0.842, - "step": 9589 - }, - { - "epoch": 0.737635566494885, - "learning_rate": 0.00048132985795649487, - "loss": 1.2714, - "step": 9590 - }, - { - "epoch": 0.7377124836551034, - "learning_rate": 0.00048106382734685747, - "loss": 1.1957, - "step": 9591 - }, - { - "epoch": 0.7377894008153218, - "learning_rate": 0.00048079785623396533, - "loss": 1.3048, - "step": 9592 - }, - { - "epoch": 0.7378663179755404, - "learning_rate": 0.00048053194463334775, - "loss": 1.1409, - "step": 9593 - }, - { - "epoch": 0.7379432351357588, - "learning_rate": 0.0004802660925605332, - "loss": 1.3611, - "step": 9594 - }, - { - "epoch": 0.7380201522959773, - "learning_rate": 0.00048000030003104413, - "loss": 0.7823, - "step": 9595 - }, - { - "epoch": 0.7380970694561957, - "learning_rate": 0.00047973456706040037, - "loss": 0.967, - "step": 9596 - }, - { - "epoch": 0.7381739866164141, - "learning_rate": 0.0004794688936641187, - "loss": 1.2858, - "step": 9597 - }, - { - "epoch": 0.7382509037766326, - "learning_rate": 0.0004792032798577114, - "loss": 1.0721, - "step": 9598 - }, - { - "epoch": 0.738327820936851, - "learning_rate": 0.0004789377256566895, - "loss": 1.0331, - "step": 9599 - }, - { - "epoch": 0.7384047380970694, - "learning_rate": 0.0004786722310765574, - "loss": 1.0129, - "step": 9600 - }, - { - "epoch": 0.7384816552572879, - "learning_rate": 0.00047840679613281745, - "loss": 1.1207, - "step": 9601 - }, - { - "epoch": 0.7385585724175063, - "learning_rate": 0.0004781414208409701, - "loss": 1.2421, - "step": 9602 - }, - { - "epoch": 0.7386354895777247, - "learning_rate": 0.00047787610521651, - "loss": 1.2682, - "step": 9603 - }, - { - "epoch": 0.7387124067379433, - "learning_rate": 0.0004776108492749293, - "loss": 1.2052, - "step": 9604 - }, - { - "epoch": 0.7387893238981617, - "learning_rate": 0.0004773456530317167, - "loss": 1.1606, - "step": 9605 - }, - { - "epoch": 0.7388662410583802, - "learning_rate": 0.00047708051650235684, - "loss": 1.1792, - "step": 9606 - }, - { - "epoch": 0.7389431582185986, - "learning_rate": 0.00047681543970233195, - "loss": 1.2722, - "step": 9607 - }, - { - "epoch": 0.739020075378817, - "learning_rate": 0.00047655042264712044, - "loss": 1.2043, - "step": 9608 - }, - { - "epoch": 0.7390969925390355, - "learning_rate": 0.00047628546535219554, - "loss": 1.0525, - "step": 9609 - }, - { - "epoch": 0.7391739096992539, - "learning_rate": 0.0004760205678330294, - "loss": 0.9423, - "step": 9610 - }, - { - "epoch": 0.7392508268594723, - "learning_rate": 0.0004757557301050897, - 
"loss": 1.1264, - "step": 9611 - }, - { - "epoch": 0.7393277440196908, - "learning_rate": 0.0004754909521838403, - "loss": 1.4684, - "step": 9612 - }, - { - "epoch": 0.7394046611799092, - "learning_rate": 0.00047522623408474186, - "loss": 1.1883, - "step": 9613 - }, - { - "epoch": 0.7394815783401277, - "learning_rate": 0.00047496157582325126, - "loss": 0.9851, - "step": 9614 - }, - { - "epoch": 0.7395584955003461, - "learning_rate": 0.0004746969774148229, - "loss": 0.6258, - "step": 9615 - }, - { - "epoch": 0.7396354126605645, - "learning_rate": 0.00047443243887490663, - "loss": 1.7361, - "step": 9616 - }, - { - "epoch": 0.739712329820783, - "learning_rate": 0.000474167960218949, - "loss": 1.109, - "step": 9617 - }, - { - "epoch": 0.7397892469810015, - "learning_rate": 0.0004739035414623933, - "loss": 1.0861, - "step": 9618 - }, - { - "epoch": 0.7398661641412199, - "learning_rate": 0.0004736391826206792, - "loss": 1.15, - "step": 9619 - }, - { - "epoch": 0.7399430813014384, - "learning_rate": 0.0004733748837092423, - "loss": 0.8956, - "step": 9620 - }, - { - "epoch": 0.7400199984616568, - "learning_rate": 0.000473110644743517, - "loss": 1.4887, - "step": 9621 - }, - { - "epoch": 0.7400969156218753, - "learning_rate": 0.00047284646573893024, - "loss": 1.4459, - "step": 9622 - }, - { - "epoch": 0.7401738327820937, - "learning_rate": 0.00047258234671090937, - "loss": 0.9748, - "step": 9623 - }, - { - "epoch": 0.7402507499423121, - "learning_rate": 0.00047231828767487605, - "loss": 0.9643, - "step": 9624 - }, - { - "epoch": 0.7403276671025306, - "learning_rate": 0.0004720542886462488, - "loss": 1.1797, - "step": 9625 - }, - { - "epoch": 0.740404584262749, - "learning_rate": 0.00047179034964044304, - "loss": 1.0901, - "step": 9626 - }, - { - "epoch": 0.7404815014229674, - "learning_rate": 0.00047152647067287036, - "loss": 1.2084, - "step": 9627 - }, - { - "epoch": 0.7405584185831859, - "learning_rate": 0.0004712626517589384, - "loss": 1.4829, - "step": 9628 - }, - { - "epoch": 0.7406353357434043, - "learning_rate": 0.00047099889291405284, - "loss": 0.8356, - "step": 9629 - }, - { - "epoch": 0.7407122529036227, - "learning_rate": 0.0004707351941536142, - "loss": 1.2059, - "step": 9630 - }, - { - "epoch": 0.7407891700638413, - "learning_rate": 0.0004704715554930203, - "loss": 1.0607, - "step": 9631 - }, - { - "epoch": 0.7408660872240597, - "learning_rate": 0.0004702079769476653, - "loss": 1.2945, - "step": 9632 - }, - { - "epoch": 0.7409430043842782, - "learning_rate": 0.00046994445853293935, - "loss": 1.1079, - "step": 9633 - }, - { - "epoch": 0.7410199215444966, - "learning_rate": 0.0004696810002642307, - "loss": 1.1573, - "step": 9634 - }, - { - "epoch": 0.741096838704715, - "learning_rate": 0.0004694176021569218, - "loss": 1.144, - "step": 9635 - }, - { - "epoch": 0.7411737558649335, - "learning_rate": 0.0004691542642263928, - "loss": 0.8916, - "step": 9636 - }, - { - "epoch": 0.7412506730251519, - "learning_rate": 0.000468890986488021, - "loss": 1.2286, - "step": 9637 - }, - { - "epoch": 0.7413275901853703, - "learning_rate": 0.00046862776895717907, - "loss": 1.212, - "step": 9638 - }, - { - "epoch": 0.7414045073455888, - "learning_rate": 0.00046836461164923677, - "loss": 1.0431, - "step": 9639 - }, - { - "epoch": 0.7414814245058072, - "learning_rate": 0.0004681015145795597, - "loss": 1.1437, - "step": 9640 - }, - { - "epoch": 0.7415583416660257, - "learning_rate": 0.00046783847776351033, - "loss": 1.2865, - "step": 9641 - }, - { - "epoch": 0.7416352588262441, - "learning_rate": 
0.0004675755012164483, - "loss": 1.388, - "step": 9642 - }, - { - "epoch": 0.7417121759864626, - "learning_rate": 0.0004673125849537288, - "loss": 1.1288, - "step": 9643 - }, - { - "epoch": 0.7417890931466811, - "learning_rate": 0.0004670497289907035, - "loss": 0.9553, - "step": 9644 - }, - { - "epoch": 0.7418660103068995, - "learning_rate": 0.0004667869333427212, - "loss": 1.2675, - "step": 9645 - }, - { - "epoch": 0.7419429274671179, - "learning_rate": 0.00046652419802512674, - "loss": 1.1955, - "step": 9646 - }, - { - "epoch": 0.7420198446273364, - "learning_rate": 0.00046626152305326133, - "loss": 1.226, - "step": 9647 - }, - { - "epoch": 0.7420967617875548, - "learning_rate": 0.00046599890844246293, - "loss": 0.531, - "step": 9648 - }, - { - "epoch": 0.7421736789477732, - "learning_rate": 0.0004657363542080658, - "loss": 0.9925, - "step": 9649 - }, - { - "epoch": 0.7422505961079917, - "learning_rate": 0.00046547386036540034, - "loss": 1.5166, - "step": 9650 - }, - { - "epoch": 0.7423275132682101, - "learning_rate": 0.00046521142692979494, - "loss": 1.3091, - "step": 9651 - }, - { - "epoch": 0.7424044304284286, - "learning_rate": 0.0004649490539165727, - "loss": 1.1894, - "step": 9652 - }, - { - "epoch": 0.742481347588647, - "learning_rate": 0.0004646867413410539, - "loss": 0.5401, - "step": 9653 - }, - { - "epoch": 0.7425582647488654, - "learning_rate": 0.0004644244892185553, - "loss": 1.1338, - "step": 9654 - }, - { - "epoch": 0.742635181909084, - "learning_rate": 0.0004641622975643895, - "loss": 1.0326, - "step": 9655 - }, - { - "epoch": 0.7427120990693024, - "learning_rate": 0.00046390016639386706, - "loss": 1.4126, - "step": 9656 - }, - { - "epoch": 0.7427890162295208, - "learning_rate": 0.00046363809572229434, - "loss": 1.1692, - "step": 9657 - }, - { - "epoch": 0.7428659333897393, - "learning_rate": 0.00046337608556497236, - "loss": 1.0901, - "step": 9658 - }, - { - "epoch": 0.7429428505499577, - "learning_rate": 0.0004631141359372014, - "loss": 1.406, - "step": 9659 - }, - { - "epoch": 0.7430197677101762, - "learning_rate": 0.0004628522468542763, - "loss": 0.714, - "step": 9660 - }, - { - "epoch": 0.7430966848703946, - "learning_rate": 0.00046259041833149024, - "loss": 1.1301, - "step": 9661 - }, - { - "epoch": 0.743173602030613, - "learning_rate": 0.00046232865038413036, - "loss": 0.8453, - "step": 9662 - }, - { - "epoch": 0.7432505191908315, - "learning_rate": 0.0004620669430274816, - "loss": 1.146, - "step": 9663 - }, - { - "epoch": 0.7433274363510499, - "learning_rate": 0.00046180529627682623, - "loss": 1.0231, - "step": 9664 - }, - { - "epoch": 0.7434043535112683, - "learning_rate": 0.0004615437101474415, - "loss": 0.9878, - "step": 9665 - }, - { - "epoch": 0.7434812706714868, - "learning_rate": 0.00046128218465460176, - "loss": 1.4212, - "step": 9666 - }, - { - "epoch": 0.7435581878317052, - "learning_rate": 0.0004610207198135779, - "loss": 0.8955, - "step": 9667 - }, - { - "epoch": 0.7436351049919236, - "learning_rate": 0.0004607593156396371, - "loss": 1.2782, - "step": 9668 - }, - { - "epoch": 0.7437120221521422, - "learning_rate": 0.0004604979721480422, - "loss": 1.2521, - "step": 9669 - }, - { - "epoch": 0.7437889393123606, - "learning_rate": 0.00046023668935405516, - "loss": 1.1578, - "step": 9670 - }, - { - "epoch": 0.7438658564725791, - "learning_rate": 0.00045997546727293047, - "loss": 0.9417, - "step": 9671 - }, - { - "epoch": 0.7439427736327975, - "learning_rate": 0.0004597143059199227, - "loss": 0.9857, - "step": 9672 - }, - { - "epoch": 0.7440196907930159, - 
"learning_rate": 0.00045945320531028057, - "loss": 1.1483, - "step": 9673 - }, - { - "epoch": 0.7440966079532344, - "learning_rate": 0.00045919216545925023, - "loss": 1.0915, - "step": 9674 - }, - { - "epoch": 0.7441735251134528, - "learning_rate": 0.0004589311863820742, - "loss": 1.2431, - "step": 9675 - }, - { - "epoch": 0.7442504422736712, - "learning_rate": 0.00045867026809399107, - "loss": 1.2593, - "step": 9676 - }, - { - "epoch": 0.7443273594338897, - "learning_rate": 0.00045840941061023587, - "loss": 1.0995, - "step": 9677 - }, - { - "epoch": 0.7444042765941081, - "learning_rate": 0.00045814861394604105, - "loss": 0.8031, - "step": 9678 - }, - { - "epoch": 0.7444811937543266, - "learning_rate": 0.0004578878781166346, - "loss": 1.0974, - "step": 9679 - }, - { - "epoch": 0.744558110914545, - "learning_rate": 0.00045762720313724086, - "loss": 0.8156, - "step": 9680 - }, - { - "epoch": 0.7446350280747634, - "learning_rate": 0.00045736658902308114, - "loss": 1.3662, - "step": 9681 - }, - { - "epoch": 0.744711945234982, - "learning_rate": 0.0004571060357893724, - "loss": 1.1884, - "step": 9682 - }, - { - "epoch": 0.7447888623952004, - "learning_rate": 0.00045684554345133005, - "loss": 1.1542, - "step": 9683 - }, - { - "epoch": 0.7448657795554188, - "learning_rate": 0.0004565851120241632, - "loss": 1.0133, - "step": 9684 - }, - { - "epoch": 0.7449426967156373, - "learning_rate": 0.00045632474152307854, - "loss": 0.6093, - "step": 9685 - }, - { - "epoch": 0.7450196138758557, - "learning_rate": 0.0004560644319632802, - "loss": 1.0068, - "step": 9686 - }, - { - "epoch": 0.7450965310360741, - "learning_rate": 0.00045580418335996786, - "loss": 1.1714, - "step": 9687 - }, - { - "epoch": 0.7451734481962926, - "learning_rate": 0.00045554399572833744, - "loss": 1.1873, - "step": 9688 - }, - { - "epoch": 0.745250365356511, - "learning_rate": 0.00045528386908358153, - "loss": 1.0161, - "step": 9689 - }, - { - "epoch": 0.7453272825167295, - "learning_rate": 0.0004550238034408889, - "loss": 1.0107, - "step": 9690 - }, - { - "epoch": 0.7454041996769479, - "learning_rate": 0.000454763798815446, - "loss": 0.8771, - "step": 9691 - }, - { - "epoch": 0.7454811168371663, - "learning_rate": 0.0004545038552224345, - "loss": 1.2641, - "step": 9692 - }, - { - "epoch": 0.7455580339973849, - "learning_rate": 0.00045424397267703154, - "loss": 1.0957, - "step": 9693 - }, - { - "epoch": 0.7456349511576033, - "learning_rate": 0.0004539841511944134, - "loss": 1.4517, - "step": 9694 - }, - { - "epoch": 0.7457118683178217, - "learning_rate": 0.0004537243907897509, - "loss": 1.0689, - "step": 9695 - }, - { - "epoch": 0.7457887854780402, - "learning_rate": 0.0004534646914782115, - "loss": 1.4997, - "step": 9696 - }, - { - "epoch": 0.7458657026382586, - "learning_rate": 0.0004532050532749595, - "loss": 1.5104, - "step": 9697 - }, - { - "epoch": 0.7459426197984771, - "learning_rate": 0.0004529454761951549, - "loss": 0.7274, - "step": 9698 - }, - { - "epoch": 0.7460195369586955, - "learning_rate": 0.00045268596025395554, - "loss": 1.0045, - "step": 9699 - }, - { - "epoch": 0.7460964541189139, - "learning_rate": 0.0004524265054665146, - "loss": 1.2495, - "step": 9700 - }, - { - "epoch": 0.7461733712791324, - "learning_rate": 0.0004521671118479818, - "loss": 1.1395, - "step": 9701 - }, - { - "epoch": 0.7462502884393508, - "learning_rate": 0.00045190777941350307, - "loss": 1.5124, - "step": 9702 - }, - { - "epoch": 0.7463272055995692, - "learning_rate": 0.00045164850817822164, - "loss": 1.4483, - "step": 9703 - }, - { - "epoch": 
0.7464041227597877, - "learning_rate": 0.0004513892981572759, - "loss": 1.1922, - "step": 9704 - }, - { - "epoch": 0.7464810399200061, - "learning_rate": 0.0004511301493658027, - "loss": 0.8614, - "step": 9705 - }, - { - "epoch": 0.7465579570802245, - "learning_rate": 0.0004508710618189329, - "loss": 0.9824, - "step": 9706 - }, - { - "epoch": 0.7466348742404431, - "learning_rate": 0.00045061203553179455, - "loss": 0.982, - "step": 9707 - }, - { - "epoch": 0.7467117914006615, - "learning_rate": 0.00045035307051951353, - "loss": 1.5379, - "step": 9708 - }, - { - "epoch": 0.74678870856088, - "learning_rate": 0.00045009416679721076, - "loss": 1.152, - "step": 9709 - }, - { - "epoch": 0.7468656257210984, - "learning_rate": 0.0004498353243800037, - "loss": 1.3142, - "step": 9710 - }, - { - "epoch": 0.7469425428813168, - "learning_rate": 0.0004495765432830065, - "loss": 1.0859, - "step": 9711 - }, - { - "epoch": 0.7470194600415353, - "learning_rate": 0.00044931782352132917, - "loss": 1.1589, - "step": 9712 - }, - { - "epoch": 0.7470963772017537, - "learning_rate": 0.0004490591651100797, - "loss": 1.077, - "step": 9713 - }, - { - "epoch": 0.7471732943619721, - "learning_rate": 0.00044880056806436124, - "loss": 1.435, - "step": 9714 - }, - { - "epoch": 0.7472502115221906, - "learning_rate": 0.0004485420323992722, - "loss": 1.2256, - "step": 9715 - }, - { - "epoch": 0.747327128682409, - "learning_rate": 0.0004482835581299102, - "loss": 1.1679, - "step": 9716 - }, - { - "epoch": 0.7474040458426275, - "learning_rate": 0.0004480251452713669, - "loss": 1.0517, - "step": 9717 - }, - { - "epoch": 0.7474809630028459, - "learning_rate": 0.0004477667938387325, - "loss": 1.2843, - "step": 9718 - }, - { - "epoch": 0.7475578801630643, - "learning_rate": 0.00044750850384709116, - "loss": 1.4952, - "step": 9719 - }, - { - "epoch": 0.7476347973232829, - "learning_rate": 0.0004472502753115248, - "loss": 1.2533, - "step": 9720 - }, - { - "epoch": 0.7477117144835013, - "learning_rate": 0.0004469921082471124, - "loss": 1.1568, - "step": 9721 - }, - { - "epoch": 0.7477886316437197, - "learning_rate": 0.0004467340026689283, - "loss": 1.26, - "step": 9722 - }, - { - "epoch": 0.7478655488039382, - "learning_rate": 0.00044647595859204336, - "loss": 0.9277, - "step": 9723 - }, - { - "epoch": 0.7479424659641566, - "learning_rate": 0.0004462179760315253, - "loss": 1.4459, - "step": 9724 - }, - { - "epoch": 0.7480193831243751, - "learning_rate": 0.00044596005500243736, - "loss": 1.577, - "step": 9725 - }, - { - "epoch": 0.7480963002845935, - "learning_rate": 0.00044570219551984063, - "loss": 1.0829, - "step": 9726 - }, - { - "epoch": 0.7481732174448119, - "learning_rate": 0.00044544439759879197, - "loss": 1.2658, - "step": 9727 - }, - { - "epoch": 0.7482501346050304, - "learning_rate": 0.0004451866612543431, - "loss": 1.3286, - "step": 9728 - }, - { - "epoch": 0.7483270517652488, - "learning_rate": 0.0004449289865015448, - "loss": 0.9882, - "step": 9729 - }, - { - "epoch": 0.7484039689254672, - "learning_rate": 0.00044467137335544264, - "loss": 1.5653, - "step": 9730 - }, - { - "epoch": 0.7484808860856857, - "learning_rate": 0.0004444138218310787, - "loss": 1.2296, - "step": 9731 - }, - { - "epoch": 0.7485578032459042, - "learning_rate": 0.0004441563319434919, - "loss": 0.8052, - "step": 9732 - }, - { - "epoch": 0.7486347204061226, - "learning_rate": 0.00044389890370771714, - "loss": 1.3293, - "step": 9733 - }, - { - "epoch": 0.7487116375663411, - "learning_rate": 0.00044364153713878564, - "loss": 1.0459, - "step": 9734 - }, - 
{ - "epoch": 0.7487885547265595, - "learning_rate": 0.00044338423225172627, - "loss": 1.6434, - "step": 9735 - }, - { - "epoch": 0.748865471886778, - "learning_rate": 0.00044312698906156264, - "loss": 1.0665, - "step": 9736 - }, - { - "epoch": 0.7489423890469964, - "learning_rate": 0.0004428698075833156, - "loss": 1.1757, - "step": 9737 - }, - { - "epoch": 0.7490193062072148, - "learning_rate": 0.00044261268783200207, - "loss": 1.2172, - "step": 9738 - }, - { - "epoch": 0.7490962233674333, - "learning_rate": 0.0004423556298226353, - "loss": 0.7575, - "step": 9739 - }, - { - "epoch": 0.7491731405276517, - "learning_rate": 0.0004420986335702266, - "loss": 1.3136, - "step": 9740 - }, - { - "epoch": 0.7492500576878701, - "learning_rate": 0.00044184169908978064, - "loss": 1.3724, - "step": 9741 - }, - { - "epoch": 0.7493269748480886, - "learning_rate": 0.00044158482639630015, - "loss": 0.932, - "step": 9742 - }, - { - "epoch": 0.749403892008307, - "learning_rate": 0.00044132801550478523, - "loss": 1.5715, - "step": 9743 - }, - { - "epoch": 0.7494808091685256, - "learning_rate": 0.00044107126643023087, - "loss": 1.2626, - "step": 9744 - }, - { - "epoch": 0.749557726328744, - "learning_rate": 0.0004408145791876287, - "loss": 1.1827, - "step": 9745 - }, - { - "epoch": 0.7496346434889624, - "learning_rate": 0.00044055795379196734, - "loss": 1.2491, - "step": 9746 - }, - { - "epoch": 0.7497115606491809, - "learning_rate": 0.0004403013902582305, - "loss": 0.8887, - "step": 9747 - }, - { - "epoch": 0.7497884778093993, - "learning_rate": 0.0004400448886014005, - "loss": 1.1813, - "step": 9748 - }, - { - "epoch": 0.7498653949696177, - "learning_rate": 0.00043978844883645437, - "loss": 1.2962, - "step": 9749 - }, - { - "epoch": 0.7499423121298362, - "learning_rate": 0.0004395320709783649, - "loss": 1.2228, - "step": 9750 - }, - { - "epoch": 0.7500192292900546, - "learning_rate": 0.00043927575504210326, - "loss": 0.9294, - "step": 9751 - }, - { - "epoch": 0.750096146450273, - "learning_rate": 0.000439019501042636, - "loss": 0.7966, - "step": 9752 - }, - { - "epoch": 0.7501730636104915, - "learning_rate": 0.0004387633089949256, - "loss": 1.073, - "step": 9753 - }, - { - "epoch": 0.7502499807707099, - "learning_rate": 0.00043850717891393167, - "loss": 0.8838, - "step": 9754 - }, - { - "epoch": 0.7503268979309284, - "learning_rate": 0.00043825111081460935, - "loss": 1.0305, - "step": 9755 - }, - { - "epoch": 0.7504038150911468, - "learning_rate": 0.00043799510471191166, - "loss": 1.3044, - "step": 9756 - }, - { - "epoch": 0.7504807322513652, - "learning_rate": 0.00043773916062078656, - "loss": 0.8356, - "step": 9757 - }, - { - "epoch": 0.7505576494115838, - "learning_rate": 0.00043748327855617906, - "loss": 0.9336, - "step": 9758 - }, - { - "epoch": 0.7506345665718022, - "learning_rate": 0.00043722745853303017, - "loss": 1.0255, - "step": 9759 - }, - { - "epoch": 0.7507114837320206, - "learning_rate": 0.00043697170056627744, - "loss": 1.1667, - "step": 9760 - }, - { - "epoch": 0.7507884008922391, - "learning_rate": 0.00043671600467085466, - "loss": 1.3942, - "step": 9761 - }, - { - "epoch": 0.7508653180524575, - "learning_rate": 0.00043646037086169347, - "loss": 1.1702, - "step": 9762 - }, - { - "epoch": 0.750942235212676, - "learning_rate": 0.0004362047991537185, - "loss": 1.3565, - "step": 9763 - }, - { - "epoch": 0.7510191523728944, - "learning_rate": 0.0004359492895618546, - "loss": 1.3172, - "step": 9764 - }, - { - "epoch": 0.7510960695331128, - "learning_rate": 0.0004356938421010206, - "loss": 0.9548, 
- "step": 9765 - }, - { - "epoch": 0.7511729866933313, - "learning_rate": 0.00043543845678613226, - "loss": 1.3256, - "step": 9766 - }, - { - "epoch": 0.7512499038535497, - "learning_rate": 0.00043518313363210203, - "loss": 1.2936, - "step": 9767 - }, - { - "epoch": 0.7513268210137681, - "learning_rate": 0.00043492787265383823, - "loss": 1.2388, - "step": 9768 - }, - { - "epoch": 0.7514037381739866, - "learning_rate": 0.0004346726738662455, - "loss": 1.1373, - "step": 9769 - }, - { - "epoch": 0.751480655334205, - "learning_rate": 0.0004344175372842261, - "loss": 1.0863, - "step": 9770 - }, - { - "epoch": 0.7515575724944235, - "learning_rate": 0.00043416246292267717, - "loss": 1.2035, - "step": 9771 - }, - { - "epoch": 0.751634489654642, - "learning_rate": 0.0004339074507964928, - "loss": 1.0746, - "step": 9772 - }, - { - "epoch": 0.7517114068148604, - "learning_rate": 0.0004336525009205635, - "loss": 1.2591, - "step": 9773 - }, - { - "epoch": 0.7517883239750789, - "learning_rate": 0.00043339761330977544, - "loss": 0.8002, - "step": 9774 - }, - { - "epoch": 0.7518652411352973, - "learning_rate": 0.00043314278797901333, - "loss": 0.9797, - "step": 9775 - }, - { - "epoch": 0.7519421582955157, - "learning_rate": 0.0004328880249431553, - "loss": 0.9351, - "step": 9776 - }, - { - "epoch": 0.7520190754557342, - "learning_rate": 0.00043263332421707694, - "loss": 0.9714, - "step": 9777 - }, - { - "epoch": 0.7520959926159526, - "learning_rate": 0.00043237868581565185, - "loss": 1.349, - "step": 9778 - }, - { - "epoch": 0.752172909776171, - "learning_rate": 0.00043212410975374785, - "loss": 1.3253, - "step": 9779 - }, - { - "epoch": 0.7522498269363895, - "learning_rate": 0.00043186959604622993, - "loss": 1.2438, - "step": 9780 - }, - { - "epoch": 0.7523267440966079, - "learning_rate": 0.0004316151447079595, - "loss": 1.2926, - "step": 9781 - }, - { - "epoch": 0.7524036612568265, - "learning_rate": 0.00043136075575379374, - "loss": 1.4467, - "step": 9782 - }, - { - "epoch": 0.7524805784170449, - "learning_rate": 0.00043110642919858764, - "loss": 1.8067, - "step": 9783 - }, - { - "epoch": 0.7525574955772633, - "learning_rate": 0.00043085216505719124, - "loss": 1.134, - "step": 9784 - }, - { - "epoch": 0.7526344127374818, - "learning_rate": 0.000430597963344451, - "loss": 1.2367, - "step": 9785 - }, - { - "epoch": 0.7527113298977002, - "learning_rate": 0.00043034382407521017, - "loss": 1.1981, - "step": 9786 - }, - { - "epoch": 0.7527882470579186, - "learning_rate": 0.00043008974726430836, - "loss": 1.1371, - "step": 9787 - }, - { - "epoch": 0.7528651642181371, - "learning_rate": 0.00042983573292658113, - "loss": 1.1948, - "step": 9788 - }, - { - "epoch": 0.7529420813783555, - "learning_rate": 0.00042958178107686086, - "loss": 0.8531, - "step": 9789 - }, - { - "epoch": 0.7530189985385739, - "learning_rate": 0.00042932789172997585, - "loss": 1.3793, - "step": 9790 - }, - { - "epoch": 0.7530959156987924, - "learning_rate": 0.00042907406490075067, - "loss": 1.1198, - "step": 9791 - }, - { - "epoch": 0.7531728328590108, - "learning_rate": 0.0004288203006040074, - "loss": 1.5074, - "step": 9792 - }, - { - "epoch": 0.7532497500192293, - "learning_rate": 0.0004285665988545631, - "loss": 1.0186, - "step": 9793 - }, - { - "epoch": 0.7533266671794477, - "learning_rate": 0.00042831295966723167, - "loss": 1.3088, - "step": 9794 - }, - { - "epoch": 0.7534035843396661, - "learning_rate": 0.0004280593830568235, - "loss": 1.0121, - "step": 9795 - }, - { - "epoch": 0.7534805014998847, - "learning_rate": 
0.0004278058690381445, - "loss": 1.3308, - "step": 9796 - }, - { - "epoch": 0.7535574186601031, - "learning_rate": 0.00042755241762599877, - "loss": 1.2559, - "step": 9797 - }, - { - "epoch": 0.7536343358203215, - "learning_rate": 0.00042729902883518567, - "loss": 1.0505, - "step": 9798 - }, - { - "epoch": 0.75371125298054, - "learning_rate": 0.00042704570268049905, - "loss": 1.103, - "step": 9799 - }, - { - "epoch": 0.7537881701407584, - "learning_rate": 0.00042679243917673264, - "loss": 0.8859, - "step": 9800 - }, - { - "epoch": 0.7538650873009769, - "learning_rate": 0.00042653923833867424, - "loss": 1.5809, - "step": 9801 - }, - { - "epoch": 0.7539420044611953, - "learning_rate": 0.0004262861001811083, - "loss": 1.2575, - "step": 9802 - }, - { - "epoch": 0.7540189216214137, - "learning_rate": 0.000426033024718816, - "loss": 1.1196, - "step": 9803 - }, - { - "epoch": 0.7540958387816322, - "learning_rate": 0.00042578001196657417, - "loss": 1.2707, - "step": 9804 - }, - { - "epoch": 0.7541727559418506, - "learning_rate": 0.0004255270619391575, - "loss": 1.1006, - "step": 9805 - }, - { - "epoch": 0.754249673102069, - "learning_rate": 0.00042527417465133556, - "loss": 1.1542, - "step": 9806 - }, - { - "epoch": 0.7543265902622875, - "learning_rate": 0.00042502135011787445, - "loss": 1.5866, - "step": 9807 - }, - { - "epoch": 0.754403507422506, - "learning_rate": 0.0004247685883535371, - "loss": 1.1532, - "step": 9808 - }, - { - "epoch": 0.7544804245827244, - "learning_rate": 0.0004245158893730825, - "loss": 1.181, - "step": 9809 - }, - { - "epoch": 0.7545573417429429, - "learning_rate": 0.00042426325319126534, - "loss": 1.0649, - "step": 9810 - }, - { - "epoch": 0.7546342589031613, - "learning_rate": 0.00042401067982283903, - "loss": 1.1536, - "step": 9811 - }, - { - "epoch": 0.7547111760633798, - "learning_rate": 0.0004237581692825494, - "loss": 1.0421, - "step": 9812 - }, - { - "epoch": 0.7547880932235982, - "learning_rate": 0.0004235057215851425, - "loss": 1.1205, - "step": 9813 - }, - { - "epoch": 0.7548650103838166, - "learning_rate": 0.0004232533367453584, - "loss": 0.8551, - "step": 9814 - }, - { - "epoch": 0.7549419275440351, - "learning_rate": 0.0004230010147779341, - "loss": 0.9364, - "step": 9815 - }, - { - "epoch": 0.7550188447042535, - "learning_rate": 0.0004227487556976028, - "loss": 1.0157, - "step": 9816 - }, - { - "epoch": 0.7550957618644719, - "learning_rate": 0.0004224965595190946, - "loss": 1.1261, - "step": 9817 - }, - { - "epoch": 0.7551726790246904, - "learning_rate": 0.00042224442625713474, - "loss": 1.1662, - "step": 9818 - }, - { - "epoch": 0.7552495961849088, - "learning_rate": 0.00042199235592644635, - "loss": 1.5206, - "step": 9819 - }, - { - "epoch": 0.7553265133451273, - "learning_rate": 0.00042174034854174775, - "loss": 0.9527, - "step": 9820 - }, - { - "epoch": 0.7554034305053458, - "learning_rate": 0.00042148840411775415, - "loss": 0.7728, - "step": 9821 - }, - { - "epoch": 0.7554803476655642, - "learning_rate": 0.00042123652266917657, - "loss": 0.9655, - "step": 9822 - }, - { - "epoch": 0.7555572648257827, - "learning_rate": 0.0004209847042107221, - "loss": 0.8613, - "step": 9823 - }, - { - "epoch": 0.7556341819860011, - "learning_rate": 0.0004207329487570963, - "loss": 1.3195, - "step": 9824 - }, - { - "epoch": 0.7557110991462195, - "learning_rate": 0.0004204812563229978, - "loss": 1.0556, - "step": 9825 - }, - { - "epoch": 0.755788016306438, - "learning_rate": 0.0004202296269231235, - "loss": 1.2988, - "step": 9826 - }, - { - "epoch": 0.7558649334666564, - 
"learning_rate": 0.00041997806057216707, - "loss": 1.2244, - "step": 9827 - }, - { - "epoch": 0.7559418506268748, - "learning_rate": 0.0004197265572848172, - "loss": 1.0913, - "step": 9828 - }, - { - "epoch": 0.7560187677870933, - "learning_rate": 0.00041947511707575934, - "loss": 0.9811, - "step": 9829 - }, - { - "epoch": 0.7560956849473117, - "learning_rate": 0.0004192237399596757, - "loss": 0.7383, - "step": 9830 - }, - { - "epoch": 0.7561726021075302, - "learning_rate": 0.00041897242595124366, - "loss": 1.1587, - "step": 9831 - }, - { - "epoch": 0.7562495192677486, - "learning_rate": 0.00041872117506513886, - "loss": 0.8737, - "step": 9832 - }, - { - "epoch": 0.756326436427967, - "learning_rate": 0.00041846998731603197, - "loss": 0.924, - "step": 9833 - }, - { - "epoch": 0.7564033535881856, - "learning_rate": 0.00041821886271858867, - "loss": 1.2262, - "step": 9834 - }, - { - "epoch": 0.756480270748404, - "learning_rate": 0.00041796780128747386, - "loss": 1.234, - "step": 9835 - }, - { - "epoch": 0.7565571879086224, - "learning_rate": 0.00041771680303734685, - "loss": 0.8367, - "step": 9836 - }, - { - "epoch": 0.7566341050688409, - "learning_rate": 0.00041746586798286354, - "loss": 0.9922, - "step": 9837 - }, - { - "epoch": 0.7567110222290593, - "learning_rate": 0.00041721499613867654, - "loss": 1.1963, - "step": 9838 - }, - { - "epoch": 0.7567879393892778, - "learning_rate": 0.00041696418751943366, - "loss": 1.0821, - "step": 9839 - }, - { - "epoch": 0.7568648565494962, - "learning_rate": 0.0004167134421397812, - "loss": 1.1669, - "step": 9840 - }, - { - "epoch": 0.7569417737097146, - "learning_rate": 0.0004164627600143599, - "loss": 1.3612, - "step": 9841 - }, - { - "epoch": 0.7570186908699331, - "learning_rate": 0.00041621214115780724, - "loss": 1.1686, - "step": 9842 - }, - { - "epoch": 0.7570956080301515, - "learning_rate": 0.0004159615855847571, - "loss": 0.9845, - "step": 9843 - }, - { - "epoch": 0.7571725251903699, - "learning_rate": 0.00041571109330983976, - "loss": 1.3389, - "step": 9844 - }, - { - "epoch": 0.7572494423505884, - "learning_rate": 0.0004154606643476811, - "loss": 1.1986, - "step": 9845 - }, - { - "epoch": 0.7573263595108068, - "learning_rate": 0.0004152102987129057, - "loss": 1.526, - "step": 9846 - }, - { - "epoch": 0.7574032766710254, - "learning_rate": 0.00041495999642013044, - "loss": 0.8463, - "step": 9847 - }, - { - "epoch": 0.7574801938312438, - "learning_rate": 0.0004147097574839723, - "loss": 1.1937, - "step": 9848 - }, - { - "epoch": 0.7575571109914622, - "learning_rate": 0.00041445958191904236, - "loss": 1.1596, - "step": 9849 - }, - { - "epoch": 0.7576340281516807, - "learning_rate": 0.0004142094697399489, - "loss": 0.7386, - "step": 9850 - }, - { - "epoch": 0.7577109453118991, - "learning_rate": 0.00041395942096129613, - "loss": 1.2507, - "step": 9851 - }, - { - "epoch": 0.7577878624721175, - "learning_rate": 0.00041370943559768445, - "loss": 1.2967, - "step": 9852 - }, - { - "epoch": 0.757864779632336, - "learning_rate": 0.0004134595136637106, - "loss": 1.1242, - "step": 9853 - }, - { - "epoch": 0.7579416967925544, - "learning_rate": 0.00041320965517396846, - "loss": 1.9654, - "step": 9854 - }, - { - "epoch": 0.7580186139527728, - "learning_rate": 0.00041295986014304773, - "loss": 1.4288, - "step": 9855 - }, - { - "epoch": 0.7580955311129913, - "learning_rate": 0.0004127101285855326, - "loss": 1.4664, - "step": 9856 - }, - { - "epoch": 0.7581724482732097, - "learning_rate": 0.00041246046051600675, - "loss": 1.5966, - "step": 9857 - }, - { - 
"epoch": 0.7582493654334282, - "learning_rate": 0.00041221085594904763, - "loss": 1.1626, - "step": 9858 - }, - { - "epoch": 0.7583262825936467, - "learning_rate": 0.00041196131489923117, - "loss": 0.9965, - "step": 9859 - }, - { - "epoch": 0.758403199753865, - "learning_rate": 0.00041171183738112694, - "loss": 1.3445, - "step": 9860 - }, - { - "epoch": 0.7584801169140836, - "learning_rate": 0.0004114624234093023, - "loss": 1.1673, - "step": 9861 - }, - { - "epoch": 0.758557034074302, - "learning_rate": 0.00041121307299832175, - "loss": 1.1812, - "step": 9862 - }, - { - "epoch": 0.7586339512345204, - "learning_rate": 0.0004109637861627444, - "loss": 1.6752, - "step": 9863 - }, - { - "epoch": 0.7587108683947389, - "learning_rate": 0.0004107145629171266, - "loss": 1.0776, - "step": 9864 - }, - { - "epoch": 0.7587877855549573, - "learning_rate": 0.00041046540327602075, - "loss": 1.1185, - "step": 9865 - }, - { - "epoch": 0.7588647027151758, - "learning_rate": 0.00041021630725397495, - "loss": 0.8098, - "step": 9866 - }, - { - "epoch": 0.7589416198753942, - "learning_rate": 0.00040996727486553535, - "loss": 1.1833, - "step": 9867 - }, - { - "epoch": 0.7590185370356126, - "learning_rate": 0.0004097183061252429, - "loss": 1.0936, - "step": 9868 - }, - { - "epoch": 0.7590954541958311, - "learning_rate": 0.000409469401047634, - "loss": 1.0916, - "step": 9869 - }, - { - "epoch": 0.7591723713560495, - "learning_rate": 0.0004092205596472436, - "loss": 1.0741, - "step": 9870 - }, - { - "epoch": 0.7592492885162679, - "learning_rate": 0.0004089717819386016, - "loss": 1.1404, - "step": 9871 - }, - { - "epoch": 0.7593262056764865, - "learning_rate": 0.00040872306793623416, - "loss": 1.2892, - "step": 9872 - }, - { - "epoch": 0.7594031228367049, - "learning_rate": 0.0004084744176546642, - "loss": 1.1543, - "step": 9873 - }, - { - "epoch": 0.7594800399969233, - "learning_rate": 0.0004082258311084106, - "loss": 1.1737, - "step": 9874 - }, - { - "epoch": 0.7595569571571418, - "learning_rate": 0.0004079773083119881, - "loss": 1.3703, - "step": 9875 - }, - { - "epoch": 0.7596338743173602, - "learning_rate": 0.00040772884927990904, - "loss": 1.1074, - "step": 9876 - }, - { - "epoch": 0.7597107914775787, - "learning_rate": 0.0004074804540266807, - "loss": 0.8953, - "step": 9877 - }, - { - "epoch": 0.7597877086377971, - "learning_rate": 0.0004072321225668075, - "loss": 1.2146, - "step": 9878 - }, - { - "epoch": 0.7598646257980155, - "learning_rate": 0.0004069838549147892, - "loss": 0.986, - "step": 9879 - }, - { - "epoch": 0.759941542958234, - "learning_rate": 0.0004067356510851226, - "loss": 1.4233, - "step": 9880 - }, - { - "epoch": 0.7600184601184524, - "learning_rate": 0.0004064875110923014, - "loss": 0.9556, - "step": 9881 - }, - { - "epoch": 0.7600953772786708, - "learning_rate": 0.00040623943495081356, - "loss": 1.2152, - "step": 9882 - }, - { - "epoch": 0.7601722944388893, - "learning_rate": 0.0004059914226751447, - "loss": 1.191, - "step": 9883 - }, - { - "epoch": 0.7602492115991077, - "learning_rate": 0.00040574347427977726, - "loss": 0.8316, - "step": 9884 - }, - { - "epoch": 0.7603261287593263, - "learning_rate": 0.0004054955897791888, - "loss": 1.2691, - "step": 9885 - }, - { - "epoch": 0.7604030459195447, - "learning_rate": 0.00040524776918785335, - "loss": 1.0548, - "step": 9886 - }, - { - "epoch": 0.7604799630797631, - "learning_rate": 0.0004050000125202417, - "loss": 0.7575, - "step": 9887 - }, - { - "epoch": 0.7605568802399816, - "learning_rate": 0.00040475231979082, - "loss": 0.9693, - "step": 
9888 - }, - { - "epoch": 0.7606337974002, - "learning_rate": 0.00040450469101405227, - "loss": 1.1548, - "step": 9889 - }, - { - "epoch": 0.7607107145604184, - "learning_rate": 0.00040425712620439767, - "loss": 1.2662, - "step": 9890 - }, - { - "epoch": 0.7607876317206369, - "learning_rate": 0.0004040096253763106, - "loss": 1.2688, - "step": 9891 - }, - { - "epoch": 0.7608645488808553, - "learning_rate": 0.00040376218854424424, - "loss": 1.1372, - "step": 9892 - }, - { - "epoch": 0.7609414660410737, - "learning_rate": 0.0004035148157226463, - "loss": 0.8423, - "step": 9893 - }, - { - "epoch": 0.7610183832012922, - "learning_rate": 0.00040326750692596064, - "loss": 1.0708, - "step": 9894 - }, - { - "epoch": 0.7610953003615106, - "learning_rate": 0.0004030202621686285, - "loss": 1.1025, - "step": 9895 - }, - { - "epoch": 0.7611722175217291, - "learning_rate": 0.000402773081465086, - "loss": 1.1715, - "step": 9896 - }, - { - "epoch": 0.7612491346819475, - "learning_rate": 0.00040252596482976706, - "loss": 1.245, - "step": 9897 - }, - { - "epoch": 0.761326051842166, - "learning_rate": 0.000402278912277101, - "loss": 1.3009, - "step": 9898 - }, - { - "epoch": 0.7614029690023845, - "learning_rate": 0.00040203192382151317, - "loss": 1.3432, - "step": 9899 - }, - { - "epoch": 0.7614798861626029, - "learning_rate": 0.00040178499947742566, - "loss": 1.1632, - "step": 9900 - }, - { - "epoch": 0.7615568033228213, - "learning_rate": 0.00040153813925925653, - "loss": 1.2224, - "step": 9901 - }, - { - "epoch": 0.7616337204830398, - "learning_rate": 0.0004012913431814196, - "loss": 1.1971, - "step": 9902 - }, - { - "epoch": 0.7617106376432582, - "learning_rate": 0.0004010446112583274, - "loss": 1.3722, - "step": 9903 - }, - { - "epoch": 0.7617875548034767, - "learning_rate": 0.00040079794350438453, - "loss": 0.9624, - "step": 9904 - }, - { - "epoch": 0.7618644719636951, - "learning_rate": 0.0004005513399339958, - "loss": 1.1057, - "step": 9905 - }, - { - "epoch": 0.7619413891239135, - "learning_rate": 0.00040030480056156017, - "loss": 1.4653, - "step": 9906 - }, - { - "epoch": 0.762018306284132, - "learning_rate": 0.0004000583254014733, - "loss": 1.0507, - "step": 9907 - }, - { - "epoch": 0.7620952234443504, - "learning_rate": 0.0003998119144681272, - "loss": 1.3143, - "step": 9908 - }, - { - "epoch": 0.7621721406045688, - "learning_rate": 0.00039956556777591, - "loss": 1.2349, - "step": 9909 - }, - { - "epoch": 0.7622490577647874, - "learning_rate": 0.0003993192853392056, - "loss": 1.2431, - "step": 9910 - }, - { - "epoch": 0.7623259749250058, - "learning_rate": 0.00039907306717239574, - "loss": 1.3292, - "step": 9911 - }, - { - "epoch": 0.7624028920852242, - "learning_rate": 0.0003988269132898573, - "loss": 1.0823, - "step": 9912 - }, - { - "epoch": 0.7624798092454427, - "learning_rate": 0.00039858082370596205, - "loss": 1.2541, - "step": 9913 - }, - { - "epoch": 0.7625567264056611, - "learning_rate": 0.00039833479843508093, - "loss": 0.9036, - "step": 9914 - }, - { - "epoch": 0.7626336435658796, - "learning_rate": 0.00039808883749157853, - "loss": 1.2089, - "step": 9915 - }, - { - "epoch": 0.762710560726098, - "learning_rate": 0.00039784294088981824, - "loss": 1.0709, - "step": 9916 - }, - { - "epoch": 0.7627874778863164, - "learning_rate": 0.0003975971086441568, - "loss": 1.2823, - "step": 9917 - }, - { - "epoch": 0.7628643950465349, - "learning_rate": 0.0003973513407689486, - "loss": 1.1153, - "step": 9918 - }, - { - "epoch": 0.7629413122067533, - "learning_rate": 0.00039710563727854523, - "loss": 
1.0983, - "step": 9919 - }, - { - "epoch": 0.7630182293669717, - "learning_rate": 0.0003968599981872932, - "loss": 1.4162, - "step": 9920 - }, - { - "epoch": 0.7630951465271902, - "learning_rate": 0.0003966144235095354, - "loss": 1.1128, - "step": 9921 - }, - { - "epoch": 0.7631720636874086, - "learning_rate": 0.00039636891325961146, - "loss": 1.2088, - "step": 9922 - }, - { - "epoch": 0.7632489808476272, - "learning_rate": 0.00039612346745185626, - "loss": 1.3528, - "step": 9923 - }, - { - "epoch": 0.7633258980078456, - "learning_rate": 0.0003958780861006028, - "loss": 0.713, - "step": 9924 - }, - { - "epoch": 0.763402815168064, - "learning_rate": 0.0003956327692201789, - "loss": 1.2216, - "step": 9925 - }, - { - "epoch": 0.7634797323282825, - "learning_rate": 0.0003953875168249078, - "loss": 0.7214, - "step": 9926 - }, - { - "epoch": 0.7635566494885009, - "learning_rate": 0.00039514232892911113, - "loss": 0.9942, - "step": 9927 - }, - { - "epoch": 0.7636335666487193, - "learning_rate": 0.0003948972055471054, - "loss": 0.9765, - "step": 9928 - }, - { - "epoch": 0.7637104838089378, - "learning_rate": 0.00039465214669320354, - "loss": 1.1006, - "step": 9929 - }, - { - "epoch": 0.7637874009691562, - "learning_rate": 0.00039440715238171477, - "loss": 1.1188, - "step": 9930 - }, - { - "epoch": 0.7638643181293746, - "learning_rate": 0.00039416222262694464, - "loss": 1.2701, - "step": 9931 - }, - { - "epoch": 0.7639412352895931, - "learning_rate": 0.0003939173574431943, - "loss": 1.4445, - "step": 9932 - }, - { - "epoch": 0.7640181524498115, - "learning_rate": 0.00039367255684476265, - "loss": 1.0762, - "step": 9933 - }, - { - "epoch": 0.76409506961003, - "learning_rate": 0.0003934278208459435, - "loss": 1.1465, - "step": 9934 - }, - { - "epoch": 0.7641719867702484, - "learning_rate": 0.000393183149461027, - "loss": 0.9773, - "step": 9935 - }, - { - "epoch": 0.7642489039304668, - "learning_rate": 0.0003929385427043, - "loss": 1.2645, - "step": 9936 - }, - { - "epoch": 0.7643258210906854, - "learning_rate": 0.00039269400059004474, - "loss": 1.2645, - "step": 9937 - }, - { - "epoch": 0.7644027382509038, - "learning_rate": 0.0003924495231325416, - "loss": 0.9997, - "step": 9938 - }, - { - "epoch": 0.7644796554111222, - "learning_rate": 0.00039220511034606477, - "loss": 1.63, - "step": 9939 - }, - { - "epoch": 0.7645565725713407, - "learning_rate": 0.0003919607622448856, - "loss": 1.1389, - "step": 9940 - }, - { - "epoch": 0.7646334897315591, - "learning_rate": 0.0003917164788432726, - "loss": 1.1261, - "step": 9941 - }, - { - "epoch": 0.7647104068917776, - "learning_rate": 0.0003914722601554894, - "loss": 1.0346, - "step": 9942 - }, - { - "epoch": 0.764787324051996, - "learning_rate": 0.0003912281061957965, - "loss": 1.2263, - "step": 9943 - }, - { - "epoch": 0.7648642412122144, - "learning_rate": 0.0003909840169784498, - "loss": 1.0811, - "step": 9944 - }, - { - "epoch": 0.7649411583724329, - "learning_rate": 0.00039073999251770173, - "loss": 1.1438, - "step": 9945 - }, - { - "epoch": 0.7650180755326513, - "learning_rate": 0.0003904960328278019, - "loss": 1.187, - "step": 9946 - }, - { - "epoch": 0.7650949926928697, - "learning_rate": 0.00039025213792299513, - "loss": 1.1532, - "step": 9947 - }, - { - "epoch": 0.7651719098530883, - "learning_rate": 0.0003900083078175226, - "loss": 1.0514, - "step": 9948 - }, - { - "epoch": 0.7652488270133067, - "learning_rate": 0.0003897645425256216, - "loss": 0.8389, - "step": 9949 - }, - { - "epoch": 0.7653257441735252, - "learning_rate": 0.00038952084206152573, 
- "loss": 1.209, - "step": 9950 - }, - { - "epoch": 0.7654026613337436, - "learning_rate": 0.0003892772064394657, - "loss": 0.9865, - "step": 9951 - }, - { - "epoch": 0.765479578493962, - "learning_rate": 0.00038903363567366746, - "loss": 1.1961, - "step": 9952 - }, - { - "epoch": 0.7655564956541805, - "learning_rate": 0.0003887901297783523, - "loss": 1.4159, - "step": 9953 - }, - { - "epoch": 0.7656334128143989, - "learning_rate": 0.0003885466887677398, - "loss": 1.653, - "step": 9954 - }, - { - "epoch": 0.7657103299746173, - "learning_rate": 0.00038830331265604466, - "loss": 0.9447, - "step": 9955 - }, - { - "epoch": 0.7657872471348358, - "learning_rate": 0.0003880600014574776, - "loss": 1.2132, - "step": 9956 - }, - { - "epoch": 0.7658641642950542, - "learning_rate": 0.00038781675518624587, - "loss": 1.2148, - "step": 9957 - }, - { - "epoch": 0.7659410814552726, - "learning_rate": 0.00038757357385655293, - "loss": 1.2414, - "step": 9958 - }, - { - "epoch": 0.7660179986154911, - "learning_rate": 0.00038733045748259783, - "loss": 1.5285, - "step": 9959 - }, - { - "epoch": 0.7660949157757095, - "learning_rate": 0.00038708740607857744, - "loss": 1.4117, - "step": 9960 - }, - { - "epoch": 0.766171832935928, - "learning_rate": 0.00038684441965868336, - "loss": 1.2747, - "step": 9961 - }, - { - "epoch": 0.7662487500961465, - "learning_rate": 0.00038660149823710346, - "loss": 0.9704, - "step": 9962 - }, - { - "epoch": 0.7663256672563649, - "learning_rate": 0.0003863586418280226, - "loss": 1.1634, - "step": 9963 - }, - { - "epoch": 0.7664025844165834, - "learning_rate": 0.0003861158504456208, - "loss": 0.9502, - "step": 9964 - }, - { - "epoch": 0.7664795015768018, - "learning_rate": 0.0003858731241040762, - "loss": 1.151, - "step": 9965 - }, - { - "epoch": 0.7665564187370202, - "learning_rate": 0.0003856304628175606, - "loss": 1.2631, - "step": 9966 - }, - { - "epoch": 0.7666333358972387, - "learning_rate": 0.0003853878666002431, - "loss": 1.0425, - "step": 9967 - }, - { - "epoch": 0.7667102530574571, - "learning_rate": 0.00038514533546629026, - "loss": 0.8835, - "step": 9968 - }, - { - "epoch": 0.7667871702176756, - "learning_rate": 0.00038490286942986313, - "loss": 1.1304, - "step": 9969 - }, - { - "epoch": 0.766864087377894, - "learning_rate": 0.00038466046850511957, - "loss": 1.0838, - "step": 9970 - }, - { - "epoch": 0.7669410045381124, - "learning_rate": 0.00038441813270621367, - "loss": 0.9887, - "step": 9971 - }, - { - "epoch": 0.7670179216983309, - "learning_rate": 0.00038417586204729504, - "loss": 1.152, - "step": 9972 - }, - { - "epoch": 0.7670948388585493, - "learning_rate": 0.0003839336565425113, - "loss": 1.3359, - "step": 9973 - }, - { - "epoch": 0.7671717560187677, - "learning_rate": 0.0003836915162060049, - "loss": 1.1356, - "step": 9974 - }, - { - "epoch": 0.7672486731789863, - "learning_rate": 0.0003834494410519134, - "loss": 0.9708, - "step": 9975 - }, - { - "epoch": 0.7673255903392047, - "learning_rate": 0.00038320743109437314, - "loss": 1.2937, - "step": 9976 - }, - { - "epoch": 0.7674025074994231, - "learning_rate": 0.00038296548634751465, - "loss": 1.3575, - "step": 9977 - }, - { - "epoch": 0.7674794246596416, - "learning_rate": 0.00038272360682546576, - "loss": 1.1872, - "step": 9978 - }, - { - "epoch": 0.76755634181986, - "learning_rate": 0.0003824817925423498, - "loss": 1.1093, - "step": 9979 - }, - { - "epoch": 0.7676332589800785, - "learning_rate": 0.0003822400435122863, - "loss": 1.3454, - "step": 9980 - }, - { - "epoch": 0.7677101761402969, - "learning_rate": 
0.0003819983597493919, - "loss": 1.2304, - "step": 9981 - }, - { - "epoch": 0.7677870933005153, - "learning_rate": 0.0003817567412677784, - "loss": 1.2618, - "step": 9982 - }, - { - "epoch": 0.7678640104607338, - "learning_rate": 0.0003815151880815542, - "loss": 0.9594, - "step": 9983 - }, - { - "epoch": 0.7679409276209522, - "learning_rate": 0.0003812737002048238, - "loss": 1.2799, - "step": 9984 - }, - { - "epoch": 0.7680178447811706, - "learning_rate": 0.0003810322776516881, - "loss": 1.0407, - "step": 9985 - }, - { - "epoch": 0.7680947619413891, - "learning_rate": 0.0003807909204362432, - "loss": 1.2365, - "step": 9986 - }, - { - "epoch": 0.7681716791016076, - "learning_rate": 0.0003805496285725839, - "loss": 1.1432, - "step": 9987 - }, - { - "epoch": 0.7682485962618261, - "learning_rate": 0.00038030840207479766, - "loss": 1.511, - "step": 9988 - }, - { - "epoch": 0.7683255134220445, - "learning_rate": 0.00038006724095697106, - "loss": 1.3152, - "step": 9989 - }, - { - "epoch": 0.7684024305822629, - "learning_rate": 0.00037982614523318544, - "loss": 1.0888, - "step": 9990 - }, - { - "epoch": 0.7684793477424814, - "learning_rate": 0.0003795851149175186, - "loss": 1.0369, - "step": 9991 - }, - { - "epoch": 0.7685562649026998, - "learning_rate": 0.00037934415002404456, - "loss": 1.7193, - "step": 9992 - }, - { - "epoch": 0.7686331820629182, - "learning_rate": 0.00037910325056683344, - "loss": 0.9679, - "step": 9993 - }, - { - "epoch": 0.7687100992231367, - "learning_rate": 0.0003788624165599513, - "loss": 0.9817, - "step": 9994 - }, - { - "epoch": 0.7687870163833551, - "learning_rate": 0.00037862164801746157, - "loss": 1.168, - "step": 9995 - }, - { - "epoch": 0.7688639335435735, - "learning_rate": 0.000378380944953423, - "loss": 1.3463, - "step": 9996 - }, - { - "epoch": 0.768940850703792, - "learning_rate": 0.00037814030738188905, - "loss": 1.5224, - "step": 9997 - }, - { - "epoch": 0.7690177678640104, - "learning_rate": 0.0003778997353169121, - "loss": 1.3013, - "step": 9998 - }, - { - "epoch": 0.769094685024229, - "learning_rate": 0.00037765922877253853, - "loss": 1.2762, - "step": 9999 - }, - { - "epoch": 0.7691716021844474, - "learning_rate": 0.00037741878776281323, - "loss": 1.1189, - "step": 10000 - }, - { - "epoch": 0.7692485193446658, - "learning_rate": 0.0003771784123017744, - "loss": 0.9666, - "step": 10001 - }, - { - "epoch": 0.7693254365048843, - "learning_rate": 0.00037693810240345784, - "loss": 1.1382, - "step": 10002 - }, - { - "epoch": 0.7694023536651027, - "learning_rate": 0.00037669785808189645, - "loss": 1.1876, - "step": 10003 - }, - { - "epoch": 0.7694792708253211, - "learning_rate": 0.00037645767935111753, - "loss": 1.2285, - "step": 10004 - }, - { - "epoch": 0.7695561879855396, - "learning_rate": 0.00037621756622514585, - "loss": 1.4077, - "step": 10005 - }, - { - "epoch": 0.769633105145758, - "learning_rate": 0.00037597751871800154, - "loss": 0.8036, - "step": 10006 - }, - { - "epoch": 0.7697100223059765, - "learning_rate": 0.0003757375368437011, - "loss": 1.0401, - "step": 10007 - }, - { - "epoch": 0.7697869394661949, - "learning_rate": 0.00037549762061625774, - "loss": 1.4036, - "step": 10008 - }, - { - "epoch": 0.7698638566264133, - "learning_rate": 0.0003752577700496808, - "loss": 1.0638, - "step": 10009 - }, - { - "epoch": 0.7699407737866318, - "learning_rate": 0.0003750179851579741, - "loss": 1.105, - "step": 10010 - }, - { - "epoch": 0.7700176909468502, - "learning_rate": 0.00037477826595513984, - "loss": 1.0871, - "step": 10011 - }, - { - "epoch": 
0.7700946081070686, - "learning_rate": 0.00037453861245517544, - "loss": 0.9914, - "step": 10012 - }, - { - "epoch": 0.7701715252672872, - "learning_rate": 0.00037429902467207447, - "loss": 0.9106, - "step": 10013 - }, - { - "epoch": 0.7702484424275056, - "learning_rate": 0.00037405950261982645, - "loss": 1.1255, - "step": 10014 - }, - { - "epoch": 0.770325359587724, - "learning_rate": 0.0003738200463124177, - "loss": 1.3214, - "step": 10015 - }, - { - "epoch": 0.7704022767479425, - "learning_rate": 0.0003735806557638296, - "loss": 1.2187, - "step": 10016 - }, - { - "epoch": 0.7704791939081609, - "learning_rate": 0.00037334133098804124, - "loss": 1.4611, - "step": 10017 - }, - { - "epoch": 0.7705561110683794, - "learning_rate": 0.0003731020719990269, - "loss": 1.0869, - "step": 10018 - }, - { - "epoch": 0.7706330282285978, - "learning_rate": 0.0003728628788107569, - "loss": 1.4927, - "step": 10019 - }, - { - "epoch": 0.7707099453888162, - "learning_rate": 0.0003726237514371983, - "loss": 1.0439, - "step": 10020 - }, - { - "epoch": 0.7707868625490347, - "learning_rate": 0.00037238468989231316, - "loss": 0.9544, - "step": 10021 - }, - { - "epoch": 0.7708637797092531, - "learning_rate": 0.00037214569419006204, - "loss": 1.4451, - "step": 10022 - }, - { - "epoch": 0.7709406968694715, - "learning_rate": 0.0003719067643443988, - "loss": 1.0804, - "step": 10023 - }, - { - "epoch": 0.77101761402969, - "learning_rate": 0.00037166790036927475, - "loss": 1.1747, - "step": 10024 - }, - { - "epoch": 0.7710945311899084, - "learning_rate": 0.0003714291022786385, - "loss": 1.2461, - "step": 10025 - }, - { - "epoch": 0.771171448350127, - "learning_rate": 0.00037119037008643305, - "loss": 1.1186, - "step": 10026 - }, - { - "epoch": 0.7712483655103454, - "learning_rate": 0.0003709517038065984, - "loss": 0.9967, - "step": 10027 - }, - { - "epoch": 0.7713252826705638, - "learning_rate": 0.00037071310345307044, - "loss": 1.1569, - "step": 10028 - }, - { - "epoch": 0.7714021998307823, - "learning_rate": 0.00037047456903978066, - "loss": 1.2187, - "step": 10029 - }, - { - "epoch": 0.7714791169910007, - "learning_rate": 0.0003702361005806585, - "loss": 1.0911, - "step": 10030 - }, - { - "epoch": 0.7715560341512191, - "learning_rate": 0.0003699976980896284, - "loss": 1.1432, - "step": 10031 - }, - { - "epoch": 0.7716329513114376, - "learning_rate": 0.0003697593615806093, - "loss": 1.4127, - "step": 10032 - }, - { - "epoch": 0.771709868471656, - "learning_rate": 0.00036952109106751956, - "loss": 0.9756, - "step": 10033 - }, - { - "epoch": 0.7717867856318744, - "learning_rate": 0.00036928288656427145, - "loss": 0.9238, - "step": 10034 - }, - { - "epoch": 0.7718637027920929, - "learning_rate": 0.00036904474808477394, - "loss": 1.3014, - "step": 10035 - }, - { - "epoch": 0.7719406199523113, - "learning_rate": 0.0003688066756429323, - "loss": 0.9536, - "step": 10036 - }, - { - "epoch": 0.7720175371125299, - "learning_rate": 0.0003685686692526472, - "loss": 1.2614, - "step": 10037 - }, - { - "epoch": 0.7720944542727483, - "learning_rate": 0.00036833072892781724, - "loss": 1.1682, - "step": 10038 - }, - { - "epoch": 0.7721713714329667, - "learning_rate": 0.00036809285468233536, - "loss": 1.233, - "step": 10039 - }, - { - "epoch": 0.7722482885931852, - "learning_rate": 0.0003678550465300913, - "loss": 1.2303, - "step": 10040 - }, - { - "epoch": 0.7723252057534036, - "learning_rate": 0.0003676173044849711, - "loss": 1.0965, - "step": 10041 - }, - { - "epoch": 0.772402122913622, - "learning_rate": 0.00036737962856085645, - 
"loss": 1.1599, - "step": 10042 - }, - { - "epoch": 0.7724790400738405, - "learning_rate": 0.00036714201877162536, - "loss": 0.7442, - "step": 10043 - }, - { - "epoch": 0.7725559572340589, - "learning_rate": 0.00036690447513115337, - "loss": 1.0247, - "step": 10044 - }, - { - "epoch": 0.7726328743942774, - "learning_rate": 0.00036666699765330895, - "loss": 1.0928, - "step": 10045 - }, - { - "epoch": 0.7727097915544958, - "learning_rate": 0.0003664295863519602, - "loss": 1.065, - "step": 10046 - }, - { - "epoch": 0.7727867087147142, - "learning_rate": 0.00036619224124096925, - "loss": 1.0316, - "step": 10047 - }, - { - "epoch": 0.7728636258749327, - "learning_rate": 0.0003659549623341949, - "loss": 1.351, - "step": 10048 - }, - { - "epoch": 0.7729405430351511, - "learning_rate": 0.00036571774964549227, - "loss": 1.4493, - "step": 10049 - }, - { - "epoch": 0.7730174601953695, - "learning_rate": 0.00036548060318871225, - "loss": 1.2986, - "step": 10050 - }, - { - "epoch": 0.7730943773555881, - "learning_rate": 0.00036524352297770174, - "loss": 1.2326, - "step": 10051 - }, - { - "epoch": 0.7731712945158065, - "learning_rate": 0.00036500650902630507, - "loss": 1.1512, - "step": 10052 - }, - { - "epoch": 0.7732482116760249, - "learning_rate": 0.0003647695613483616, - "loss": 1.2492, - "step": 10053 - }, - { - "epoch": 0.7733251288362434, - "learning_rate": 0.00036453267995770563, - "loss": 1.3303, - "step": 10054 - }, - { - "epoch": 0.7734020459964618, - "learning_rate": 0.0003642958648681704, - "loss": 1.3181, - "step": 10055 - }, - { - "epoch": 0.7734789631566803, - "learning_rate": 0.00036405911609358274, - "loss": 1.6058, - "step": 10056 - }, - { - "epoch": 0.7735558803168987, - "learning_rate": 0.00036382243364776784, - "loss": 1.1234, - "step": 10057 - }, - { - "epoch": 0.7736327974771171, - "learning_rate": 0.0003635858175445449, - "loss": 0.9767, - "step": 10058 - }, - { - "epoch": 0.7737097146373356, - "learning_rate": 0.00036334926779772996, - "loss": 1.3137, - "step": 10059 - }, - { - "epoch": 0.773786631797554, - "learning_rate": 0.0003631127844211362, - "loss": 1.3275, - "step": 10060 - }, - { - "epoch": 0.7738635489577724, - "learning_rate": 0.0003628763674285719, - "loss": 1.0674, - "step": 10061 - }, - { - "epoch": 0.7739404661179909, - "learning_rate": 0.00036264001683384146, - "loss": 1.3303, - "step": 10062 - }, - { - "epoch": 0.7740173832782093, - "learning_rate": 0.00036240373265074586, - "loss": 1.3485, - "step": 10063 - }, - { - "epoch": 0.7740943004384279, - "learning_rate": 0.00036216751489308145, - "loss": 1.1655, - "step": 10064 - }, - { - "epoch": 0.7741712175986463, - "learning_rate": 0.00036193136357464203, - "loss": 1.159, - "step": 10065 - }, - { - "epoch": 0.7742481347588647, - "learning_rate": 0.0003616952787092168, - "loss": 1.4024, - "step": 10066 - }, - { - "epoch": 0.7743250519190832, - "learning_rate": 0.0003614592603105898, - "loss": 0.7212, - "step": 10067 - }, - { - "epoch": 0.7744019690793016, - "learning_rate": 0.0003612233083925436, - "loss": 1.4691, - "step": 10068 - }, - { - "epoch": 0.77447888623952, - "learning_rate": 0.000360987422968855, - "loss": 0.9413, - "step": 10069 - }, - { - "epoch": 0.7745558033997385, - "learning_rate": 0.0003607516040532981, - "loss": 1.1579, - "step": 10070 - }, - { - "epoch": 0.7746327205599569, - "learning_rate": 0.00036051585165964217, - "loss": 1.2264, - "step": 10071 - }, - { - "epoch": 0.7747096377201754, - "learning_rate": 0.0003602801658016527, - "loss": 1.645, - "step": 10072 - }, - { - "epoch": 
0.7747865548803938, - "learning_rate": 0.00036004454649309276, - "loss": 1.1012, - "step": 10073 - }, - { - "epoch": 0.7748634720406122, - "learning_rate": 0.00035980899374771957, - "loss": 1.0654, - "step": 10074 - }, - { - "epoch": 0.7749403892008307, - "learning_rate": 0.0003595735075792877, - "loss": 1.3011, - "step": 10075 - }, - { - "epoch": 0.7750173063610492, - "learning_rate": 0.00035933808800154725, - "loss": 0.8951, - "step": 10076 - }, - { - "epoch": 0.7750942235212676, - "learning_rate": 0.00035910273502824447, - "loss": 1.1847, - "step": 10077 - }, - { - "epoch": 0.7751711406814861, - "learning_rate": 0.0003588674486731217, - "loss": 1.1407, - "step": 10078 - }, - { - "epoch": 0.7752480578417045, - "learning_rate": 0.00035863222894991865, - "loss": 0.9388, - "step": 10079 - }, - { - "epoch": 0.7753249750019229, - "learning_rate": 0.0003583970758723688, - "loss": 1.1442, - "step": 10080 - }, - { - "epoch": 0.7754018921621414, - "learning_rate": 0.00035816198945420316, - "loss": 1.0684, - "step": 10081 - }, - { - "epoch": 0.7754788093223598, - "learning_rate": 0.0003579269697091492, - "loss": 1.2297, - "step": 10082 - }, - { - "epoch": 0.7755557264825783, - "learning_rate": 0.00035769201665092977, - "loss": 0.8241, - "step": 10083 - }, - { - "epoch": 0.7756326436427967, - "learning_rate": 0.0003574571302932641, - "loss": 1.2009, - "step": 10084 - }, - { - "epoch": 0.7757095608030151, - "learning_rate": 0.0003572223106498672, - "loss": 1.2966, - "step": 10085 - }, - { - "epoch": 0.7757864779632336, - "learning_rate": 0.0003569875577344501, - "loss": 1.3863, - "step": 10086 - }, - { - "epoch": 0.775863395123452, - "learning_rate": 0.0003567528715607212, - "loss": 1.1067, - "step": 10087 - }, - { - "epoch": 0.7759403122836704, - "learning_rate": 0.0003565182521423837, - "loss": 1.3285, - "step": 10088 - }, - { - "epoch": 0.776017229443889, - "learning_rate": 0.00035628369949313696, - "loss": 1.2468, - "step": 10089 - }, - { - "epoch": 0.7760941466041074, - "learning_rate": 0.00035604921362667716, - "loss": 0.8892, - "step": 10090 - }, - { - "epoch": 0.7761710637643259, - "learning_rate": 0.0003558147945566954, - "loss": 1.2645, - "step": 10091 - }, - { - "epoch": 0.7762479809245443, - "learning_rate": 0.0003555804422968811, - "loss": 1.2205, - "step": 10092 - }, - { - "epoch": 0.7763248980847627, - "learning_rate": 0.00035534615686091703, - "loss": 1.3176, - "step": 10093 - }, - { - "epoch": 0.7764018152449812, - "learning_rate": 0.00035511193826248346, - "loss": 0.812, - "step": 10094 - }, - { - "epoch": 0.7764787324051996, - "learning_rate": 0.00035487778651525724, - "loss": 1.1748, - "step": 10095 - }, - { - "epoch": 0.776555649565418, - "learning_rate": 0.0003546437016329107, - "loss": 1.0933, - "step": 10096 - }, - { - "epoch": 0.7766325667256365, - "learning_rate": 0.000354409683629112, - "loss": 1.075, - "step": 10097 - }, - { - "epoch": 0.7767094838858549, - "learning_rate": 0.0003541757325175258, - "loss": 1.2219, - "step": 10098 - }, - { - "epoch": 0.7767864010460733, - "learning_rate": 0.00035394184831181254, - "loss": 1.5471, - "step": 10099 - }, - { - "epoch": 0.7768633182062918, - "learning_rate": 0.0003537080310256289, - "loss": 1.1618, - "step": 10100 - }, - { - "epoch": 0.7769402353665102, - "learning_rate": 0.00035347428067262837, - "loss": 1.2653, - "step": 10101 - }, - { - "epoch": 0.7770171525267288, - "learning_rate": 0.0003532405972664595, - "loss": 1.2221, - "step": 10102 - }, - { - "epoch": 0.7770940696869472, - "learning_rate": 0.0003530069808207672, - 
"loss": 0.8411, - "step": 10103 - }, - { - "epoch": 0.7771709868471656, - "learning_rate": 0.0003527734313491927, - "loss": 1.1979, - "step": 10104 - }, - { - "epoch": 0.7772479040073841, - "learning_rate": 0.00035253994886537307, - "loss": 1.4187, - "step": 10105 - }, - { - "epoch": 0.7773248211676025, - "learning_rate": 0.00035230653338294186, - "loss": 1.0576, - "step": 10106 - }, - { - "epoch": 0.7774017383278209, - "learning_rate": 0.00035207318491552815, - "loss": 1.108, - "step": 10107 - }, - { - "epoch": 0.7774786554880394, - "learning_rate": 0.0003518399034767571, - "loss": 0.9282, - "step": 10108 - }, - { - "epoch": 0.7775555726482578, - "learning_rate": 0.0003516066890802511, - "loss": 1.2714, - "step": 10109 - }, - { - "epoch": 0.7776324898084763, - "learning_rate": 0.0003513735417396273, - "loss": 1.3496, - "step": 10110 - }, - { - "epoch": 0.7777094069686947, - "learning_rate": 0.0003511404614684995, - "loss": 1.0805, - "step": 10111 - }, - { - "epoch": 0.7777863241289131, - "learning_rate": 0.00035090744828047754, - "loss": 0.7998, - "step": 10112 - }, - { - "epoch": 0.7778632412891316, - "learning_rate": 0.0003506745021891668, - "loss": 1.0188, - "step": 10113 - }, - { - "epoch": 0.77794015844935, - "learning_rate": 0.00035044162320817, - "loss": 0.9875, - "step": 10114 - }, - { - "epoch": 0.7780170756095685, - "learning_rate": 0.00035020881135108535, - "loss": 1.2888, - "step": 10115 - }, - { - "epoch": 0.778093992769787, - "learning_rate": 0.00034997606663150554, - "loss": 1.2168, - "step": 10116 - }, - { - "epoch": 0.7781709099300054, - "learning_rate": 0.00034974338906302227, - "loss": 1.3169, - "step": 10117 - }, - { - "epoch": 0.7782478270902238, - "learning_rate": 0.00034951077865922134, - "loss": 1.2438, - "step": 10118 - }, - { - "epoch": 0.7783247442504423, - "learning_rate": 0.0003492782354336848, - "loss": 1.2502, - "step": 10119 - }, - { - "epoch": 0.7784016614106607, - "learning_rate": 0.00034904575939999154, - "loss": 1.1903, - "step": 10120 - }, - { - "epoch": 0.7784785785708792, - "learning_rate": 0.00034881335057171555, - "loss": 1.1976, - "step": 10121 - }, - { - "epoch": 0.7785554957310976, - "learning_rate": 0.00034858100896242806, - "loss": 1.09, - "step": 10122 - }, - { - "epoch": 0.778632412891316, - "learning_rate": 0.00034834873458569545, - "loss": 1.3113, - "step": 10123 - }, - { - "epoch": 0.7787093300515345, - "learning_rate": 0.0003481165274550803, - "loss": 0.787, - "step": 10124 - }, - { - "epoch": 0.7787862472117529, - "learning_rate": 0.0003478843875841417, - "loss": 1.4309, - "step": 10125 - }, - { - "epoch": 0.7788631643719713, - "learning_rate": 0.0003476523149864344, - "loss": 1.1621, - "step": 10126 - }, - { - "epoch": 0.7789400815321899, - "learning_rate": 0.00034742030967550906, - "loss": 1.36, - "step": 10127 - }, - { - "epoch": 0.7790169986924083, - "learning_rate": 0.00034718837166491375, - "loss": 1.4446, - "step": 10128 - }, - { - "epoch": 0.7790939158526268, - "learning_rate": 0.00034695650096819004, - "loss": 1.1737, - "step": 10129 - }, - { - "epoch": 0.7791708330128452, - "learning_rate": 0.0003467246975988785, - "loss": 1.2886, - "step": 10130 - }, - { - "epoch": 0.7792477501730636, - "learning_rate": 0.0003464929615705136, - "loss": 1.0131, - "step": 10131 - }, - { - "epoch": 0.7793246673332821, - "learning_rate": 0.0003462612928966268, - "loss": 1.0757, - "step": 10132 - }, - { - "epoch": 0.7794015844935005, - "learning_rate": 0.0003460296915907456, - "loss": 1.1423, - "step": 10133 - }, - { - "epoch": 0.7794785016537189, 
- "learning_rate": 0.0003457981576663932, - "loss": 1.2314, - "step": 10134 - }, - { - "epoch": 0.7795554188139374, - "learning_rate": 0.000345566691137089, - "loss": 1.246, - "step": 10135 - }, - { - "epoch": 0.7796323359741558, - "learning_rate": 0.0003453352920163491, - "loss": 1.1795, - "step": 10136 - }, - { - "epoch": 0.7797092531343742, - "learning_rate": 0.00034510396031768543, - "loss": 0.9148, - "step": 10137 - }, - { - "epoch": 0.7797861702945927, - "learning_rate": 0.0003448726960546042, - "loss": 1.1476, - "step": 10138 - }, - { - "epoch": 0.7798630874548111, - "learning_rate": 0.0003446414992406106, - "loss": 1.1403, - "step": 10139 - }, - { - "epoch": 0.7799400046150297, - "learning_rate": 0.0003444103698892034, - "loss": 1.257, - "step": 10140 - }, - { - "epoch": 0.7800169217752481, - "learning_rate": 0.0003441793080138799, - "loss": 1.0433, - "step": 10141 - }, - { - "epoch": 0.7800938389354665, - "learning_rate": 0.0003439483136281308, - "loss": 1.2958, - "step": 10142 - }, - { - "epoch": 0.780170756095685, - "learning_rate": 0.000343717386745444, - "loss": 1.4031, - "step": 10143 - }, - { - "epoch": 0.7802476732559034, - "learning_rate": 0.00034348652737930456, - "loss": 1.0079, - "step": 10144 - }, - { - "epoch": 0.7803245904161218, - "learning_rate": 0.00034325573554319205, - "loss": 1.8904, - "step": 10145 - }, - { - "epoch": 0.7804015075763403, - "learning_rate": 0.0003430250112505825, - "loss": 1.1857, - "step": 10146 - }, - { - "epoch": 0.7804784247365587, - "learning_rate": 0.00034279435451494843, - "loss": 1.5649, - "step": 10147 - }, - { - "epoch": 0.7805553418967772, - "learning_rate": 0.00034256376534975763, - "loss": 1.0356, - "step": 10148 - }, - { - "epoch": 0.7806322590569956, - "learning_rate": 0.0003423332437684753, - "loss": 1.4342, - "step": 10149 - }, - { - "epoch": 0.780709176217214, - "learning_rate": 0.00034210278978456164, - "loss": 1.1224, - "step": 10150 - }, - { - "epoch": 0.7807860933774325, - "learning_rate": 0.00034187240341147215, - "loss": 1.1346, - "step": 10151 - }, - { - "epoch": 0.780863010537651, - "learning_rate": 0.0003416420846626601, - "loss": 1.0649, - "step": 10152 - }, - { - "epoch": 0.7809399276978694, - "learning_rate": 0.0003414118335515742, - "loss": 1.3088, - "step": 10153 - }, - { - "epoch": 0.7810168448580879, - "learning_rate": 0.00034118165009165875, - "loss": 1.2056, - "step": 10154 - }, - { - "epoch": 0.7810937620183063, - "learning_rate": 0.0003409515342963544, - "loss": 1.0821, - "step": 10155 - }, - { - "epoch": 0.7811706791785247, - "learning_rate": 0.0003407214861790979, - "loss": 1.1166, - "step": 10156 - }, - { - "epoch": 0.7812475963387432, - "learning_rate": 0.0003404915057533216, - "loss": 1.2853, - "step": 10157 - }, - { - "epoch": 0.7813245134989616, - "learning_rate": 0.0003402615930324552, - "loss": 0.8062, - "step": 10158 - }, - { - "epoch": 0.7814014306591801, - "learning_rate": 0.00034003174802992286, - "loss": 0.9184, - "step": 10159 - }, - { - "epoch": 0.7814783478193985, - "learning_rate": 0.00033980197075914574, - "loss": 1.4058, - "step": 10160 - }, - { - "epoch": 0.7815552649796169, - "learning_rate": 0.0003395722612335406, - "loss": 1.0509, - "step": 10161 - }, - { - "epoch": 0.7816321821398354, - "learning_rate": 0.0003393426194665201, - "loss": 1.1489, - "step": 10162 - }, - { - "epoch": 0.7817090993000538, - "learning_rate": 0.0003391130454714945, - "loss": 0.9629, - "step": 10163 - }, - { - "epoch": 0.7817860164602722, - "learning_rate": 0.0003388835392618676, - "loss": 0.9332, - "step": 
10164 - }, - { - "epoch": 0.7818629336204908, - "learning_rate": 0.0003386541008510406, - "loss": 0.8313, - "step": 10165 - }, - { - "epoch": 0.7819398507807092, - "learning_rate": 0.00033842473025241126, - "loss": 1.1998, - "step": 10166 - }, - { - "epoch": 0.7820167679409277, - "learning_rate": 0.0003381954274793726, - "loss": 1.1993, - "step": 10167 - }, - { - "epoch": 0.7820936851011461, - "learning_rate": 0.0003379661925453137, - "loss": 1.1401, - "step": 10168 - }, - { - "epoch": 0.7821706022613645, - "learning_rate": 0.00033773702546361983, - "loss": 1.111, - "step": 10169 - }, - { - "epoch": 0.782247519421583, - "learning_rate": 0.00033750792624767184, - "loss": 1.2488, - "step": 10170 - }, - { - "epoch": 0.7823244365818014, - "learning_rate": 0.00033727889491084816, - "loss": 1.0438, - "step": 10171 - }, - { - "epoch": 0.7824013537420198, - "learning_rate": 0.00033704993146652177, - "loss": 1.3586, - "step": 10172 - }, - { - "epoch": 0.7824782709022383, - "learning_rate": 0.0003368210359280612, - "loss": 1.3086, - "step": 10173 - }, - { - "epoch": 0.7825551880624567, - "learning_rate": 0.00033659220830883294, - "loss": 1.0261, - "step": 10174 - }, - { - "epoch": 0.7826321052226752, - "learning_rate": 0.0003363634486221978, - "loss": 1.0639, - "step": 10175 - }, - { - "epoch": 0.7827090223828936, - "learning_rate": 0.00033613475688151436, - "loss": 1.049, - "step": 10176 - }, - { - "epoch": 0.782785939543112, - "learning_rate": 0.0003359061331001349, - "loss": 0.6166, - "step": 10177 - }, - { - "epoch": 0.7828628567033306, - "learning_rate": 0.0003356775772914091, - "loss": 1.0824, - "step": 10178 - }, - { - "epoch": 0.782939773863549, - "learning_rate": 0.0003354490894686835, - "loss": 1.1148, - "step": 10179 - }, - { - "epoch": 0.7830166910237674, - "learning_rate": 0.00033522066964529923, - "loss": 1.1823, - "step": 10180 - }, - { - "epoch": 0.7830936081839859, - "learning_rate": 0.0003349923178345941, - "loss": 1.5071, - "step": 10181 - }, - { - "epoch": 0.7831705253442043, - "learning_rate": 0.00033476403404990144, - "loss": 1.1846, - "step": 10182 - }, - { - "epoch": 0.7832474425044227, - "learning_rate": 0.0003345358183045513, - "loss": 1.2911, - "step": 10183 - }, - { - "epoch": 0.7833243596646412, - "learning_rate": 0.000334307670611869, - "loss": 1.3045, - "step": 10184 - }, - { - "epoch": 0.7834012768248596, - "learning_rate": 0.0003340795909851774, - "loss": 1.4123, - "step": 10185 - }, - { - "epoch": 0.7834781939850781, - "learning_rate": 0.00033385157943779275, - "loss": 0.7913, - "step": 10186 - }, - { - "epoch": 0.7835551111452965, - "learning_rate": 0.0003336236359830302, - "loss": 1.3126, - "step": 10187 - }, - { - "epoch": 0.7836320283055149, - "learning_rate": 0.000333395760634199, - "loss": 1.0296, - "step": 10188 - }, - { - "epoch": 0.7837089454657334, - "learning_rate": 0.00033316795340460525, - "loss": 1.1184, - "step": 10189 - }, - { - "epoch": 0.7837858626259518, - "learning_rate": 0.0003329402143075506, - "loss": 0.9802, - "step": 10190 - }, - { - "epoch": 0.7838627797861702, - "learning_rate": 0.00033271254335633327, - "loss": 1.1399, - "step": 10191 - }, - { - "epoch": 0.7839396969463888, - "learning_rate": 0.00033248494056424663, - "loss": 1.2731, - "step": 10192 - }, - { - "epoch": 0.7840166141066072, - "learning_rate": 0.00033225740594458147, - "loss": 0.9823, - "step": 10193 - }, - { - "epoch": 0.7840935312668257, - "learning_rate": 0.0003320299395106233, - "loss": 1.3699, - "step": 10194 - }, - { - "epoch": 0.7841704484270441, - "learning_rate": 
0.0003318025412756542, - "loss": 1.0667, - "step": 10195 - }, - { - "epoch": 0.7842473655872625, - "learning_rate": 0.0003315752112529523, - "loss": 1.3758, - "step": 10196 - }, - { - "epoch": 0.784324282747481, - "learning_rate": 0.0003313479494557909, - "loss": 1.1681, - "step": 10197 - }, - { - "epoch": 0.7844011999076994, - "learning_rate": 0.0003311207558974416, - "loss": 1.1172, - "step": 10198 - }, - { - "epoch": 0.7844781170679178, - "learning_rate": 0.00033089363059116914, - "loss": 1.0151, - "step": 10199 - }, - { - "epoch": 0.7845550342281363, - "learning_rate": 0.0003306665735502356, - "loss": 1.2513, - "step": 10200 - }, - { - "epoch": 0.7846319513883547, - "learning_rate": 0.0003304395847878998, - "loss": 1.4673, - "step": 10201 - }, - { - "epoch": 0.7847088685485731, - "learning_rate": 0.0003302126643174157, - "loss": 1.2164, - "step": 10202 - }, - { - "epoch": 0.7847857857087916, - "learning_rate": 0.00032998581215203316, - "loss": 1.2011, - "step": 10203 - }, - { - "epoch": 0.78486270286901, - "learning_rate": 0.0003297590283049984, - "loss": 1.0943, - "step": 10204 - }, - { - "epoch": 0.7849396200292286, - "learning_rate": 0.0003295323127895531, - "loss": 1.1943, - "step": 10205 - }, - { - "epoch": 0.785016537189447, - "learning_rate": 0.0003293056656189364, - "loss": 1.2752, - "step": 10206 - }, - { - "epoch": 0.7850934543496654, - "learning_rate": 0.00032907908680638237, - "loss": 1.6692, - "step": 10207 - }, - { - "epoch": 0.7851703715098839, - "learning_rate": 0.00032885257636511983, - "loss": 1.376, - "step": 10208 - }, - { - "epoch": 0.7852472886701023, - "learning_rate": 0.00032862613430837653, - "loss": 1.1115, - "step": 10209 - }, - { - "epoch": 0.7853242058303207, - "learning_rate": 0.00032839976064937383, - "loss": 1.011, - "step": 10210 - }, - { - "epoch": 0.7854011229905392, - "learning_rate": 0.0003281734554013302, - "loss": 1.2191, - "step": 10211 - }, - { - "epoch": 0.7854780401507576, - "learning_rate": 0.0003279472185774597, - "loss": 1.3628, - "step": 10212 - }, - { - "epoch": 0.7855549573109761, - "learning_rate": 0.0003277210501909722, - "loss": 0.9716, - "step": 10213 - }, - { - "epoch": 0.7856318744711945, - "learning_rate": 0.0003274949502550747, - "loss": 1.1441, - "step": 10214 - }, - { - "epoch": 0.7857087916314129, - "learning_rate": 0.000327268918782969, - "loss": 0.9489, - "step": 10215 - }, - { - "epoch": 0.7857857087916315, - "learning_rate": 0.00032704295578785337, - "loss": 0.8836, - "step": 10216 - }, - { - "epoch": 0.7858626259518499, - "learning_rate": 0.00032681706128292196, - "loss": 0.9046, - "step": 10217 - }, - { - "epoch": 0.7859395431120683, - "learning_rate": 0.00032659123528136497, - "loss": 1.2939, - "step": 10218 - }, - { - "epoch": 0.7860164602722868, - "learning_rate": 0.0003263654777963683, - "loss": 1.0273, - "step": 10219 - }, - { - "epoch": 0.7860933774325052, - "learning_rate": 0.0003261397888411153, - "loss": 1.094, - "step": 10220 - }, - { - "epoch": 0.7861702945927236, - "learning_rate": 0.000325914168428783, - "loss": 0.9821, - "step": 10221 - }, - { - "epoch": 0.7862472117529421, - "learning_rate": 0.0003256886165725456, - "loss": 1.2025, - "step": 10222 - }, - { - "epoch": 0.7863241289131605, - "learning_rate": 0.0003254631332855742, - "loss": 0.9703, - "step": 10223 - }, - { - "epoch": 0.786401046073379, - "learning_rate": 0.00032523771858103433, - "loss": 0.7507, - "step": 10224 - }, - { - "epoch": 0.7864779632335974, - "learning_rate": 0.00032501237247208856, - "loss": 1.0983, - "step": 10225 - }, - { - 
"epoch": 0.7865548803938158, - "learning_rate": 0.00032478709497189476, - "loss": 1.1567, - "step": 10226 - }, - { - "epoch": 0.7866317975540343, - "learning_rate": 0.0003245618860936069, - "loss": 1.1132, - "step": 10227 - }, - { - "epoch": 0.7867087147142527, - "learning_rate": 0.0003243367458503761, - "loss": 1.0461, - "step": 10228 - }, - { - "epoch": 0.7867856318744711, - "learning_rate": 0.0003241116742553482, - "loss": 1.5757, - "step": 10229 - }, - { - "epoch": 0.7868625490346897, - "learning_rate": 0.0003238866713216643, - "loss": 1.1834, - "step": 10230 - }, - { - "epoch": 0.7869394661949081, - "learning_rate": 0.0003236617370624638, - "loss": 1.2352, - "step": 10231 - }, - { - "epoch": 0.7870163833551266, - "learning_rate": 0.00032343687149088003, - "loss": 2.163, - "step": 10232 - }, - { - "epoch": 0.787093300515345, - "learning_rate": 0.0003232120746200444, - "loss": 1.071, - "step": 10233 - }, - { - "epoch": 0.7871702176755634, - "learning_rate": 0.00032298734646308164, - "loss": 0.7298, - "step": 10234 - }, - { - "epoch": 0.7872471348357819, - "learning_rate": 0.00032276268703311403, - "loss": 1.3017, - "step": 10235 - }, - { - "epoch": 0.7873240519960003, - "learning_rate": 0.00032253809634326034, - "loss": 1.0415, - "step": 10236 - }, - { - "epoch": 0.7874009691562187, - "learning_rate": 0.0003223135744066344, - "loss": 1.0901, - "step": 10237 - }, - { - "epoch": 0.7874778863164372, - "learning_rate": 0.0003220891212363461, - "loss": 1.1322, - "step": 10238 - }, - { - "epoch": 0.7875548034766556, - "learning_rate": 0.00032186473684550176, - "loss": 1.0514, - "step": 10239 - }, - { - "epoch": 0.787631720636874, - "learning_rate": 0.0003216404212472031, - "loss": 1.2795, - "step": 10240 - }, - { - "epoch": 0.7877086377970925, - "learning_rate": 0.00032141617445454796, - "loss": 1.1256, - "step": 10241 - }, - { - "epoch": 0.787785554957311, - "learning_rate": 0.0003211919964806312, - "loss": 1.6298, - "step": 10242 - }, - { - "epoch": 0.7878624721175295, - "learning_rate": 0.0003209678873385421, - "loss": 1.0925, - "step": 10243 - }, - { - "epoch": 0.7879393892777479, - "learning_rate": 0.0003207438470413669, - "loss": 1.3912, - "step": 10244 - }, - { - "epoch": 0.7880163064379663, - "learning_rate": 0.0003205198756021876, - "loss": 1.4718, - "step": 10245 - }, - { - "epoch": 0.7880932235981848, - "learning_rate": 0.0003202959730340818, - "loss": 1.2329, - "step": 10246 - }, - { - "epoch": 0.7881701407584032, - "learning_rate": 0.0003200721393501236, - "loss": 1.2347, - "step": 10247 - }, - { - "epoch": 0.7882470579186216, - "learning_rate": 0.00031984837456338293, - "loss": 0.858, - "step": 10248 - }, - { - "epoch": 0.7883239750788401, - "learning_rate": 0.0003196246786869253, - "loss": 1.2483, - "step": 10249 - }, - { - "epoch": 0.7884008922390585, - "learning_rate": 0.000319401051733813, - "loss": 1.0947, - "step": 10250 - }, - { - "epoch": 0.788477809399277, - "learning_rate": 0.0003191774937171038, - "loss": 0.9954, - "step": 10251 - }, - { - "epoch": 0.7885547265594954, - "learning_rate": 0.0003189540046498514, - "loss": 0.6918, - "step": 10252 - }, - { - "epoch": 0.7886316437197138, - "learning_rate": 0.00031873058454510555, - "loss": 0.8014, - "step": 10253 - }, - { - "epoch": 0.7887085608799324, - "learning_rate": 0.0003185072334159115, - "loss": 1.3529, - "step": 10254 - }, - { - "epoch": 0.7887854780401508, - "learning_rate": 0.0003182839512753119, - "loss": 1.1859, - "step": 10255 - }, - { - "epoch": 0.7888623952003692, - "learning_rate": 0.0003180607381363442, - 
"loss": 1.2967, - "step": 10256 - }, - { - "epoch": 0.7889393123605877, - "learning_rate": 0.00031783759401204107, - "loss": 1.4974, - "step": 10257 - }, - { - "epoch": 0.7890162295208061, - "learning_rate": 0.00031761451891543327, - "loss": 0.9802, - "step": 10258 - }, - { - "epoch": 0.7890931466810245, - "learning_rate": 0.00031739151285954594, - "loss": 0.807, - "step": 10259 - }, - { - "epoch": 0.789170063841243, - "learning_rate": 0.0003171685758574005, - "loss": 0.7691, - "step": 10260 - }, - { - "epoch": 0.7892469810014614, - "learning_rate": 0.00031694570792201485, - "loss": 1.1775, - "step": 10261 - }, - { - "epoch": 0.7893238981616799, - "learning_rate": 0.0003167229090664018, - "loss": 1.1752, - "step": 10262 - }, - { - "epoch": 0.7894008153218983, - "learning_rate": 0.0003165001793035716, - "loss": 1.0563, - "step": 10263 - }, - { - "epoch": 0.7894777324821167, - "learning_rate": 0.0003162775186465295, - "loss": 1.1318, - "step": 10264 - }, - { - "epoch": 0.7895546496423352, - "learning_rate": 0.00031605492710827656, - "loss": 1.0815, - "step": 10265 - }, - { - "epoch": 0.7896315668025536, - "learning_rate": 0.0003158324047018104, - "loss": 1.6239, - "step": 10266 - }, - { - "epoch": 0.789708483962772, - "learning_rate": 0.0003156099514401242, - "loss": 1.1011, - "step": 10267 - }, - { - "epoch": 0.7897854011229906, - "learning_rate": 0.0003153875673362069, - "loss": 1.0138, - "step": 10268 - }, - { - "epoch": 0.789862318283209, - "learning_rate": 0.0003151652524030448, - "loss": 1.3059, - "step": 10269 - }, - { - "epoch": 0.7899392354434275, - "learning_rate": 0.0003149430066536178, - "loss": 0.821, - "step": 10270 - }, - { - "epoch": 0.7900161526036459, - "learning_rate": 0.00031472083010090397, - "loss": 1.2222, - "step": 10271 - }, - { - "epoch": 0.7900930697638643, - "learning_rate": 0.00031449872275787625, - "loss": 1.3313, - "step": 10272 - }, - { - "epoch": 0.7901699869240828, - "learning_rate": 0.00031427668463750353, - "loss": 1.3168, - "step": 10273 - }, - { - "epoch": 0.7902469040843012, - "learning_rate": 0.0003140547157527509, - "loss": 1.4956, - "step": 10274 - }, - { - "epoch": 0.7903238212445196, - "learning_rate": 0.00031383281611657937, - "loss": 1.1706, - "step": 10275 - }, - { - "epoch": 0.7904007384047381, - "learning_rate": 0.00031361098574194556, - "loss": 1.2118, - "step": 10276 - }, - { - "epoch": 0.7904776555649565, - "learning_rate": 0.00031338922464180306, - "loss": 0.9576, - "step": 10277 - }, - { - "epoch": 0.7905545727251749, - "learning_rate": 0.0003131675328291009, - "loss": 1.2488, - "step": 10278 - }, - { - "epoch": 0.7906314898853934, - "learning_rate": 0.0003129459103167824, - "loss": 1.4225, - "step": 10279 - }, - { - "epoch": 0.7907084070456118, - "learning_rate": 0.0003127243571177899, - "loss": 1.0132, - "step": 10280 - }, - { - "epoch": 0.7907853242058304, - "learning_rate": 0.0003125028732450591, - "loss": 1.1335, - "step": 10281 - }, - { - "epoch": 0.7908622413660488, - "learning_rate": 0.00031228145871152387, - "loss": 0.9534, - "step": 10282 - }, - { - "epoch": 0.7909391585262672, - "learning_rate": 0.00031206011353011165, - "loss": 1.1116, - "step": 10283 - }, - { - "epoch": 0.7910160756864857, - "learning_rate": 0.00031183883771374716, - "loss": 1.123, - "step": 10284 - }, - { - "epoch": 0.7910929928467041, - "learning_rate": 0.0003116176312753515, - "loss": 1.1737, - "step": 10285 - }, - { - "epoch": 0.7911699100069225, - "learning_rate": 0.00031139649422784075, - "loss": 1.0924, - "step": 10286 - }, - { - "epoch": 
0.791246827167141, - "learning_rate": 0.00031117542658412774, - "loss": 1.2451, - "step": 10287 - }, - { - "epoch": 0.7913237443273594, - "learning_rate": 0.00031095442835712023, - "loss": 0.9532, - "step": 10288 - }, - { - "epoch": 0.7914006614875779, - "learning_rate": 0.00031073349955972263, - "loss": 1.1028, - "step": 10289 - }, - { - "epoch": 0.7914775786477963, - "learning_rate": 0.0003105126402048358, - "loss": 1.0861, - "step": 10290 - }, - { - "epoch": 0.7915544958080147, - "learning_rate": 0.00031029185030535583, - "loss": 1.0757, - "step": 10291 - }, - { - "epoch": 0.7916314129682333, - "learning_rate": 0.00031007112987417384, - "loss": 0.8651, - "step": 10292 - }, - { - "epoch": 0.7917083301284517, - "learning_rate": 0.00030985047892417915, - "loss": 1.0436, - "step": 10293 - }, - { - "epoch": 0.7917852472886701, - "learning_rate": 0.0003096298974682553, - "loss": 1.1169, - "step": 10294 - }, - { - "epoch": 0.7918621644488886, - "learning_rate": 0.0003094093855192823, - "loss": 0.919, - "step": 10295 - }, - { - "epoch": 0.791939081609107, - "learning_rate": 0.0003091889430901362, - "loss": 1.201, - "step": 10296 - }, - { - "epoch": 0.7920159987693255, - "learning_rate": 0.00030896857019368823, - "loss": 1.2314, - "step": 10297 - }, - { - "epoch": 0.7920929159295439, - "learning_rate": 0.0003087482668428072, - "loss": 0.8611, - "step": 10298 - }, - { - "epoch": 0.7921698330897623, - "learning_rate": 0.00030852803305035637, - "loss": 1.1182, - "step": 10299 - }, - { - "epoch": 0.7922467502499808, - "learning_rate": 0.00030830786882919524, - "loss": 1.2974, - "step": 10300 - }, - { - "epoch": 0.7923236674101992, - "learning_rate": 0.0003080877741921798, - "loss": 0.9761, - "step": 10301 - }, - { - "epoch": 0.7924005845704176, - "learning_rate": 0.0003078677491521613, - "loss": 1.2451, - "step": 10302 - }, - { - "epoch": 0.7924775017306361, - "learning_rate": 0.000307647793721987, - "loss": 1.1248, - "step": 10303 - }, - { - "epoch": 0.7925544188908545, - "learning_rate": 0.0003074279079145013, - "loss": 1.1212, - "step": 10304 - }, - { - "epoch": 0.7926313360510729, - "learning_rate": 0.00030720809174254266, - "loss": 1.3907, - "step": 10305 - }, - { - "epoch": 0.7927082532112915, - "learning_rate": 0.00030698834521894627, - "loss": 1.2372, - "step": 10306 - }, - { - "epoch": 0.7927851703715099, - "learning_rate": 0.00030676866835654417, - "loss": 1.4555, - "step": 10307 - }, - { - "epoch": 0.7928620875317284, - "learning_rate": 0.00030654906116816296, - "loss": 0.7656, - "step": 10308 - }, - { - "epoch": 0.7929390046919468, - "learning_rate": 0.0003063295236666259, - "loss": 1.0952, - "step": 10309 - }, - { - "epoch": 0.7930159218521652, - "learning_rate": 0.0003061100558647518, - "loss": 1.3046, - "step": 10310 - }, - { - "epoch": 0.7930928390123837, - "learning_rate": 0.0003058906577753556, - "loss": 0.8801, - "step": 10311 - }, - { - "epoch": 0.7931697561726021, - "learning_rate": 0.0003056713294112487, - "loss": 1.1586, - "step": 10312 - }, - { - "epoch": 0.7932466733328205, - "learning_rate": 0.00030545207078523797, - "loss": 1.227, - "step": 10313 - }, - { - "epoch": 0.793323590493039, - "learning_rate": 0.00030523288191012496, - "loss": 1.1585, - "step": 10314 - }, - { - "epoch": 0.7934005076532574, - "learning_rate": 0.0003050137627987096, - "loss": 1.5249, - "step": 10315 - }, - { - "epoch": 0.7934774248134759, - "learning_rate": 0.00030479471346378544, - "loss": 1.1025, - "step": 10316 - }, - { - "epoch": 0.7935543419736943, - "learning_rate": 0.00030457573391814454, - 
"loss": 0.9091, - "step": 10317 - }, - { - "epoch": 0.7936312591339127, - "learning_rate": 0.0003043568241745719, - "loss": 1.1504, - "step": 10318 - }, - { - "epoch": 0.7937081762941313, - "learning_rate": 0.00030413798424585, - "loss": 1.0784, - "step": 10319 - }, - { - "epoch": 0.7937850934543497, - "learning_rate": 0.0003039192141447579, - "loss": 1.3788, - "step": 10320 - }, - { - "epoch": 0.7938620106145681, - "learning_rate": 0.0003037005138840693, - "loss": 1.3925, - "step": 10321 - }, - { - "epoch": 0.7939389277747866, - "learning_rate": 0.00030348188347655464, - "loss": 1.193, - "step": 10322 - }, - { - "epoch": 0.794015844935005, - "learning_rate": 0.00030326332293497966, - "loss": 0.8348, - "step": 10323 - }, - { - "epoch": 0.7940927620952234, - "learning_rate": 0.0003030448322721065, - "loss": 1.3422, - "step": 10324 - }, - { - "epoch": 0.7941696792554419, - "learning_rate": 0.00030282641150069264, - "loss": 1.512, - "step": 10325 - }, - { - "epoch": 0.7942465964156603, - "learning_rate": 0.000302608060633493, - "loss": 1.129, - "step": 10326 - }, - { - "epoch": 0.7943235135758788, - "learning_rate": 0.0003023897796832559, - "loss": 1.0456, - "step": 10327 - }, - { - "epoch": 0.7944004307360972, - "learning_rate": 0.000302171568662728, - "loss": 1.1939, - "step": 10328 - }, - { - "epoch": 0.7944773478963156, - "learning_rate": 0.0003019534275846505, - "loss": 1.1195, - "step": 10329 - }, - { - "epoch": 0.7945542650565341, - "learning_rate": 0.00030173535646176107, - "loss": 1.2152, - "step": 10330 - }, - { - "epoch": 0.7946311822167526, - "learning_rate": 0.00030151735530679295, - "loss": 1.0945, - "step": 10331 - }, - { - "epoch": 0.794708099376971, - "learning_rate": 0.00030129942413247544, - "loss": 1.1961, - "step": 10332 - }, - { - "epoch": 0.7947850165371895, - "learning_rate": 0.0003010815629515334, - "loss": 1.0837, - "step": 10333 - }, - { - "epoch": 0.7948619336974079, - "learning_rate": 0.0003008637717766889, - "loss": 1.3923, - "step": 10334 - }, - { - "epoch": 0.7949388508576264, - "learning_rate": 0.0003006460506206583, - "loss": 0.8961, - "step": 10335 - }, - { - "epoch": 0.7950157680178448, - "learning_rate": 0.0003004283994961548, - "loss": 1.2444, - "step": 10336 - }, - { - "epoch": 0.7950926851780632, - "learning_rate": 0.0003002108184158872, - "loss": 1.0956, - "step": 10337 - }, - { - "epoch": 0.7951696023382817, - "learning_rate": 0.00029999330739256005, - "loss": 1.4607, - "step": 10338 - }, - { - "epoch": 0.7952465194985001, - "learning_rate": 0.0002997758664388749, - "loss": 1.1263, - "step": 10339 - }, - { - "epoch": 0.7953234366587185, - "learning_rate": 0.00029955849556752745, - "loss": 1.27, - "step": 10340 - }, - { - "epoch": 0.795400353818937, - "learning_rate": 0.0002993411947912101, - "loss": 1.7407, - "step": 10341 - }, - { - "epoch": 0.7954772709791554, - "learning_rate": 0.0002991239641226122, - "loss": 1.254, - "step": 10342 - }, - { - "epoch": 0.7955541881393738, - "learning_rate": 0.0002989068035744174, - "loss": 1.2726, - "step": 10343 - }, - { - "epoch": 0.7956311052995924, - "learning_rate": 0.00029868971315930617, - "loss": 0.993, - "step": 10344 - }, - { - "epoch": 0.7957080224598108, - "learning_rate": 0.00029847269288995444, - "loss": 1.3377, - "step": 10345 - }, - { - "epoch": 0.7957849396200293, - "learning_rate": 0.0002982557427790341, - "loss": 1.064, - "step": 10346 - }, - { - "epoch": 0.7958618567802477, - "learning_rate": 0.00029803886283921374, - "loss": 0.9866, - "step": 10347 - }, - { - "epoch": 0.7959387739404661, - 
"learning_rate": 0.0002978220530831571, - "loss": 1.3031, - "step": 10348 - }, - { - "epoch": 0.7960156911006846, - "learning_rate": 0.00029760531352352306, - "loss": 1.13, - "step": 10349 - }, - { - "epoch": 0.796092608260903, - "learning_rate": 0.00029738864417296816, - "loss": 1.2601, - "step": 10350 - }, - { - "epoch": 0.7961695254211214, - "learning_rate": 0.00029717204504414377, - "loss": 1.0851, - "step": 10351 - }, - { - "epoch": 0.7962464425813399, - "learning_rate": 0.0002969555161496971, - "loss": 1.4252, - "step": 10352 - }, - { - "epoch": 0.7963233597415583, - "learning_rate": 0.0002967390575022717, - "loss": 1.2639, - "step": 10353 - }, - { - "epoch": 0.7964002769017768, - "learning_rate": 0.0002965226691145064, - "loss": 1.2395, - "step": 10354 - }, - { - "epoch": 0.7964771940619952, - "learning_rate": 0.00029630635099903683, - "loss": 1.0853, - "step": 10355 - }, - { - "epoch": 0.7965541112222136, - "learning_rate": 0.0002960901031684942, - "loss": 1.3469, - "step": 10356 - }, - { - "epoch": 0.7966310283824322, - "learning_rate": 0.00029587392563550487, - "loss": 1.2555, - "step": 10357 - }, - { - "epoch": 0.7967079455426506, - "learning_rate": 0.00029565781841269217, - "loss": 1.2899, - "step": 10358 - }, - { - "epoch": 0.796784862702869, - "learning_rate": 0.0002954417815126744, - "loss": 1.1721, - "step": 10359 - }, - { - "epoch": 0.7968617798630875, - "learning_rate": 0.000295225814948066, - "loss": 1.0623, - "step": 10360 - }, - { - "epoch": 0.7969386970233059, - "learning_rate": 0.00029500991873147864, - "loss": 1.3483, - "step": 10361 - }, - { - "epoch": 0.7970156141835243, - "learning_rate": 0.00029479409287551763, - "loss": 1.4211, - "step": 10362 - }, - { - "epoch": 0.7970925313437428, - "learning_rate": 0.0002945783373927852, - "loss": 1.2193, - "step": 10363 - }, - { - "epoch": 0.7971694485039612, - "learning_rate": 0.00029436265229588023, - "loss": 1.2345, - "step": 10364 - }, - { - "epoch": 0.7972463656641797, - "learning_rate": 0.00029414703759739636, - "loss": 1.1392, - "step": 10365 - }, - { - "epoch": 0.7973232828243981, - "learning_rate": 0.0002939314933099239, - "loss": 1.3871, - "step": 10366 - }, - { - "epoch": 0.7974001999846165, - "learning_rate": 0.0002937160194460483, - "loss": 1.1328, - "step": 10367 - }, - { - "epoch": 0.797477117144835, - "learning_rate": 0.00029350061601835116, - "loss": 1.1989, - "step": 10368 - }, - { - "epoch": 0.7975540343050534, - "learning_rate": 0.0002932852830394108, - "loss": 0.9767, - "step": 10369 - }, - { - "epoch": 0.7976309514652719, - "learning_rate": 0.00029307002052180093, - "loss": 0.736, - "step": 10370 - }, - { - "epoch": 0.7977078686254904, - "learning_rate": 0.0002928548284780894, - "loss": 1.5841, - "step": 10371 - }, - { - "epoch": 0.7977847857857088, - "learning_rate": 0.0002926397069208429, - "loss": 1.3683, - "step": 10372 - }, - { - "epoch": 0.7978617029459273, - "learning_rate": 0.00029242465586262187, - "loss": 1.1142, - "step": 10373 - }, - { - "epoch": 0.7979386201061457, - "learning_rate": 0.00029220967531598454, - "loss": 1.3105, - "step": 10374 - }, - { - "epoch": 0.7980155372663641, - "learning_rate": 0.00029199476529348257, - "loss": 1.1201, - "step": 10375 - }, - { - "epoch": 0.7980924544265826, - "learning_rate": 0.0002917799258076648, - "loss": 1.2185, - "step": 10376 - }, - { - "epoch": 0.798169371586801, - "learning_rate": 0.0002915651568710766, - "loss": 1.6242, - "step": 10377 - }, - { - "epoch": 0.7982462887470194, - "learning_rate": 0.00029135045849625833, - "loss": 1.3077, - 
"step": 10378 - }, - { - "epoch": 0.7983232059072379, - "learning_rate": 0.0002911358306957465, - "loss": 0.8829, - "step": 10379 - }, - { - "epoch": 0.7984001230674563, - "learning_rate": 0.0002909212734820734, - "loss": 0.7368, - "step": 10380 - }, - { - "epoch": 0.7984770402276747, - "learning_rate": 0.0002907067868677671, - "loss": 0.8756, - "step": 10381 - }, - { - "epoch": 0.7985539573878933, - "learning_rate": 0.00029049237086535145, - "loss": 0.8449, - "step": 10382 - }, - { - "epoch": 0.7986308745481117, - "learning_rate": 0.0002902780254873477, - "loss": 1.052, - "step": 10383 - }, - { - "epoch": 0.7987077917083302, - "learning_rate": 0.00029006375074626986, - "loss": 1.1996, - "step": 10384 - }, - { - "epoch": 0.7987847088685486, - "learning_rate": 0.00028984954665463097, - "loss": 1.2681, - "step": 10385 - }, - { - "epoch": 0.798861626028767, - "learning_rate": 0.00028963541322493826, - "loss": 1.1129, - "step": 10386 - }, - { - "epoch": 0.7989385431889855, - "learning_rate": 0.00028942135046969514, - "loss": 1.2128, - "step": 10387 - }, - { - "epoch": 0.7990154603492039, - "learning_rate": 0.00028920735840140115, - "loss": 0.8635, - "step": 10388 - }, - { - "epoch": 0.7990923775094223, - "learning_rate": 0.00028899343703255113, - "loss": 1.1278, - "step": 10389 - }, - { - "epoch": 0.7991692946696408, - "learning_rate": 0.0002887795863756363, - "loss": 1.3238, - "step": 10390 - }, - { - "epoch": 0.7992462118298592, - "learning_rate": 0.0002885658064431439, - "loss": 1.143, - "step": 10391 - }, - { - "epoch": 0.7993231289900777, - "learning_rate": 0.00028835209724755665, - "loss": 0.8206, - "step": 10392 - }, - { - "epoch": 0.7994000461502961, - "learning_rate": 0.00028813845880135315, - "loss": 0.9658, - "step": 10393 - }, - { - "epoch": 0.7994769633105145, - "learning_rate": 0.00028792489111700815, - "loss": 1.4044, - "step": 10394 - }, - { - "epoch": 0.7995538804707331, - "learning_rate": 0.0002877113942069914, - "loss": 0.9718, - "step": 10395 - }, - { - "epoch": 0.7996307976309515, - "learning_rate": 0.00028749796808377054, - "loss": 0.5927, - "step": 10396 - }, - { - "epoch": 0.7997077147911699, - "learning_rate": 0.00028728461275980666, - "loss": 1.3003, - "step": 10397 - }, - { - "epoch": 0.7997846319513884, - "learning_rate": 0.0002870713282475578, - "loss": 0.9377, - "step": 10398 - }, - { - "epoch": 0.7998615491116068, - "learning_rate": 0.0002868581145594784, - "loss": 0.8702, - "step": 10399 - }, - { - "epoch": 0.7999384662718253, - "learning_rate": 0.00028664497170801794, - "loss": 1.1248, - "step": 10400 - }, - { - "epoch": 0.8000153834320437, - "learning_rate": 0.00028643189970562226, - "loss": 1.0356, - "step": 10401 - }, - { - "epoch": 0.8000923005922621, - "learning_rate": 0.00028621889856473274, - "loss": 1.3393, - "step": 10402 - }, - { - "epoch": 0.8001692177524806, - "learning_rate": 0.0002860059682977861, - "loss": 0.9831, - "step": 10403 - }, - { - "epoch": 0.800246134912699, - "learning_rate": 0.0002857931089172168, - "loss": 0.8914, - "step": 10404 - }, - { - "epoch": 0.8003230520729174, - "learning_rate": 0.00028558032043545325, - "loss": 1.3209, - "step": 10405 - }, - { - "epoch": 0.8003999692331359, - "learning_rate": 0.0002853676028649202, - "loss": 1.1725, - "step": 10406 - }, - { - "epoch": 0.8004768863933543, - "learning_rate": 0.00028515495621803886, - "loss": 1.2159, - "step": 10407 - }, - { - "epoch": 0.8005538035535728, - "learning_rate": 0.0002849423805072257, - "loss": 1.2795, - "step": 10408 - }, - { - "epoch": 0.8006307207137913, - 
"learning_rate": 0.00028472987574489295, - "loss": 1.0802, - "step": 10409 - }, - { - "epoch": 0.8007076378740097, - "learning_rate": 0.00028451744194344985, - "loss": 1.1759, - "step": 10410 - }, - { - "epoch": 0.8007845550342282, - "learning_rate": 0.0002843050791152993, - "loss": 0.8882, - "step": 10411 - }, - { - "epoch": 0.8008614721944466, - "learning_rate": 0.0002840927872728427, - "loss": 1.1256, - "step": 10412 - }, - { - "epoch": 0.800938389354665, - "learning_rate": 0.0002838805664284754, - "loss": 1.0765, - "step": 10413 - }, - { - "epoch": 0.8010153065148835, - "learning_rate": 0.00028366841659458906, - "loss": 1.4075, - "step": 10414 - }, - { - "epoch": 0.8010922236751019, - "learning_rate": 0.0002834563377835717, - "loss": 1.1554, - "step": 10415 - }, - { - "epoch": 0.8011691408353203, - "learning_rate": 0.00028324433000780655, - "loss": 1.1122, - "step": 10416 - }, - { - "epoch": 0.8012460579955388, - "learning_rate": 0.0002830323932796728, - "loss": 0.8693, - "step": 10417 - }, - { - "epoch": 0.8013229751557572, - "learning_rate": 0.00028282052761154607, - "loss": 1.1575, - "step": 10418 - }, - { - "epoch": 0.8013998923159757, - "learning_rate": 0.00028260873301579737, - "loss": 1.001, - "step": 10419 - }, - { - "epoch": 0.8014768094761942, - "learning_rate": 0.0002823970095047934, - "loss": 1.0212, - "step": 10420 - }, - { - "epoch": 0.8015537266364126, - "learning_rate": 0.000282185357090897, - "loss": 1.3056, - "step": 10421 - }, - { - "epoch": 0.8016306437966311, - "learning_rate": 0.00028197377578646657, - "loss": 1.321, - "step": 10422 - }, - { - "epoch": 0.8017075609568495, - "learning_rate": 0.0002817622656038574, - "loss": 1.0864, - "step": 10423 - }, - { - "epoch": 0.8017844781170679, - "learning_rate": 0.00028155082655541877, - "loss": 0.9638, - "step": 10424 - }, - { - "epoch": 0.8018613952772864, - "learning_rate": 0.0002813394586534968, - "loss": 1.136, - "step": 10425 - }, - { - "epoch": 0.8019383124375048, - "learning_rate": 0.00028112816191043434, - "loss": 0.99, - "step": 10426 - }, - { - "epoch": 0.8020152295977232, - "learning_rate": 0.0002809169363385686, - "loss": 1.1861, - "step": 10427 - }, - { - "epoch": 0.8020921467579417, - "learning_rate": 0.00028070578195023364, - "loss": 0.9569, - "step": 10428 - }, - { - "epoch": 0.8021690639181601, - "learning_rate": 0.0002804946987577589, - "loss": 1.2844, - "step": 10429 - }, - { - "epoch": 0.8022459810783786, - "learning_rate": 0.00028028368677346897, - "loss": 1.193, - "step": 10430 - }, - { - "epoch": 0.802322898238597, - "learning_rate": 0.0002800727460096864, - "loss": 1.2253, - "step": 10431 - }, - { - "epoch": 0.8023998153988154, - "learning_rate": 0.0002798618764787278, - "loss": 1.1087, - "step": 10432 - }, - { - "epoch": 0.802476732559034, - "learning_rate": 0.00027965107819290496, - "loss": 1.1771, - "step": 10433 - }, - { - "epoch": 0.8025536497192524, - "learning_rate": 0.00027944035116452795, - "loss": 1.2884, - "step": 10434 - }, - { - "epoch": 0.8026305668794708, - "learning_rate": 0.000279229695405901, - "loss": 1.1639, - "step": 10435 - }, - { - "epoch": 0.8027074840396893, - "learning_rate": 0.0002790191109293243, - "loss": 1.49, - "step": 10436 - }, - { - "epoch": 0.8027844011999077, - "learning_rate": 0.00027880859774709403, - "loss": 1.5186, - "step": 10437 - }, - { - "epoch": 0.8028613183601262, - "learning_rate": 0.00027859815587150225, - "loss": 1.1989, - "step": 10438 - }, - { - "epoch": 0.8029382355203446, - "learning_rate": 0.00027838778531483747, - "loss": 0.9743, - "step": 
10439 - }, - { - "epoch": 0.803015152680563, - "learning_rate": 0.0002781774860893829, - "loss": 0.9675, - "step": 10440 - }, - { - "epoch": 0.8030920698407815, - "learning_rate": 0.00027796725820741834, - "loss": 1.2377, - "step": 10441 - }, - { - "epoch": 0.8031689870009999, - "learning_rate": 0.00027775710168121917, - "loss": 1.145, - "step": 10442 - }, - { - "epoch": 0.8032459041612183, - "learning_rate": 0.0002775470165230566, - "loss": 1.266, - "step": 10443 - }, - { - "epoch": 0.8033228213214368, - "learning_rate": 0.00027733700274519736, - "loss": 1.2017, - "step": 10444 - }, - { - "epoch": 0.8033997384816552, - "learning_rate": 0.0002771270603599055, - "loss": 1.1181, - "step": 10445 - }, - { - "epoch": 0.8034766556418736, - "learning_rate": 0.0002769171893794387, - "loss": 0.6137, - "step": 10446 - }, - { - "epoch": 0.8035535728020922, - "learning_rate": 0.0002767073898160514, - "loss": 1.1542, - "step": 10447 - }, - { - "epoch": 0.8036304899623106, - "learning_rate": 0.00027649766168199495, - "loss": 1.2338, - "step": 10448 - }, - { - "epoch": 0.8037074071225291, - "learning_rate": 0.00027628800498951517, - "loss": 0.9504, - "step": 10449 - }, - { - "epoch": 0.8037843242827475, - "learning_rate": 0.0002760784197508539, - "loss": 1.4597, - "step": 10450 - }, - { - "epoch": 0.8038612414429659, - "learning_rate": 0.00027586890597824953, - "loss": 1.0946, - "step": 10451 - }, - { - "epoch": 0.8039381586031844, - "learning_rate": 0.000275659463683935, - "loss": 1.1617, - "step": 10452 - }, - { - "epoch": 0.8040150757634028, - "learning_rate": 0.0002754500928801407, - "loss": 1.0196, - "step": 10453 - }, - { - "epoch": 0.8040919929236212, - "learning_rate": 0.00027524079357909203, - "loss": 1.1086, - "step": 10454 - }, - { - "epoch": 0.8041689100838397, - "learning_rate": 0.00027503156579300914, - "loss": 1.1331, - "step": 10455 - }, - { - "epoch": 0.8042458272440581, - "learning_rate": 0.0002748224095341101, - "loss": 1.089, - "step": 10456 - }, - { - "epoch": 0.8043227444042766, - "learning_rate": 0.0002746133248146071, - "loss": 0.9148, - "step": 10457 - }, - { - "epoch": 0.804399661564495, - "learning_rate": 0.00027440431164670975, - "loss": 1.2087, - "step": 10458 - }, - { - "epoch": 0.8044765787247135, - "learning_rate": 0.0002741953700426218, - "loss": 1.1022, - "step": 10459 - }, - { - "epoch": 0.804553495884932, - "learning_rate": 0.0002739865000145431, - "loss": 1.1847, - "step": 10460 - }, - { - "epoch": 0.8046304130451504, - "learning_rate": 0.0002737777015746708, - "loss": 1.2637, - "step": 10461 - }, - { - "epoch": 0.8047073302053688, - "learning_rate": 0.00027356897473519644, - "loss": 1.1909, - "step": 10462 - }, - { - "epoch": 0.8047842473655873, - "learning_rate": 0.00027336031950830794, - "loss": 1.1146, - "step": 10463 - }, - { - "epoch": 0.8048611645258057, - "learning_rate": 0.00027315173590618854, - "loss": 1.2136, - "step": 10464 - }, - { - "epoch": 0.8049380816860241, - "learning_rate": 0.000272943223941018, - "loss": 1.2451, - "step": 10465 - }, - { - "epoch": 0.8050149988462426, - "learning_rate": 0.0002727347836249711, - "loss": 1.2267, - "step": 10466 - }, - { - "epoch": 0.805091916006461, - "learning_rate": 0.0002725264149702198, - "loss": 1.2632, - "step": 10467 - }, - { - "epoch": 0.8051688331666795, - "learning_rate": 0.0002723181179889298, - "loss": 1.211, - "step": 10468 - }, - { - "epoch": 0.8052457503268979, - "learning_rate": 0.00027210989269326456, - "loss": 1.2043, - "step": 10469 - }, - { - "epoch": 0.8053226674871163, - "learning_rate": 
0.0002719017390953824, - "loss": 1.116, - "step": 10470 - }, - { - "epoch": 0.8053995846473349, - "learning_rate": 0.0002716936572074377, - "loss": 1.2985, - "step": 10471 - }, - { - "epoch": 0.8054765018075533, - "learning_rate": 0.00027148564704158044, - "loss": 0.9905, - "step": 10472 - }, - { - "epoch": 0.8055534189677717, - "learning_rate": 0.0002712777086099564, - "loss": 1.1847, - "step": 10473 - }, - { - "epoch": 0.8056303361279902, - "learning_rate": 0.0002710698419247074, - "loss": 1.1079, - "step": 10474 - }, - { - "epoch": 0.8057072532882086, - "learning_rate": 0.0002708620469979712, - "loss": 1.5432, - "step": 10475 - }, - { - "epoch": 0.8057841704484271, - "learning_rate": 0.0002706543238418812, - "loss": 1.3157, - "step": 10476 - }, - { - "epoch": 0.8058610876086455, - "learning_rate": 0.0002704466724685665, - "loss": 0.9149, - "step": 10477 - }, - { - "epoch": 0.8059380047688639, - "learning_rate": 0.00027023909289015176, - "loss": 1.751, - "step": 10478 - }, - { - "epoch": 0.8060149219290824, - "learning_rate": 0.00027003158511875787, - "loss": 1.4602, - "step": 10479 - }, - { - "epoch": 0.8060918390893008, - "learning_rate": 0.0002698241491665021, - "loss": 1.0065, - "step": 10480 - }, - { - "epoch": 0.8061687562495192, - "learning_rate": 0.00026961678504549614, - "loss": 0.8948, - "step": 10481 - }, - { - "epoch": 0.8062456734097377, - "learning_rate": 0.0002694094927678479, - "loss": 1.0458, - "step": 10482 - }, - { - "epoch": 0.8063225905699561, - "learning_rate": 0.0002692022723456623, - "loss": 1.2696, - "step": 10483 - }, - { - "epoch": 0.8063995077301745, - "learning_rate": 0.00026899512379103873, - "loss": 1.0358, - "step": 10484 - }, - { - "epoch": 0.8064764248903931, - "learning_rate": 0.00026878804711607276, - "loss": 1.123, - "step": 10485 - }, - { - "epoch": 0.8065533420506115, - "learning_rate": 0.0002685810423328558, - "loss": 1.144, - "step": 10486 - }, - { - "epoch": 0.80663025921083, - "learning_rate": 0.0002683741094534749, - "loss": 1.4369, - "step": 10487 - }, - { - "epoch": 0.8067071763710484, - "learning_rate": 0.00026816724849001354, - "loss": 1.2492, - "step": 10488 - }, - { - "epoch": 0.8067840935312668, - "learning_rate": 0.00026796045945455077, - "loss": 1.2483, - "step": 10489 - }, - { - "epoch": 0.8068610106914853, - "learning_rate": 0.00026775374235916006, - "loss": 0.9917, - "step": 10490 - }, - { - "epoch": 0.8069379278517037, - "learning_rate": 0.0002675470972159129, - "loss": 1.4104, - "step": 10491 - }, - { - "epoch": 0.8070148450119221, - "learning_rate": 0.0002673405240368752, - "loss": 1.2326, - "step": 10492 - }, - { - "epoch": 0.8070917621721406, - "learning_rate": 0.00026713402283410877, - "loss": 0.9989, - "step": 10493 - }, - { - "epoch": 0.807168679332359, - "learning_rate": 0.0002669275936196718, - "loss": 1.417, - "step": 10494 - }, - { - "epoch": 0.8072455964925775, - "learning_rate": 0.0002667212364056173, - "loss": 1.1293, - "step": 10495 - }, - { - "epoch": 0.807322513652796, - "learning_rate": 0.0002665149512039955, - "loss": 0.891, - "step": 10496 - }, - { - "epoch": 0.8073994308130144, - "learning_rate": 0.0002663087380268511, - "loss": 1.0769, - "step": 10497 - }, - { - "epoch": 0.8074763479732329, - "learning_rate": 0.00026610259688622523, - "loss": 0.8323, - "step": 10498 - }, - { - "epoch": 0.8075532651334513, - "learning_rate": 0.00026589652779415485, - "loss": 1.1398, - "step": 10499 - }, - { - "epoch": 0.8076301822936697, - "learning_rate": 0.0002656905307626721, - "loss": 0.9009, - "step": 10500 - }, - { - 
"epoch": 0.8077070994538882, - "learning_rate": 0.00026548460580380547, - "loss": 1.4085, - "step": 10501 - }, - { - "epoch": 0.8077840166141066, - "learning_rate": 0.0002652787529295799, - "loss": 1.3816, - "step": 10502 - }, - { - "epoch": 0.8078609337743251, - "learning_rate": 0.0002650729721520141, - "loss": 1.3336, - "step": 10503 - }, - { - "epoch": 0.8079378509345435, - "learning_rate": 0.00026486726348312465, - "loss": 0.9549, - "step": 10504 - }, - { - "epoch": 0.8080147680947619, - "learning_rate": 0.000264661626934923, - "loss": 1.2421, - "step": 10505 - }, - { - "epoch": 0.8080916852549804, - "learning_rate": 0.0002644560625194165, - "loss": 0.9791, - "step": 10506 - }, - { - "epoch": 0.8081686024151988, - "learning_rate": 0.00026425057024860804, - "loss": 1.113, - "step": 10507 - }, - { - "epoch": 0.8082455195754172, - "learning_rate": 0.0002640451501344967, - "loss": 1.2074, - "step": 10508 - }, - { - "epoch": 0.8083224367356358, - "learning_rate": 0.00026383980218907686, - "loss": 0.786, - "step": 10509 - }, - { - "epoch": 0.8083993538958542, - "learning_rate": 0.0002636345264243395, - "loss": 1.3403, - "step": 10510 - }, - { - "epoch": 0.8084762710560726, - "learning_rate": 0.0002634293228522711, - "loss": 1.1428, - "step": 10511 - }, - { - "epoch": 0.8085531882162911, - "learning_rate": 0.0002632241914848525, - "loss": 1.1962, - "step": 10512 - }, - { - "epoch": 0.8086301053765095, - "learning_rate": 0.00026301913233406257, - "loss": 1.6917, - "step": 10513 - }, - { - "epoch": 0.808707022536728, - "learning_rate": 0.0002628141454118744, - "loss": 1.0791, - "step": 10514 - }, - { - "epoch": 0.8087839396969464, - "learning_rate": 0.0002626092307302582, - "loss": 0.9986, - "step": 10515 - }, - { - "epoch": 0.8088608568571648, - "learning_rate": 0.00026240438830117853, - "loss": 1.1085, - "step": 10516 - }, - { - "epoch": 0.8089377740173833, - "learning_rate": 0.00026219961813659585, - "loss": 1.2558, - "step": 10517 - }, - { - "epoch": 0.8090146911776017, - "learning_rate": 0.0002619949202484679, - "loss": 1.0907, - "step": 10518 - }, - { - "epoch": 0.8090916083378201, - "learning_rate": 0.00026179029464874667, - "loss": 0.9909, - "step": 10519 - }, - { - "epoch": 0.8091685254980386, - "learning_rate": 0.00026158574134938064, - "loss": 1.5166, - "step": 10520 - }, - { - "epoch": 0.809245442658257, - "learning_rate": 0.0002613812603623137, - "loss": 1.1873, - "step": 10521 - }, - { - "epoch": 0.8093223598184756, - "learning_rate": 0.00026117685169948563, - "loss": 1.0856, - "step": 10522 - }, - { - "epoch": 0.809399276978694, - "learning_rate": 0.00026097251537283257, - "loss": 1.3302, - "step": 10523 - }, - { - "epoch": 0.8094761941389124, - "learning_rate": 0.000260768251394286, - "loss": 1.1469, - "step": 10524 - }, - { - "epoch": 0.8095531112991309, - "learning_rate": 0.0002605640597757721, - "loss": 0.9616, - "step": 10525 - }, - { - "epoch": 0.8096300284593493, - "learning_rate": 0.00026035994052921456, - "loss": 0.9726, - "step": 10526 - }, - { - "epoch": 0.8097069456195677, - "learning_rate": 0.0002601558936665323, - "loss": 1.0679, - "step": 10527 - }, - { - "epoch": 0.8097838627797862, - "learning_rate": 0.0002599519191996394, - "loss": 0.9663, - "step": 10528 - }, - { - "epoch": 0.8098607799400046, - "learning_rate": 0.0002597480171404463, - "loss": 1.2546, - "step": 10529 - }, - { - "epoch": 0.809937697100223, - "learning_rate": 0.0002595441875008591, - "loss": 1.072, - "step": 10530 - }, - { - "epoch": 0.8100146142604415, - "learning_rate": 0.0002593404302927791, 
- "loss": 1.074, - "step": 10531 - }, - { - "epoch": 0.8100915314206599, - "learning_rate": 0.0002591367455281047, - "loss": 1.1389, - "step": 10532 - }, - { - "epoch": 0.8101684485808784, - "learning_rate": 0.00025893313321872894, - "loss": 1.0699, - "step": 10533 - }, - { - "epoch": 0.8102453657410968, - "learning_rate": 0.00025872959337654096, - "loss": 1.2549, - "step": 10534 - }, - { - "epoch": 0.8103222829013152, - "learning_rate": 0.0002585261260134257, - "loss": 1.3641, - "step": 10535 - }, - { - "epoch": 0.8103992000615338, - "learning_rate": 0.0002583227311412633, - "loss": 0.9831, - "step": 10536 - }, - { - "epoch": 0.8104761172217522, - "learning_rate": 0.0002581194087719315, - "loss": 1.3057, - "step": 10537 - }, - { - "epoch": 0.8105530343819706, - "learning_rate": 0.0002579161589173013, - "loss": 1.1308, - "step": 10538 - }, - { - "epoch": 0.8106299515421891, - "learning_rate": 0.00025771298158924066, - "loss": 1.0699, - "step": 10539 - }, - { - "epoch": 0.8107068687024075, - "learning_rate": 0.00025750987679961415, - "loss": 1.2459, - "step": 10540 - }, - { - "epoch": 0.810783785862626, - "learning_rate": 0.0002573068445602809, - "loss": 1.464, - "step": 10541 - }, - { - "epoch": 0.8108607030228444, - "learning_rate": 0.0002571038848830959, - "loss": 1.5286, - "step": 10542 - }, - { - "epoch": 0.8109376201830628, - "learning_rate": 0.0002569009977799104, - "loss": 1.2353, - "step": 10543 - }, - { - "epoch": 0.8110145373432813, - "learning_rate": 0.0002566981832625709, - "loss": 1.4405, - "step": 10544 - }, - { - "epoch": 0.8110914545034997, - "learning_rate": 0.0002564954413429204, - "loss": 1.166, - "step": 10545 - }, - { - "epoch": 0.8111683716637181, - "learning_rate": 0.00025629277203279715, - "loss": 0.9393, - "step": 10546 - }, - { - "epoch": 0.8112452888239366, - "learning_rate": 0.0002560901753440352, - "loss": 0.853, - "step": 10547 - }, - { - "epoch": 0.811322205984155, - "learning_rate": 0.00025588765128846413, - "loss": 1.2401, - "step": 10548 - }, - { - "epoch": 0.8113991231443735, - "learning_rate": 0.00025568519987790976, - "loss": 1.0539, - "step": 10549 - }, - { - "epoch": 0.811476040304592, - "learning_rate": 0.0002554828211241933, - "loss": 1.314, - "step": 10550 - }, - { - "epoch": 0.8115529574648104, - "learning_rate": 0.0002552805150391321, - "loss": 1.0729, - "step": 10551 - }, - { - "epoch": 0.8116298746250289, - "learning_rate": 0.00025507828163453834, - "loss": 1.1186, - "step": 10552 - }, - { - "epoch": 0.8117067917852473, - "learning_rate": 0.00025487612092222147, - "loss": 1.188, - "step": 10553 - }, - { - "epoch": 0.8117837089454657, - "learning_rate": 0.0002546740329139855, - "loss": 1.4008, - "step": 10554 - }, - { - "epoch": 0.8118606261056842, - "learning_rate": 0.0002544720176216307, - "loss": 1.1238, - "step": 10555 - }, - { - "epoch": 0.8119375432659026, - "learning_rate": 0.00025427007505695275, - "loss": 1.4683, - "step": 10556 - }, - { - "epoch": 0.812014460426121, - "learning_rate": 0.00025406820523174364, - "loss": 1.5652, - "step": 10557 - }, - { - "epoch": 0.8120913775863395, - "learning_rate": 0.00025386640815778985, - "loss": 1.0449, - "step": 10558 - }, - { - "epoch": 0.8121682947465579, - "learning_rate": 0.0002536646838468756, - "loss": 1.2106, - "step": 10559 - }, - { - "epoch": 0.8122452119067765, - "learning_rate": 0.00025346303231077943, - "loss": 1.4836, - "step": 10560 - }, - { - "epoch": 0.8123221290669949, - "learning_rate": 0.000253261453561276, - "loss": 0.9808, - "step": 10561 - }, - { - "epoch": 
0.8123990462272133, - "learning_rate": 0.0002530599476101354, - "loss": 0.9315, - "step": 10562 - }, - { - "epoch": 0.8124759633874318, - "learning_rate": 0.00025285851446912405, - "loss": 0.9325, - "step": 10563 - }, - { - "epoch": 0.8125528805476502, - "learning_rate": 0.0002526571541500039, - "loss": 0.9049, - "step": 10564 - }, - { - "epoch": 0.8126297977078686, - "learning_rate": 0.00025245586666453236, - "loss": 1.0214, - "step": 10565 - }, - { - "epoch": 0.8127067148680871, - "learning_rate": 0.0002522546520244627, - "loss": 1.0878, - "step": 10566 - }, - { - "epoch": 0.8127836320283055, - "learning_rate": 0.0002520535102415445, - "loss": 1.2121, - "step": 10567 - }, - { - "epoch": 0.8128605491885239, - "learning_rate": 0.0002518524413275224, - "loss": 1.1159, - "step": 10568 - }, - { - "epoch": 0.8129374663487424, - "learning_rate": 0.00025165144529413716, - "loss": 0.9496, - "step": 10569 - }, - { - "epoch": 0.8130143835089608, - "learning_rate": 0.00025145052215312484, - "loss": 1.2616, - "step": 10570 - }, - { - "epoch": 0.8130913006691793, - "learning_rate": 0.0002512496719162175, - "loss": 1.0543, - "step": 10571 - }, - { - "epoch": 0.8131682178293977, - "learning_rate": 0.0002510488945951437, - "loss": 1.1819, - "step": 10572 - }, - { - "epoch": 0.8132451349896161, - "learning_rate": 0.0002508481902016269, - "loss": 1.0739, - "step": 10573 - }, - { - "epoch": 0.8133220521498347, - "learning_rate": 0.00025064755874738517, - "loss": 0.873, - "step": 10574 - }, - { - "epoch": 0.8133989693100531, - "learning_rate": 0.000250447000244135, - "loss": 1.2985, - "step": 10575 - }, - { - "epoch": 0.8134758864702715, - "learning_rate": 0.0002502465147035868, - "loss": 1.3009, - "step": 10576 - }, - { - "epoch": 0.81355280363049, - "learning_rate": 0.00025004610213744707, - "loss": 1.106, - "step": 10577 - }, - { - "epoch": 0.8136297207907084, - "learning_rate": 0.00024984576255741815, - "loss": 0.9423, - "step": 10578 - }, - { - "epoch": 0.8137066379509269, - "learning_rate": 0.00024964549597519754, - "loss": 1.2203, - "step": 10579 - }, - { - "epoch": 0.8137835551111453, - "learning_rate": 0.00024944530240247984, - "loss": 0.921, - "step": 10580 - }, - { - "epoch": 0.8138604722713637, - "learning_rate": 0.0002492451818509543, - "loss": 0.9101, - "step": 10581 - }, - { - "epoch": 0.8139373894315822, - "learning_rate": 0.0002490451343323061, - "loss": 1.046, - "step": 10582 - }, - { - "epoch": 0.8140143065918006, - "learning_rate": 0.0002488451598582164, - "loss": 0.9688, - "step": 10583 - }, - { - "epoch": 0.814091223752019, - "learning_rate": 0.00024864525844036153, - "loss": 0.7727, - "step": 10584 - }, - { - "epoch": 0.8141681409122375, - "learning_rate": 0.0002484454300904137, - "loss": 1.0728, - "step": 10585 - }, - { - "epoch": 0.814245058072456, - "learning_rate": 0.0002482456748200425, - "loss": 0.9359, - "step": 10586 - }, - { - "epoch": 0.8143219752326744, - "learning_rate": 0.00024804599264091046, - "loss": 1.4848, - "step": 10587 - }, - { - "epoch": 0.8143988923928929, - "learning_rate": 0.00024784638356467717, - "loss": 0.8717, - "step": 10588 - }, - { - "epoch": 0.8144758095531113, - "learning_rate": 0.00024764684760299893, - "loss": 1.0713, - "step": 10589 - }, - { - "epoch": 0.8145527267133298, - "learning_rate": 0.00024744738476752647, - "loss": 1.663, - "step": 10590 - }, - { - "epoch": 0.8146296438735482, - "learning_rate": 0.00024724799506990665, - "loss": 0.8926, - "step": 10591 - }, - { - "epoch": 0.8147065610337666, - "learning_rate": 0.00024704867852178193, - 
"loss": 0.9111, - "step": 10592 - }, - { - "epoch": 0.8147834781939851, - "learning_rate": 0.00024684943513479034, - "loss": 1.673, - "step": 10593 - }, - { - "epoch": 0.8148603953542035, - "learning_rate": 0.00024665026492056666, - "loss": 0.9266, - "step": 10594 - }, - { - "epoch": 0.8149373125144219, - "learning_rate": 0.00024645116789074044, - "loss": 1.0953, - "step": 10595 - }, - { - "epoch": 0.8150142296746404, - "learning_rate": 0.00024625214405693617, - "loss": 1.0139, - "step": 10596 - }, - { - "epoch": 0.8150911468348588, - "learning_rate": 0.00024605319343077637, - "loss": 0.9901, - "step": 10597 - }, - { - "epoch": 0.8151680639950774, - "learning_rate": 0.0002458543160238769, - "loss": 1.2695, - "step": 10598 - }, - { - "epoch": 0.8152449811552958, - "learning_rate": 0.0002456555118478519, - "loss": 1.116, - "step": 10599 - }, - { - "epoch": 0.8153218983155142, - "learning_rate": 0.00024545678091430817, - "loss": 1.2579, - "step": 10600 - }, - { - "epoch": 0.8153988154757327, - "learning_rate": 0.00024525812323485026, - "loss": 1.2549, - "step": 10601 - }, - { - "epoch": 0.8154757326359511, - "learning_rate": 0.0002450595388210787, - "loss": 1.6772, - "step": 10602 - }, - { - "epoch": 0.8155526497961695, - "learning_rate": 0.0002448610276845883, - "loss": 1.288, - "step": 10603 - }, - { - "epoch": 0.815629566956388, - "learning_rate": 0.00024466258983697064, - "loss": 1.0606, - "step": 10604 - }, - { - "epoch": 0.8157064841166064, - "learning_rate": 0.00024446422528981264, - "loss": 1.4465, - "step": 10605 - }, - { - "epoch": 0.8157834012768248, - "learning_rate": 0.00024426593405469694, - "loss": 1.2004, - "step": 10606 - }, - { - "epoch": 0.8158603184370433, - "learning_rate": 0.0002440677161432019, - "loss": 1.357, - "step": 10607 - }, - { - "epoch": 0.8159372355972617, - "learning_rate": 0.00024386957156690225, - "loss": 1.0785, - "step": 10608 - }, - { - "epoch": 0.8160141527574802, - "learning_rate": 0.00024367150033736696, - "loss": 1.1325, - "step": 10609 - }, - { - "epoch": 0.8160910699176986, - "learning_rate": 0.00024347350246616223, - "loss": 1.4211, - "step": 10610 - }, - { - "epoch": 0.816167987077917, - "learning_rate": 0.00024327557796484934, - "loss": 1.2836, - "step": 10611 - }, - { - "epoch": 0.8162449042381356, - "learning_rate": 0.0002430777268449852, - "loss": 1.1152, - "step": 10612 - }, - { - "epoch": 0.816321821398354, - "learning_rate": 0.00024287994911812255, - "loss": 1.3357, - "step": 10613 - }, - { - "epoch": 0.8163987385585724, - "learning_rate": 0.00024268224479580969, - "loss": 1.209, - "step": 10614 - }, - { - "epoch": 0.8164756557187909, - "learning_rate": 0.00024248461388959053, - "loss": 1.2637, - "step": 10615 - }, - { - "epoch": 0.8165525728790093, - "learning_rate": 0.00024228705641100585, - "loss": 0.9578, - "step": 10616 - }, - { - "epoch": 0.8166294900392278, - "learning_rate": 0.00024208957237159057, - "loss": 1.1937, - "step": 10617 - }, - { - "epoch": 0.8167064071994462, - "learning_rate": 0.00024189216178287614, - "loss": 1.1317, - "step": 10618 - }, - { - "epoch": 0.8167833243596646, - "learning_rate": 0.00024169482465638954, - "loss": 1.4476, - "step": 10619 - }, - { - "epoch": 0.8168602415198831, - "learning_rate": 0.00024149756100365317, - "loss": 1.1497, - "step": 10620 - }, - { - "epoch": 0.8169371586801015, - "learning_rate": 0.0002413003708361865, - "loss": 1.1194, - "step": 10621 - }, - { - "epoch": 0.8170140758403199, - "learning_rate": 0.0002411032541655025, - "loss": 0.917, - "step": 10622 - }, - { - "epoch": 
0.8170909930005384, - "learning_rate": 0.00024090621100311126, - "loss": 1.1918, - "step": 10623 - }, - { - "epoch": 0.8171679101607568, - "learning_rate": 0.0002407092413605188, - "loss": 1.0003, - "step": 10624 - }, - { - "epoch": 0.8172448273209754, - "learning_rate": 0.00024051234524922623, - "loss": 1.2944, - "step": 10625 - }, - { - "epoch": 0.8173217444811938, - "learning_rate": 0.0002403155226807305, - "loss": 1.279, - "step": 10626 - }, - { - "epoch": 0.8173986616414122, - "learning_rate": 0.00024011877366652407, - "loss": 1.1764, - "step": 10627 - }, - { - "epoch": 0.8174755788016307, - "learning_rate": 0.00023992209821809514, - "loss": 1.0114, - "step": 10628 - }, - { - "epoch": 0.8175524959618491, - "learning_rate": 0.00023972549634692847, - "loss": 0.8522, - "step": 10629 - }, - { - "epoch": 0.8176294131220675, - "learning_rate": 0.0002395289680645039, - "loss": 1.3193, - "step": 10630 - }, - { - "epoch": 0.817706330282286, - "learning_rate": 0.00023933251338229577, - "loss": 1.3139, - "step": 10631 - }, - { - "epoch": 0.8177832474425044, - "learning_rate": 0.0002391361323117764, - "loss": 1.1277, - "step": 10632 - }, - { - "epoch": 0.8178601646027228, - "learning_rate": 0.0002389398248644124, - "loss": 1.0314, - "step": 10633 - }, - { - "epoch": 0.8179370817629413, - "learning_rate": 0.00023874359105166616, - "loss": 0.8421, - "step": 10634 - }, - { - "epoch": 0.8180139989231597, - "learning_rate": 0.00023854743088499598, - "loss": 1.0115, - "step": 10635 - }, - { - "epoch": 0.8180909160833783, - "learning_rate": 0.0002383513443758557, - "loss": 1.1188, - "step": 10636 - }, - { - "epoch": 0.8181678332435967, - "learning_rate": 0.0002381553315356957, - "loss": 0.9807, - "step": 10637 - }, - { - "epoch": 0.8182447504038151, - "learning_rate": 0.00023795939237596092, - "loss": 1.0553, - "step": 10638 - }, - { - "epoch": 0.8183216675640336, - "learning_rate": 0.00023776352690809245, - "loss": 1.022, - "step": 10639 - }, - { - "epoch": 0.818398584724252, - "learning_rate": 0.00023756773514352708, - "loss": 1.2374, - "step": 10640 - }, - { - "epoch": 0.8184755018844704, - "learning_rate": 0.00023737201709369754, - "loss": 1.1, - "step": 10641 - }, - { - "epoch": 0.8185524190446889, - "learning_rate": 0.00023717637277003117, - "loss": 0.9657, - "step": 10642 - }, - { - "epoch": 0.8186293362049073, - "learning_rate": 0.00023698080218395335, - "loss": 1.0946, - "step": 10643 - }, - { - "epoch": 0.8187062533651258, - "learning_rate": 0.00023678530534688207, - "loss": 1.2417, - "step": 10644 - }, - { - "epoch": 0.8187831705253442, - "learning_rate": 0.0002365898822702336, - "loss": 0.8935, - "step": 10645 - }, - { - "epoch": 0.8188600876855626, - "learning_rate": 0.00023639453296541858, - "loss": 1.094, - "step": 10646 - }, - { - "epoch": 0.8189370048457811, - "learning_rate": 0.00023619925744384373, - "loss": 1.0916, - "step": 10647 - }, - { - "epoch": 0.8190139220059995, - "learning_rate": 0.0002360040557169113, - "loss": 1.0222, - "step": 10648 - }, - { - "epoch": 0.8190908391662179, - "learning_rate": 0.00023580892779601949, - "loss": 1.042, - "step": 10649 - }, - { - "epoch": 0.8191677563264365, - "learning_rate": 0.00023561387369256133, - "loss": 1.0733, - "step": 10650 - }, - { - "epoch": 0.8192446734866549, - "learning_rate": 0.00023541889341792722, - "loss": 1.036, - "step": 10651 - }, - { - "epoch": 0.8193215906468733, - "learning_rate": 0.0002352239869835024, - "loss": 1.2857, - "step": 10652 - }, - { - "epoch": 0.8193985078070918, - "learning_rate": 0.00023502915440066607, - 
"loss": 0.8074, - "step": 10653 - }, - { - "epoch": 0.8194754249673102, - "learning_rate": 0.00023483439568079635, - "loss": 1.1465, - "step": 10654 - }, - { - "epoch": 0.8195523421275287, - "learning_rate": 0.00023463971083526424, - "loss": 1.1189, - "step": 10655 - }, - { - "epoch": 0.8196292592877471, - "learning_rate": 0.00023444509987543887, - "loss": 1.1119, - "step": 10656 - }, - { - "epoch": 0.8197061764479655, - "learning_rate": 0.0002342505628126828, - "loss": 1.2435, - "step": 10657 - }, - { - "epoch": 0.819783093608184, - "learning_rate": 0.00023405609965835512, - "loss": 0.6836, - "step": 10658 - }, - { - "epoch": 0.8198600107684024, - "learning_rate": 0.00023386171042381155, - "loss": 1.1743, - "step": 10659 - }, - { - "epoch": 0.8199369279286208, - "learning_rate": 0.00023366739512040204, - "loss": 1.7128, - "step": 10660 - }, - { - "epoch": 0.8200138450888393, - "learning_rate": 0.00023347315375947314, - "loss": 1.4693, - "step": 10661 - }, - { - "epoch": 0.8200907622490577, - "learning_rate": 0.00023327898635236682, - "loss": 1.5164, - "step": 10662 - }, - { - "epoch": 0.8201676794092763, - "learning_rate": 0.00023308489291042035, - "loss": 1.0043, - "step": 10663 - }, - { - "epoch": 0.8202445965694947, - "learning_rate": 0.00023289087344496745, - "loss": 1.3352, - "step": 10664 - }, - { - "epoch": 0.8203215137297131, - "learning_rate": 0.00023269692796733742, - "loss": 0.928, - "step": 10665 - }, - { - "epoch": 0.8203984308899316, - "learning_rate": 0.00023250305648885388, - "loss": 1.1806, - "step": 10666 - }, - { - "epoch": 0.82047534805015, - "learning_rate": 0.00023230925902083827, - "loss": 1.5782, - "step": 10667 - }, - { - "epoch": 0.8205522652103684, - "learning_rate": 0.000232115535574606, - "loss": 1.1112, - "step": 10668 - }, - { - "epoch": 0.8206291823705869, - "learning_rate": 0.00023192188616146908, - "loss": 1.3983, - "step": 10669 - }, - { - "epoch": 0.8207060995308053, - "learning_rate": 0.00023172831079273476, - "loss": 0.8936, - "step": 10670 - }, - { - "epoch": 0.8207830166910237, - "learning_rate": 0.000231534809479706, - "loss": 1.1173, - "step": 10671 - }, - { - "epoch": 0.8208599338512422, - "learning_rate": 0.00023134138223368145, - "loss": 1.4427, - "step": 10672 - }, - { - "epoch": 0.8209368510114606, - "learning_rate": 0.00023114802906595622, - "loss": 0.9714, - "step": 10673 - }, - { - "epoch": 0.8210137681716791, - "learning_rate": 0.0002309547499878198, - "loss": 1.3299, - "step": 10674 - }, - { - "epoch": 0.8210906853318976, - "learning_rate": 0.00023076154501055812, - "loss": 1.1207, - "step": 10675 - }, - { - "epoch": 0.821167602492116, - "learning_rate": 0.00023056841414545253, - "loss": 1.7848, - "step": 10676 - }, - { - "epoch": 0.8212445196523345, - "learning_rate": 0.00023037535740377986, - "loss": 1.3192, - "step": 10677 - }, - { - "epoch": 0.8213214368125529, - "learning_rate": 0.00023018237479681403, - "loss": 1.3084, - "step": 10678 - }, - { - "epoch": 0.8213983539727713, - "learning_rate": 0.0002299894663358224, - "loss": 1.2936, - "step": 10679 - }, - { - "epoch": 0.8214752711329898, - "learning_rate": 0.00022979663203206895, - "loss": 1.2973, - "step": 10680 - }, - { - "epoch": 0.8215521882932082, - "learning_rate": 0.0002296038718968142, - "loss": 0.8722, - "step": 10681 - }, - { - "epoch": 0.8216291054534267, - "learning_rate": 0.0002294111859413136, - "loss": 1.0495, - "step": 10682 - }, - { - "epoch": 0.8217060226136451, - "learning_rate": 0.00022921857417681785, - "loss": 1.1018, - "step": 10683 - }, - { - "epoch": 
0.8217829397738635, - "learning_rate": 0.00022902603661457393, - "loss": 1.0434, - "step": 10684 - }, - { - "epoch": 0.821859856934082, - "learning_rate": 0.00022883357326582403, - "loss": 1.5451, - "step": 10685 - }, - { - "epoch": 0.8219367740943004, - "learning_rate": 0.00022864118414180686, - "loss": 1.0836, - "step": 10686 - }, - { - "epoch": 0.8220136912545188, - "learning_rate": 0.00022844886925375618, - "loss": 0.9152, - "step": 10687 - }, - { - "epoch": 0.8220906084147374, - "learning_rate": 0.00022825662861290075, - "loss": 1.0145, - "step": 10688 - }, - { - "epoch": 0.8221675255749558, - "learning_rate": 0.0002280644622304664, - "loss": 1.1943, - "step": 10689 - }, - { - "epoch": 0.8222444427351742, - "learning_rate": 0.00022787237011767365, - "loss": 1.2422, - "step": 10690 - }, - { - "epoch": 0.8223213598953927, - "learning_rate": 0.00022768035228573884, - "loss": 1.1322, - "step": 10691 - }, - { - "epoch": 0.8223982770556111, - "learning_rate": 0.0002274884087458744, - "loss": 1.1814, - "step": 10692 - }, - { - "epoch": 0.8224751942158296, - "learning_rate": 0.00022729653950928763, - "loss": 0.9778, - "step": 10693 - }, - { - "epoch": 0.822552111376048, - "learning_rate": 0.0002271047445871826, - "loss": 0.9629, - "step": 10694 - }, - { - "epoch": 0.8226290285362664, - "learning_rate": 0.0002269130239907582, - "loss": 0.7977, - "step": 10695 - }, - { - "epoch": 0.8227059456964849, - "learning_rate": 0.00022672137773120893, - "loss": 1.2588, - "step": 10696 - }, - { - "epoch": 0.8227828628567033, - "learning_rate": 0.00022652980581972553, - "loss": 0.876, - "step": 10697 - }, - { - "epoch": 0.8228597800169217, - "learning_rate": 0.00022633830826749392, - "loss": 0.8721, - "step": 10698 - }, - { - "epoch": 0.8229366971771402, - "learning_rate": 0.00022614688508569558, - "loss": 1.2949, - "step": 10699 - }, - { - "epoch": 0.8230136143373586, - "learning_rate": 0.00022595553628550847, - "loss": 1.1846, - "step": 10700 - }, - { - "epoch": 0.8230905314975772, - "learning_rate": 0.0002257642618781053, - "loss": 0.7855, - "step": 10701 - }, - { - "epoch": 0.8231674486577956, - "learning_rate": 0.00022557306187465494, - "loss": 1.0839, - "step": 10702 - }, - { - "epoch": 0.823244365818014, - "learning_rate": 0.00022538193628632164, - "loss": 0.9837, - "step": 10703 - }, - { - "epoch": 0.8233212829782325, - "learning_rate": 0.0002251908851242655, - "loss": 1.0355, - "step": 10704 - }, - { - "epoch": 0.8233982001384509, - "learning_rate": 0.00022499990839964224, - "loss": 0.9371, - "step": 10705 - }, - { - "epoch": 0.8234751172986693, - "learning_rate": 0.00022480900612360295, - "loss": 1.0933, - "step": 10706 - }, - { - "epoch": 0.8235520344588878, - "learning_rate": 0.00022461817830729435, - "loss": 1.2113, - "step": 10707 - }, - { - "epoch": 0.8236289516191062, - "learning_rate": 0.00022442742496186008, - "loss": 1.0762, - "step": 10708 - }, - { - "epoch": 0.8237058687793246, - "learning_rate": 0.0002242367460984377, - "loss": 1.0536, - "step": 10709 - }, - { - "epoch": 0.8237827859395431, - "learning_rate": 0.0002240461417281613, - "loss": 1.0907, - "step": 10710 - }, - { - "epoch": 0.8238597030997615, - "learning_rate": 0.00022385561186216046, - "loss": 1.6937, - "step": 10711 - }, - { - "epoch": 0.82393662025998, - "learning_rate": 0.00022366515651155999, - "loss": 1.5094, - "step": 10712 - }, - { - "epoch": 0.8240135374201984, - "learning_rate": 0.00022347477568748165, - "loss": 1.0817, - "step": 10713 - }, - { - "epoch": 0.8240904545804169, - "learning_rate": 
0.00022328446940104185, - "loss": 1.0013, - "step": 10714 - }, - { - "epoch": 0.8241673717406354, - "learning_rate": 0.00022309423766335158, - "loss": 0.9153, - "step": 10715 - }, - { - "epoch": 0.8242442889008538, - "learning_rate": 0.00022290408048552008, - "loss": 1.051, - "step": 10716 - }, - { - "epoch": 0.8243212060610722, - "learning_rate": 0.00022271399787865016, - "loss": 1.0477, - "step": 10717 - }, - { - "epoch": 0.8243981232212907, - "learning_rate": 0.0002225239898538411, - "loss": 1.0299, - "step": 10718 - }, - { - "epoch": 0.8244750403815091, - "learning_rate": 0.00022233405642218767, - "loss": 1.0845, - "step": 10719 - }, - { - "epoch": 0.8245519575417276, - "learning_rate": 0.00022214419759477989, - "loss": 1.2588, - "step": 10720 - }, - { - "epoch": 0.824628874701946, - "learning_rate": 0.0002219544133827045, - "loss": 1.2467, - "step": 10721 - }, - { - "epoch": 0.8247057918621644, - "learning_rate": 0.00022176470379704283, - "loss": 1.2005, - "step": 10722 - }, - { - "epoch": 0.8247827090223829, - "learning_rate": 0.0002215750688488722, - "loss": 0.8573, - "step": 10723 - }, - { - "epoch": 0.8248596261826013, - "learning_rate": 0.0002213855085492657, - "loss": 1.2205, - "step": 10724 - }, - { - "epoch": 0.8249365433428197, - "learning_rate": 0.00022119602290929187, - "loss": 1.3672, - "step": 10725 - }, - { - "epoch": 0.8250134605030383, - "learning_rate": 0.00022100661194001482, - "loss": 1.318, - "step": 10726 - }, - { - "epoch": 0.8250903776632567, - "learning_rate": 0.00022081727565249505, - "loss": 1.3921, - "step": 10727 - }, - { - "epoch": 0.8251672948234752, - "learning_rate": 0.00022062801405778722, - "loss": 1.3497, - "step": 10728 - }, - { - "epoch": 0.8252442119836936, - "learning_rate": 0.00022043882716694314, - "loss": 0.9178, - "step": 10729 - }, - { - "epoch": 0.825321129143912, - "learning_rate": 0.00022024971499100965, - "loss": 0.969, - "step": 10730 - }, - { - "epoch": 0.8253980463041305, - "learning_rate": 0.00022006067754102893, - "loss": 0.9037, - "step": 10731 - }, - { - "epoch": 0.8254749634643489, - "learning_rate": 0.00021987171482803914, - "loss": 1.1075, - "step": 10732 - }, - { - "epoch": 0.8255518806245673, - "learning_rate": 0.00021968282686307412, - "loss": 1.393, - "step": 10733 - }, - { - "epoch": 0.8256287977847858, - "learning_rate": 0.0002194940136571627, - "loss": 1.3812, - "step": 10734 - }, - { - "epoch": 0.8257057149450042, - "learning_rate": 0.00021930527522133077, - "loss": 0.9854, - "step": 10735 - }, - { - "epoch": 0.8257826321052226, - "learning_rate": 0.0002191166115665988, - "loss": 0.9693, - "step": 10736 - }, - { - "epoch": 0.8258595492654411, - "learning_rate": 0.00021892802270398216, - "loss": 1.1318, - "step": 10737 - }, - { - "epoch": 0.8259364664256595, - "learning_rate": 0.0002187395086444937, - "loss": 1.0286, - "step": 10738 - }, - { - "epoch": 0.8260133835858781, - "learning_rate": 0.0002185510693991403, - "loss": 1.1151, - "step": 10739 - }, - { - "epoch": 0.8260903007460965, - "learning_rate": 0.0002183627049789262, - "loss": 1.4246, - "step": 10740 - }, - { - "epoch": 0.8261672179063149, - "learning_rate": 0.000218174415394849, - "loss": 1.0629, - "step": 10741 - }, - { - "epoch": 0.8262441350665334, - "learning_rate": 0.0002179862006579033, - "loss": 1.3737, - "step": 10742 - }, - { - "epoch": 0.8263210522267518, - "learning_rate": 0.00021779806077907966, - "loss": 1.0107, - "step": 10743 - }, - { - "epoch": 0.8263979693869702, - "learning_rate": 0.00021760999576936357, - "loss": 1.1733, - "step": 10744 - }, 
- { - "epoch": 0.8264748865471887, - "learning_rate": 0.0002174220056397363, - "loss": 1.6141, - "step": 10745 - }, - { - "epoch": 0.8265518037074071, - "learning_rate": 0.00021723409040117487, - "loss": 1.1024, - "step": 10746 - }, - { - "epoch": 0.8266287208676256, - "learning_rate": 0.00021704625006465134, - "loss": 1.4375, - "step": 10747 - }, - { - "epoch": 0.826705638027844, - "learning_rate": 0.0002168584846411348, - "loss": 1.1996, - "step": 10748 - }, - { - "epoch": 0.8267825551880624, - "learning_rate": 0.00021667079414158875, - "loss": 1.2223, - "step": 10749 - }, - { - "epoch": 0.8268594723482809, - "learning_rate": 0.00021648317857697198, - "loss": 1.2452, - "step": 10750 - }, - { - "epoch": 0.8269363895084993, - "learning_rate": 0.00021629563795824035, - "loss": 1.4031, - "step": 10751 - }, - { - "epoch": 0.8270133066687178, - "learning_rate": 0.00021610817229634417, - "loss": 0.9523, - "step": 10752 - }, - { - "epoch": 0.8270902238289363, - "learning_rate": 0.00021592078160222994, - "loss": 0.6703, - "step": 10753 - }, - { - "epoch": 0.8271671409891547, - "learning_rate": 0.00021573346588683946, - "loss": 1.1687, - "step": 10754 - }, - { - "epoch": 0.8272440581493731, - "learning_rate": 0.00021554622516111043, - "loss": 1.5381, - "step": 10755 - }, - { - "epoch": 0.8273209753095916, - "learning_rate": 0.0002153590594359755, - "loss": 1.1097, - "step": 10756 - }, - { - "epoch": 0.82739789246981, - "learning_rate": 0.0002151719687223642, - "loss": 0.9915, - "step": 10757 - }, - { - "epoch": 0.8274748096300285, - "learning_rate": 0.00021498495303120085, - "loss": 1.5455, - "step": 10758 - }, - { - "epoch": 0.8275517267902469, - "learning_rate": 0.00021479801237340518, - "loss": 1.4729, - "step": 10759 - }, - { - "epoch": 0.8276286439504653, - "learning_rate": 0.00021461114675989297, - "loss": 1.0881, - "step": 10760 - }, - { - "epoch": 0.8277055611106838, - "learning_rate": 0.00021442435620157513, - "loss": 1.2372, - "step": 10761 - }, - { - "epoch": 0.8277824782709022, - "learning_rate": 0.00021423764070935963, - "loss": 1.3053, - "step": 10762 - }, - { - "epoch": 0.8278593954311206, - "learning_rate": 0.0002140510002941481, - "loss": 1.3, - "step": 10763 - }, - { - "epoch": 0.8279363125913392, - "learning_rate": 0.00021386443496683826, - "loss": 0.9578, - "step": 10764 - }, - { - "epoch": 0.8280132297515576, - "learning_rate": 0.00021367794473832496, - "loss": 1.2897, - "step": 10765 - }, - { - "epoch": 0.8280901469117761, - "learning_rate": 0.00021349152961949697, - "loss": 1.1766, - "step": 10766 - }, - { - "epoch": 0.8281670640719945, - "learning_rate": 0.0002133051896212394, - "loss": 1.1793, - "step": 10767 - }, - { - "epoch": 0.8282439812322129, - "learning_rate": 0.00021311892475443258, - "loss": 0.8118, - "step": 10768 - }, - { - "epoch": 0.8283208983924314, - "learning_rate": 0.00021293273502995274, - "loss": 1.0039, - "step": 10769 - }, - { - "epoch": 0.8283978155526498, - "learning_rate": 0.0002127466204586721, - "loss": 0.9122, - "step": 10770 - }, - { - "epoch": 0.8284747327128682, - "learning_rate": 0.00021256058105145826, - "loss": 1.2355, - "step": 10771 - }, - { - "epoch": 0.8285516498730867, - "learning_rate": 0.00021237461681917303, - "loss": 0.9744, - "step": 10772 - }, - { - "epoch": 0.8286285670333051, - "learning_rate": 0.00021218872777267623, - "loss": 0.9132, - "step": 10773 - }, - { - "epoch": 0.8287054841935235, - "learning_rate": 0.00021200291392282184, - "loss": 1.4135, - "step": 10774 - }, - { - "epoch": 0.828782401353742, - "learning_rate": 
0.00021181717528045952, - "loss": 1.5483, - "step": 10775 - }, - { - "epoch": 0.8288593185139604, - "learning_rate": 0.00021163151185643492, - "loss": 1.1269, - "step": 10776 - }, - { - "epoch": 0.828936235674179, - "learning_rate": 0.00021144592366158872, - "loss": 1.0945, - "step": 10777 - }, - { - "epoch": 0.8290131528343974, - "learning_rate": 0.00021126041070675821, - "loss": 1.1136, - "step": 10778 - }, - { - "epoch": 0.8290900699946158, - "learning_rate": 0.00021107497300277544, - "loss": 0.9133, - "step": 10779 - }, - { - "epoch": 0.8291669871548343, - "learning_rate": 0.00021088961056046822, - "loss": 0.9289, - "step": 10780 - }, - { - "epoch": 0.8292439043150527, - "learning_rate": 0.00021070432339066026, - "loss": 1.1636, - "step": 10781 - }, - { - "epoch": 0.8293208214752711, - "learning_rate": 0.00021051911150417037, - "loss": 1.0531, - "step": 10782 - }, - { - "epoch": 0.8293977386354896, - "learning_rate": 0.00021033397491181322, - "loss": 1.2099, - "step": 10783 - }, - { - "epoch": 0.829474655795708, - "learning_rate": 0.00021014891362439986, - "loss": 1.0567, - "step": 10784 - }, - { - "epoch": 0.8295515729559265, - "learning_rate": 0.00020996392765273514, - "loss": 0.7669, - "step": 10785 - }, - { - "epoch": 0.8296284901161449, - "learning_rate": 0.0002097790170076214, - "loss": 1.1889, - "step": 10786 - }, - { - "epoch": 0.8297054072763633, - "learning_rate": 0.00020959418169985556, - "loss": 0.9922, - "step": 10787 - }, - { - "epoch": 0.8297823244365818, - "learning_rate": 0.0002094094217402301, - "loss": 1.3012, - "step": 10788 - }, - { - "epoch": 0.8298592415968002, - "learning_rate": 0.00020922473713953373, - "loss": 1.0292, - "step": 10789 - }, - { - "epoch": 0.8299361587570186, - "learning_rate": 0.00020904012790855014, - "loss": 1.3112, - "step": 10790 - }, - { - "epoch": 0.8300130759172372, - "learning_rate": 0.00020885559405805842, - "loss": 1.3005, - "step": 10791 - }, - { - "epoch": 0.8300899930774556, - "learning_rate": 0.0002086711355988346, - "loss": 1.5351, - "step": 10792 - }, - { - "epoch": 0.830166910237674, - "learning_rate": 0.00020848675254164922, - "loss": 1.3236, - "step": 10793 - }, - { - "epoch": 0.8302438273978925, - "learning_rate": 0.00020830244489726784, - "loss": 1.219, - "step": 10794 - }, - { - "epoch": 0.8303207445581109, - "learning_rate": 0.00020811821267645303, - "loss": 1.4288, - "step": 10795 - }, - { - "epoch": 0.8303976617183294, - "learning_rate": 0.00020793405588996202, - "loss": 1.5462, - "step": 10796 - }, - { - "epoch": 0.8304745788785478, - "learning_rate": 0.0002077499745485485, - "loss": 1.122, - "step": 10797 - }, - { - "epoch": 0.8305514960387662, - "learning_rate": 0.0002075659686629604, - "loss": 1.2538, - "step": 10798 - }, - { - "epoch": 0.8306284131989847, - "learning_rate": 0.00020738203824394213, - "loss": 1.1927, - "step": 10799 - }, - { - "epoch": 0.8307053303592031, - "learning_rate": 0.0002071981833022341, - "loss": 1.1776, - "step": 10800 - }, - { - "epoch": 0.8307822475194215, - "learning_rate": 0.00020701440384857139, - "loss": 1.1312, - "step": 10801 - }, - { - "epoch": 0.83085916467964, - "learning_rate": 0.00020683069989368524, - "loss": 1.0845, - "step": 10802 - }, - { - "epoch": 0.8309360818398585, - "learning_rate": 0.00020664707144830207, - "loss": 1.2887, - "step": 10803 - }, - { - "epoch": 0.831012999000077, - "learning_rate": 0.00020646351852314415, - "loss": 1.193, - "step": 10804 - }, - { - "epoch": 0.8310899161602954, - "learning_rate": 0.00020628004112892973, - "loss": 1.2198, - "step": 10805 - 
}, - { - "epoch": 0.8311668333205138, - "learning_rate": 0.0002060966392763724, - "loss": 1.1254, - "step": 10806 - }, - { - "epoch": 0.8312437504807323, - "learning_rate": 0.00020591331297617994, - "loss": 1.6282, - "step": 10807 - }, - { - "epoch": 0.8313206676409507, - "learning_rate": 0.00020573006223905828, - "loss": 1.0798, - "step": 10808 - }, - { - "epoch": 0.8313975848011691, - "learning_rate": 0.00020554688707570703, - "loss": 1.1077, - "step": 10809 - }, - { - "epoch": 0.8314745019613876, - "learning_rate": 0.00020536378749682227, - "loss": 1.4114, - "step": 10810 - }, - { - "epoch": 0.831551419121606, - "learning_rate": 0.00020518076351309502, - "loss": 1.2684, - "step": 10811 - }, - { - "epoch": 0.8316283362818244, - "learning_rate": 0.00020499781513521248, - "loss": 1.2452, - "step": 10812 - }, - { - "epoch": 0.8317052534420429, - "learning_rate": 0.00020481494237385684, - "loss": 1.1362, - "step": 10813 - }, - { - "epoch": 0.8317821706022613, - "learning_rate": 0.00020463214523970668, - "loss": 1.2861, - "step": 10814 - }, - { - "epoch": 0.8318590877624799, - "learning_rate": 0.00020444942374343584, - "loss": 0.899, - "step": 10815 - }, - { - "epoch": 0.8319360049226983, - "learning_rate": 0.00020426677789571308, - "loss": 1.0705, - "step": 10816 - }, - { - "epoch": 0.8320129220829167, - "learning_rate": 0.0002040842077072037, - "loss": 0.8464, - "step": 10817 - }, - { - "epoch": 0.8320898392431352, - "learning_rate": 0.0002039017131885677, - "loss": 1.2695, - "step": 10818 - }, - { - "epoch": 0.8321667564033536, - "learning_rate": 0.00020371929435046188, - "loss": 1.0746, - "step": 10819 - }, - { - "epoch": 0.832243673563572, - "learning_rate": 0.00020353695120353723, - "loss": 0.9288, - "step": 10820 - }, - { - "epoch": 0.8323205907237905, - "learning_rate": 0.00020335468375844086, - "loss": 1.1749, - "step": 10821 - }, - { - "epoch": 0.8323975078840089, - "learning_rate": 0.0002031724920258161, - "loss": 1.0873, - "step": 10822 - }, - { - "epoch": 0.8324744250442274, - "learning_rate": 0.000202990376016301, - "loss": 1.2259, - "step": 10823 - }, - { - "epoch": 0.8325513422044458, - "learning_rate": 0.00020280833574052966, - "loss": 1.2233, - "step": 10824 - }, - { - "epoch": 0.8326282593646642, - "learning_rate": 0.0002026263712091314, - "loss": 1.1299, - "step": 10825 - }, - { - "epoch": 0.8327051765248827, - "learning_rate": 0.0002024444824327311, - "loss": 1.2207, - "step": 10826 - }, - { - "epoch": 0.8327820936851011, - "learning_rate": 0.00020226266942195025, - "loss": 1.6364, - "step": 10827 - }, - { - "epoch": 0.8328590108453195, - "learning_rate": 0.0002020809321874047, - "loss": 0.9727, - "step": 10828 - }, - { - "epoch": 0.8329359280055381, - "learning_rate": 0.0002018992707397056, - "loss": 1.3102, - "step": 10829 - }, - { - "epoch": 0.8330128451657565, - "learning_rate": 0.0002017176850894613, - "loss": 1.2125, - "step": 10830 - }, - { - "epoch": 0.8330897623259749, - "learning_rate": 0.00020153617524727436, - "loss": 1.2022, - "step": 10831 - }, - { - "epoch": 0.8331666794861934, - "learning_rate": 0.00020135474122374343, - "loss": 0.9467, - "step": 10832 - }, - { - "epoch": 0.8332435966464118, - "learning_rate": 0.00020117338302946252, - "loss": 0.5734, - "step": 10833 - }, - { - "epoch": 0.8333205138066303, - "learning_rate": 0.00020099210067502104, - "loss": 1.1923, - "step": 10834 - }, - { - "epoch": 0.8333974309668487, - "learning_rate": 0.000200810894171005, - "loss": 0.9654, - "step": 10835 - }, - { - "epoch": 0.8334743481270671, - "learning_rate": 
0.00020062976352799478, - "loss": 1.4249, - "step": 10836 - }, - { - "epoch": 0.8335512652872856, - "learning_rate": 0.0002004487087565669, - "loss": 1.0348, - "step": 10837 - }, - { - "epoch": 0.833628182447504, - "learning_rate": 0.00020026772986729324, - "loss": 0.9627, - "step": 10838 - }, - { - "epoch": 0.8337050996077224, - "learning_rate": 0.00020008682687074137, - "loss": 1.2621, - "step": 10839 - }, - { - "epoch": 0.833782016767941, - "learning_rate": 0.00019990599977747398, - "loss": 1.4122, - "step": 10840 - }, - { - "epoch": 0.8338589339281594, - "learning_rate": 0.00019972524859805096, - "loss": 0.7352, - "step": 10841 - }, - { - "epoch": 0.8339358510883779, - "learning_rate": 0.00019954457334302522, - "loss": 1.0593, - "step": 10842 - }, - { - "epoch": 0.8340127682485963, - "learning_rate": 0.00019936397402294732, - "loss": 0.8924, - "step": 10843 - }, - { - "epoch": 0.8340896854088147, - "learning_rate": 0.00019918345064836245, - "loss": 1.2799, - "step": 10844 - }, - { - "epoch": 0.8341666025690332, - "learning_rate": 0.00019900300322981174, - "loss": 1.2259, - "step": 10845 - }, - { - "epoch": 0.8342435197292516, - "learning_rate": 0.00019882263177783139, - "loss": 0.8012, - "step": 10846 - }, - { - "epoch": 0.83432043688947, - "learning_rate": 0.00019864233630295365, - "loss": 1.1015, - "step": 10847 - }, - { - "epoch": 0.8343973540496885, - "learning_rate": 0.00019846211681570592, - "loss": 1.0674, - "step": 10848 - }, - { - "epoch": 0.8344742712099069, - "learning_rate": 0.0001982819733266118, - "loss": 1.1149, - "step": 10849 - }, - { - "epoch": 0.8345511883701254, - "learning_rate": 0.00019810190584618987, - "loss": 1.0664, - "step": 10850 - }, - { - "epoch": 0.8346281055303438, - "learning_rate": 0.00019792191438495454, - "loss": 1.2631, - "step": 10851 - }, - { - "epoch": 0.8347050226905622, - "learning_rate": 0.0001977419989534156, - "loss": 0.9738, - "step": 10852 - }, - { - "epoch": 0.8347819398507808, - "learning_rate": 0.0001975621595620783, - "loss": 1.1769, - "step": 10853 - }, - { - "epoch": 0.8348588570109992, - "learning_rate": 0.0001973823962214444, - "loss": 1.1159, - "step": 10854 - }, - { - "epoch": 0.8349357741712176, - "learning_rate": 0.00019720270894200964, - "loss": 1.1199, - "step": 10855 - }, - { - "epoch": 0.8350126913314361, - "learning_rate": 0.00019702309773426618, - "loss": 1.0585, - "step": 10856 - }, - { - "epoch": 0.8350896084916545, - "learning_rate": 0.00019684356260870212, - "loss": 0.7886, - "step": 10857 - }, - { - "epoch": 0.8351665256518729, - "learning_rate": 0.00019666410357580073, - "loss": 1.6249, - "step": 10858 - }, - { - "epoch": 0.8352434428120914, - "learning_rate": 0.00019648472064604062, - "loss": 1.015, - "step": 10859 - }, - { - "epoch": 0.8353203599723098, - "learning_rate": 0.00019630541382989608, - "loss": 1.0349, - "step": 10860 - }, - { - "epoch": 0.8353972771325283, - "learning_rate": 0.00019612618313783693, - "loss": 1.2132, - "step": 10861 - }, - { - "epoch": 0.8354741942927467, - "learning_rate": 0.0001959470285803289, - "loss": 1.3673, - "step": 10862 - }, - { - "epoch": 0.8355511114529651, - "learning_rate": 0.00019576795016783304, - "loss": 0.7714, - "step": 10863 - }, - { - "epoch": 0.8356280286131836, - "learning_rate": 0.00019558894791080596, - "loss": 0.9195, - "step": 10864 - }, - { - "epoch": 0.835704945773402, - "learning_rate": 0.00019541002181969942, - "loss": 1.4968, - "step": 10865 - }, - { - "epoch": 0.8357818629336204, - "learning_rate": 0.00019523117190496143, - "loss": 0.7924, - "step": 
10866 - }, - { - "epoch": 0.835858780093839, - "learning_rate": 0.00019505239817703513, - "loss": 1.1613, - "step": 10867 - }, - { - "epoch": 0.8359356972540574, - "learning_rate": 0.00019487370064635918, - "loss": 1.0735, - "step": 10868 - }, - { - "epoch": 0.8360126144142759, - "learning_rate": 0.00019469507932336772, - "loss": 1.4261, - "step": 10869 - }, - { - "epoch": 0.8360895315744943, - "learning_rate": 0.00019451653421849124, - "loss": 1.076, - "step": 10870 - }, - { - "epoch": 0.8361664487347127, - "learning_rate": 0.00019433806534215493, - "loss": 1.3461, - "step": 10871 - }, - { - "epoch": 0.8362433658949312, - "learning_rate": 0.00019415967270477975, - "loss": 0.9975, - "step": 10872 - }, - { - "epoch": 0.8363202830551496, - "learning_rate": 0.0001939813563167822, - "loss": 1.06, - "step": 10873 - }, - { - "epoch": 0.836397200215368, - "learning_rate": 0.00019380311618857432, - "loss": 1.4747, - "step": 10874 - }, - { - "epoch": 0.8364741173755865, - "learning_rate": 0.00019362495233056344, - "loss": 1.2087, - "step": 10875 - }, - { - "epoch": 0.8365510345358049, - "learning_rate": 0.0001934468647531532, - "loss": 1.3013, - "step": 10876 - }, - { - "epoch": 0.8366279516960233, - "learning_rate": 0.00019326885346674283, - "loss": 0.9965, - "step": 10877 - }, - { - "epoch": 0.8367048688562418, - "learning_rate": 0.00019309091848172517, - "loss": 1.1463, - "step": 10878 - }, - { - "epoch": 0.8367817860164602, - "learning_rate": 0.00019291305980849105, - "loss": 1.2645, - "step": 10879 - }, - { - "epoch": 0.8368587031766788, - "learning_rate": 0.00019273527745742537, - "loss": 1.1815, - "step": 10880 - }, - { - "epoch": 0.8369356203368972, - "learning_rate": 0.00019255757143890977, - "loss": 1.062, - "step": 10881 - }, - { - "epoch": 0.8370125374971156, - "learning_rate": 0.00019237994176331984, - "loss": 1.0678, - "step": 10882 - }, - { - "epoch": 0.8370894546573341, - "learning_rate": 0.00019220238844102762, - "loss": 1.1956, - "step": 10883 - }, - { - "epoch": 0.8371663718175525, - "learning_rate": 0.00019202491148240125, - "loss": 1.1834, - "step": 10884 - }, - { - "epoch": 0.8372432889777709, - "learning_rate": 0.0001918475108978035, - "loss": 1.1271, - "step": 10885 - }, - { - "epoch": 0.8373202061379894, - "learning_rate": 0.00019167018669759273, - "loss": 1.2681, - "step": 10886 - }, - { - "epoch": 0.8373971232982078, - "learning_rate": 0.00019149293889212334, - "loss": 1.3992, - "step": 10887 - }, - { - "epoch": 0.8374740404584263, - "learning_rate": 0.00019131576749174467, - "loss": 1.0591, - "step": 10888 - }, - { - "epoch": 0.8375509576186447, - "learning_rate": 0.0001911386725068025, - "loss": 1.1505, - "step": 10889 - }, - { - "epoch": 0.8376278747788631, - "learning_rate": 0.00019096165394763754, - "loss": 0.7873, - "step": 10890 - }, - { - "epoch": 0.8377047919390816, - "learning_rate": 0.00019078471182458535, - "loss": 1.3096, - "step": 10891 - }, - { - "epoch": 0.8377817090993, - "learning_rate": 0.00019060784614797848, - "loss": 1.1801, - "step": 10892 - }, - { - "epoch": 0.8378586262595185, - "learning_rate": 0.0001904310569281442, - "loss": 1.2087, - "step": 10893 - }, - { - "epoch": 0.837935543419737, - "learning_rate": 0.00019025434417540527, - "loss": 1.1831, - "step": 10894 - }, - { - "epoch": 0.8380124605799554, - "learning_rate": 0.00019007770790008006, - "loss": 1.3378, - "step": 10895 - }, - { - "epoch": 0.8380893777401738, - "learning_rate": 0.00018990114811248283, - "loss": 1.2176, - "step": 10896 - }, - { - "epoch": 0.8381662949003923, - 
"learning_rate": 0.0001897246648229225, - "loss": 0.8967, - "step": 10897 - }, - { - "epoch": 0.8382432120606107, - "learning_rate": 0.00018954825804170483, - "loss": 1.0882, - "step": 10898 - }, - { - "epoch": 0.8383201292208292, - "learning_rate": 0.0001893719277791301, - "loss": 1.1616, - "step": 10899 - }, - { - "epoch": 0.8383970463810476, - "learning_rate": 0.00018919567404549438, - "loss": 1.2519, - "step": 10900 - }, - { - "epoch": 0.838473963541266, - "learning_rate": 0.00018901949685108945, - "loss": 1.2597, - "step": 10901 - }, - { - "epoch": 0.8385508807014845, - "learning_rate": 0.00018884339620620205, - "loss": 1.0918, - "step": 10902 - }, - { - "epoch": 0.8386277978617029, - "learning_rate": 0.0001886673721211157, - "loss": 1.1742, - "step": 10903 - }, - { - "epoch": 0.8387047150219213, - "learning_rate": 0.00018849142460610792, - "loss": 1.1534, - "step": 10904 - }, - { - "epoch": 0.8387816321821399, - "learning_rate": 0.00018831555367145237, - "loss": 1.1128, - "step": 10905 - }, - { - "epoch": 0.8388585493423583, - "learning_rate": 0.00018813975932741882, - "loss": 1.1676, - "step": 10906 - }, - { - "epoch": 0.8389354665025768, - "learning_rate": 0.0001879640415842721, - "loss": 0.8937, - "step": 10907 - }, - { - "epoch": 0.8390123836627952, - "learning_rate": 0.00018778840045227213, - "loss": 1.0896, - "step": 10908 - }, - { - "epoch": 0.8390893008230136, - "learning_rate": 0.0001876128359416752, - "loss": 1.0042, - "step": 10909 - }, - { - "epoch": 0.8391662179832321, - "learning_rate": 0.00018743734806273216, - "loss": 1.146, - "step": 10910 - }, - { - "epoch": 0.8392431351434505, - "learning_rate": 0.0001872619368256906, - "loss": 1.3506, - "step": 10911 - }, - { - "epoch": 0.8393200523036689, - "learning_rate": 0.00018708660224079298, - "loss": 1.035, - "step": 10912 - }, - { - "epoch": 0.8393969694638874, - "learning_rate": 0.00018691134431827634, - "loss": 0.9361, - "step": 10913 - }, - { - "epoch": 0.8394738866241058, - "learning_rate": 0.00018673616306837492, - "loss": 1.183, - "step": 10914 - }, - { - "epoch": 0.8395508037843242, - "learning_rate": 0.00018656105850131777, - "loss": 0.7885, - "step": 10915 - }, - { - "epoch": 0.8396277209445427, - "learning_rate": 0.00018638603062732918, - "loss": 1.3637, - "step": 10916 - }, - { - "epoch": 0.8397046381047611, - "learning_rate": 0.0001862110794566293, - "loss": 1.2342, - "step": 10917 - }, - { - "epoch": 0.8397815552649797, - "learning_rate": 0.0001860362049994333, - "loss": 1.1524, - "step": 10918 - }, - { - "epoch": 0.8398584724251981, - "learning_rate": 0.0001858614072659532, - "loss": 1.0019, - "step": 10919 - }, - { - "epoch": 0.8399353895854165, - "learning_rate": 0.0001856866862663948, - "loss": 1.1074, - "step": 10920 - }, - { - "epoch": 0.840012306745635, - "learning_rate": 0.00018551204201096077, - "loss": 1.1043, - "step": 10921 - }, - { - "epoch": 0.8400892239058534, - "learning_rate": 0.00018533747450984834, - "loss": 1.2372, - "step": 10922 - }, - { - "epoch": 0.8401661410660718, - "learning_rate": 0.00018516298377325096, - "loss": 0.9992, - "step": 10923 - }, - { - "epoch": 0.8402430582262903, - "learning_rate": 0.00018498856981135703, - "loss": 1.3434, - "step": 10924 - }, - { - "epoch": 0.8403199753865087, - "learning_rate": 0.00018481423263435153, - "loss": 1.2279, - "step": 10925 - }, - { - "epoch": 0.8403968925467272, - "learning_rate": 0.00018463997225241302, - "loss": 0.9389, - "step": 10926 - }, - { - "epoch": 0.8404738097069456, - "learning_rate": 0.00018446578867571767, - "loss": 1.5451, 
- "step": 10927 - }, - { - "epoch": 0.840550726867164, - "learning_rate": 0.00018429168191443586, - "loss": 0.9528, - "step": 10928 - }, - { - "epoch": 0.8406276440273825, - "learning_rate": 0.0001841176519787341, - "loss": 1.0809, - "step": 10929 - }, - { - "epoch": 0.840704561187601, - "learning_rate": 0.00018394369887877394, - "loss": 1.2432, - "step": 10930 - }, - { - "epoch": 0.8407814783478194, - "learning_rate": 0.00018376982262471276, - "loss": 1.2846, - "step": 10931 - }, - { - "epoch": 0.8408583955080379, - "learning_rate": 0.0001835960232267031, - "loss": 1.5822, - "step": 10932 - }, - { - "epoch": 0.8409353126682563, - "learning_rate": 0.00018342230069489385, - "loss": 1.1378, - "step": 10933 - }, - { - "epoch": 0.8410122298284747, - "learning_rate": 0.0001832486550394289, - "loss": 1.1332, - "step": 10934 - }, - { - "epoch": 0.8410891469886932, - "learning_rate": 0.00018307508627044662, - "loss": 1.1435, - "step": 10935 - }, - { - "epoch": 0.8411660641489116, - "learning_rate": 0.00018290159439808274, - "loss": 1.3661, - "step": 10936 - }, - { - "epoch": 0.8412429813091301, - "learning_rate": 0.00018272817943246718, - "loss": 0.9954, - "step": 10937 - }, - { - "epoch": 0.8413198984693485, - "learning_rate": 0.00018255484138372663, - "loss": 1.2505, - "step": 10938 - }, - { - "epoch": 0.8413968156295669, - "learning_rate": 0.0001823815802619817, - "loss": 1.0661, - "step": 10939 - }, - { - "epoch": 0.8414737327897854, - "learning_rate": 0.00018220839607734912, - "loss": 0.9761, - "step": 10940 - }, - { - "epoch": 0.8415506499500038, - "learning_rate": 0.00018203528883994197, - "loss": 1.3057, - "step": 10941 - }, - { - "epoch": 0.8416275671102222, - "learning_rate": 0.00018186225855986783, - "loss": 1.1566, - "step": 10942 - }, - { - "epoch": 0.8417044842704408, - "learning_rate": 0.00018168930524723026, - "loss": 1.2366, - "step": 10943 - }, - { - "epoch": 0.8417814014306592, - "learning_rate": 0.00018151642891212788, - "loss": 1.225, - "step": 10944 - }, - { - "epoch": 0.8418583185908777, - "learning_rate": 0.00018134362956465512, - "loss": 1.2227, - "step": 10945 - }, - { - "epoch": 0.8419352357510961, - "learning_rate": 0.00018117090721490238, - "loss": 0.9353, - "step": 10946 - }, - { - "epoch": 0.8420121529113145, - "learning_rate": 0.0001809982618729551, - "loss": 1.1659, - "step": 10947 - }, - { - "epoch": 0.842089070071533, - "learning_rate": 0.00018082569354889338, - "loss": 1.3555, - "step": 10948 - }, - { - "epoch": 0.8421659872317514, - "learning_rate": 0.00018065320225279453, - "loss": 0.8292, - "step": 10949 - }, - { - "epoch": 0.8422429043919698, - "learning_rate": 0.0001804807879947301, - "loss": 1.7349, - "step": 10950 - }, - { - "epoch": 0.8423198215521883, - "learning_rate": 0.00018030845078476776, - "loss": 1.0104, - "step": 10951 - }, - { - "epoch": 0.8423967387124067, - "learning_rate": 0.00018013619063297028, - "loss": 1.321, - "step": 10952 - }, - { - "epoch": 0.8424736558726252, - "learning_rate": 0.00017996400754939575, - "loss": 1.0762, - "step": 10953 - }, - { - "epoch": 0.8425505730328436, - "learning_rate": 0.00017979190154409898, - "loss": 0.8755, - "step": 10954 - }, - { - "epoch": 0.842627490193062, - "learning_rate": 0.00017961987262712892, - "loss": 1.2364, - "step": 10955 - }, - { - "epoch": 0.8427044073532806, - "learning_rate": 0.00017944792080853056, - "loss": 0.9473, - "step": 10956 - }, - { - "epoch": 0.842781324513499, - "learning_rate": 0.00017927604609834435, - "loss": 1.2731, - "step": 10957 - }, - { - "epoch": 0.8428582416737174, - 
"learning_rate": 0.0001791042485066061, - "loss": 0.9911, - "step": 10958 - }, - { - "epoch": 0.8429351588339359, - "learning_rate": 0.0001789325280433473, - "loss": 1.261, - "step": 10959 - }, - { - "epoch": 0.8430120759941543, - "learning_rate": 0.00017876088471859542, - "loss": 1.2491, - "step": 10960 - }, - { - "epoch": 0.8430889931543727, - "learning_rate": 0.00017858931854237227, - "loss": 1.2727, - "step": 10961 - }, - { - "epoch": 0.8431659103145912, - "learning_rate": 0.00017841782952469565, - "loss": 1.2604, - "step": 10962 - }, - { - "epoch": 0.8432428274748096, - "learning_rate": 0.0001782464176755794, - "loss": 1.2202, - "step": 10963 - }, - { - "epoch": 0.8433197446350281, - "learning_rate": 0.00017807508300503257, - "loss": 0.8169, - "step": 10964 - }, - { - "epoch": 0.8433966617952465, - "learning_rate": 0.00017790382552305907, - "loss": 1.2243, - "step": 10965 - }, - { - "epoch": 0.8434735789554649, - "learning_rate": 0.00017773264523965925, - "loss": 1.3926, - "step": 10966 - }, - { - "epoch": 0.8435504961156834, - "learning_rate": 0.00017756154216482783, - "loss": 1.0529, - "step": 10967 - }, - { - "epoch": 0.8436274132759018, - "learning_rate": 0.00017739051630855663, - "loss": 1.1103, - "step": 10968 - }, - { - "epoch": 0.8437043304361203, - "learning_rate": 0.0001772195676808318, - "loss": 1.0959, - "step": 10969 - }, - { - "epoch": 0.8437812475963388, - "learning_rate": 0.0001770486962916344, - "loss": 0.7226, - "step": 10970 - }, - { - "epoch": 0.8438581647565572, - "learning_rate": 0.00017687790215094274, - "loss": 0.8492, - "step": 10971 - }, - { - "epoch": 0.8439350819167757, - "learning_rate": 0.00017670718526872887, - "loss": 1.2645, - "step": 10972 - }, - { - "epoch": 0.8440119990769941, - "learning_rate": 0.0001765365456549623, - "loss": 1.0794, - "step": 10973 - }, - { - "epoch": 0.8440889162372125, - "learning_rate": 0.00017636598331960556, - "loss": 1.0627, - "step": 10974 - }, - { - "epoch": 0.844165833397431, - "learning_rate": 0.00017619549827261837, - "loss": 1.168, - "step": 10975 - }, - { - "epoch": 0.8442427505576494, - "learning_rate": 0.0001760250905239561, - "loss": 0.9741, - "step": 10976 - }, - { - "epoch": 0.8443196677178678, - "learning_rate": 0.00017585476008356843, - "loss": 1.2224, - "step": 10977 - }, - { - "epoch": 0.8443965848780863, - "learning_rate": 0.00017568450696140141, - "loss": 1.1392, - "step": 10978 - }, - { - "epoch": 0.8444735020383047, - "learning_rate": 0.00017551433116739612, - "loss": 1.1975, - "step": 10979 - }, - { - "epoch": 0.8445504191985231, - "learning_rate": 0.00017534423271148942, - "loss": 0.9705, - "step": 10980 - }, - { - "epoch": 0.8446273363587417, - "learning_rate": 0.00017517421160361319, - "loss": 0.9703, - "step": 10981 - }, - { - "epoch": 0.8447042535189601, - "learning_rate": 0.000175004267853696, - "loss": 1.2962, - "step": 10982 - }, - { - "epoch": 0.8447811706791786, - "learning_rate": 0.0001748344014716599, - "loss": 1.2797, - "step": 10983 - }, - { - "epoch": 0.844858087839397, - "learning_rate": 0.00017466461246742448, - "loss": 1.0698, - "step": 10984 - }, - { - "epoch": 0.8449350049996154, - "learning_rate": 0.00017449490085090364, - "loss": 1.005, - "step": 10985 - }, - { - "epoch": 0.8450119221598339, - "learning_rate": 0.00017432526663200694, - "loss": 0.8879, - "step": 10986 - }, - { - "epoch": 0.8450888393200523, - "learning_rate": 0.00017415570982063944, - "loss": 1.1531, - "step": 10987 - }, - { - "epoch": 0.8451657564802707, - "learning_rate": 0.00017398623042670175, - "loss": 1.3468, 
- "step": 10988 - }, - { - "epoch": 0.8452426736404892, - "learning_rate": 0.0001738168284600898, - "loss": 1.2146, - "step": 10989 - }, - { - "epoch": 0.8453195908007076, - "learning_rate": 0.00017364750393069562, - "loss": 1.4544, - "step": 10990 - }, - { - "epoch": 0.8453965079609261, - "learning_rate": 0.000173478256848406, - "loss": 0.9959, - "step": 10991 - }, - { - "epoch": 0.8454734251211445, - "learning_rate": 0.0001733090872231034, - "loss": 1.2112, - "step": 10992 - }, - { - "epoch": 0.8455503422813629, - "learning_rate": 0.0001731399950646657, - "loss": 0.7243, - "step": 10993 - }, - { - "epoch": 0.8456272594415815, - "learning_rate": 0.00017297098038296637, - "loss": 1.3572, - "step": 10994 - }, - { - "epoch": 0.8457041766017999, - "learning_rate": 0.00017280204318787518, - "loss": 1.1198, - "step": 10995 - }, - { - "epoch": 0.8457810937620183, - "learning_rate": 0.0001726331834892554, - "loss": 1.2802, - "step": 10996 - }, - { - "epoch": 0.8458580109222368, - "learning_rate": 0.0001724644012969671, - "loss": 0.6415, - "step": 10997 - }, - { - "epoch": 0.8459349280824552, - "learning_rate": 0.00017229569662086631, - "loss": 1.3411, - "step": 10998 - }, - { - "epoch": 0.8460118452426736, - "learning_rate": 0.00017212706947080343, - "loss": 1.438, - "step": 10999 - }, - { - "epoch": 0.8460887624028921, - "learning_rate": 0.00017195851985662492, - "loss": 1.42, - "step": 11000 - }, - { - "epoch": 0.8461656795631105, - "learning_rate": 0.00017179004778817243, - "loss": 1.189, - "step": 11001 - }, - { - "epoch": 0.846242596723329, - "learning_rate": 0.00017162165327528307, - "loss": 1.1512, - "step": 11002 - }, - { - "epoch": 0.8463195138835474, - "learning_rate": 0.00017145333632778999, - "loss": 1.4032, - "step": 11003 - }, - { - "epoch": 0.8463964310437658, - "learning_rate": 0.00017128509695552114, - "loss": 1.7623, - "step": 11004 - }, - { - "epoch": 0.8464733482039843, - "learning_rate": 0.00017111693516830019, - "loss": 0.8291, - "step": 11005 - }, - { - "epoch": 0.8465502653642027, - "learning_rate": 0.00017094885097594642, - "loss": 1.1812, - "step": 11006 - }, - { - "epoch": 0.8466271825244212, - "learning_rate": 0.00017078084438827435, - "loss": 1.0123, - "step": 11007 - }, - { - "epoch": 0.8467040996846397, - "learning_rate": 0.0001706129154150941, - "loss": 1.2354, - "step": 11008 - }, - { - "epoch": 0.8467810168448581, - "learning_rate": 0.00017044506406621102, - "loss": 1.1638, - "step": 11009 - }, - { - "epoch": 0.8468579340050766, - "learning_rate": 0.00017027729035142607, - "loss": 1.0892, - "step": 11010 - }, - { - "epoch": 0.846934851165295, - "learning_rate": 0.0001701095942805363, - "loss": 1.315, - "step": 11011 - }, - { - "epoch": 0.8470117683255134, - "learning_rate": 0.00016994197586333315, - "loss": 1.2207, - "step": 11012 - }, - { - "epoch": 0.8470886854857319, - "learning_rate": 0.00016977443510960433, - "loss": 1.2968, - "step": 11013 - }, - { - "epoch": 0.8471656026459503, - "learning_rate": 0.0001696069720291325, - "loss": 0.9474, - "step": 11014 - }, - { - "epoch": 0.8472425198061687, - "learning_rate": 0.000169439586631696, - "loss": 1.1572, - "step": 11015 - }, - { - "epoch": 0.8473194369663872, - "learning_rate": 0.00016927227892706852, - "loss": 1.0062, - "step": 11016 - }, - { - "epoch": 0.8473963541266056, - "learning_rate": 0.00016910504892501992, - "loss": 1.2504, - "step": 11017 - }, - { - "epoch": 0.847473271286824, - "learning_rate": 0.00016893789663531471, - "loss": 1.1055, - "step": 11018 - }, - { - "epoch": 0.8475501884470426, - 
"learning_rate": 0.00016877082206771227, - "loss": 1.0222, - "step": 11019 - }, - { - "epoch": 0.847627105607261, - "learning_rate": 0.00016860382523196926, - "loss": 1.2512, - "step": 11020 - }, - { - "epoch": 0.8477040227674795, - "learning_rate": 0.00016843690613783642, - "loss": 0.9854, - "step": 11021 - }, - { - "epoch": 0.8477809399276979, - "learning_rate": 0.00016827006479506042, - "loss": 1.1565, - "step": 11022 - }, - { - "epoch": 0.8478578570879163, - "learning_rate": 0.00016810330121338313, - "loss": 1.1712, - "step": 11023 - }, - { - "epoch": 0.8479347742481348, - "learning_rate": 0.00016793661540254195, - "loss": 1.1295, - "step": 11024 - }, - { - "epoch": 0.8480116914083532, - "learning_rate": 0.00016777000737227028, - "loss": 0.8593, - "step": 11025 - }, - { - "epoch": 0.8480886085685716, - "learning_rate": 0.0001676034771322963, - "loss": 1.0704, - "step": 11026 - }, - { - "epoch": 0.8481655257287901, - "learning_rate": 0.0001674370246923439, - "loss": 1.2174, - "step": 11027 - }, - { - "epoch": 0.8482424428890085, - "learning_rate": 0.0001672706500621325, - "loss": 1.0962, - "step": 11028 - }, - { - "epoch": 0.848319360049227, - "learning_rate": 0.0001671043532513763, - "loss": 0.9685, - "step": 11029 - }, - { - "epoch": 0.8483962772094454, - "learning_rate": 0.0001669381342697864, - "loss": 1.2179, - "step": 11030 - }, - { - "epoch": 0.8484731943696638, - "learning_rate": 0.00016677199312706848, - "loss": 1.2541, - "step": 11031 - }, - { - "epoch": 0.8485501115298824, - "learning_rate": 0.00016660592983292266, - "loss": 1.2204, - "step": 11032 - }, - { - "epoch": 0.8486270286901008, - "learning_rate": 0.0001664399443970465, - "loss": 1.0257, - "step": 11033 - }, - { - "epoch": 0.8487039458503192, - "learning_rate": 0.00016627403682913179, - "loss": 1.3559, - "step": 11034 - }, - { - "epoch": 0.8487808630105377, - "learning_rate": 0.00016610820713886605, - "loss": 0.806, - "step": 11035 - }, - { - "epoch": 0.8488577801707561, - "learning_rate": 0.00016594245533593238, - "loss": 0.9572, - "step": 11036 - }, - { - "epoch": 0.8489346973309745, - "learning_rate": 0.0001657767814300089, - "loss": 0.7416, - "step": 11037 - }, - { - "epoch": 0.849011614491193, - "learning_rate": 0.00016561118543076937, - "loss": 1.2735, - "step": 11038 - }, - { - "epoch": 0.8490885316514114, - "learning_rate": 0.00016544566734788386, - "loss": 0.7056, - "step": 11039 - }, - { - "epoch": 0.8491654488116299, - "learning_rate": 0.00016528022719101648, - "loss": 1.0106, - "step": 11040 - }, - { - "epoch": 0.8492423659718483, - "learning_rate": 0.0001651148649698277, - "loss": 0.8831, - "step": 11041 - }, - { - "epoch": 0.8493192831320667, - "learning_rate": 0.00016494958069397309, - "loss": 1.3064, - "step": 11042 - }, - { - "epoch": 0.8493962002922852, - "learning_rate": 0.00016478437437310363, - "loss": 1.1316, - "step": 11043 - }, - { - "epoch": 0.8494731174525036, - "learning_rate": 0.00016461924601686656, - "loss": 1.179, - "step": 11044 - }, - { - "epoch": 0.849550034612722, - "learning_rate": 0.0001644541956349032, - "loss": 1.3716, - "step": 11045 - }, - { - "epoch": 0.8496269517729406, - "learning_rate": 0.00016428922323685101, - "loss": 0.8783, - "step": 11046 - }, - { - "epoch": 0.849703868933159, - "learning_rate": 0.0001641243288323433, - "loss": 1.3848, - "step": 11047 - }, - { - "epoch": 0.8497807860933775, - "learning_rate": 0.0001639595124310083, - "loss": 0.7817, - "step": 11048 - }, - { - "epoch": 0.8498577032535959, - "learning_rate": 0.00016379477404246973, - "loss": 0.8854, - 
"step": 11049 - }, - { - "epoch": 0.8499346204138143, - "learning_rate": 0.00016363011367634685, - "loss": 1.3142, - "step": 11050 - }, - { - "epoch": 0.8500115375740328, - "learning_rate": 0.00016346553134225395, - "loss": 1.1499, - "step": 11051 - }, - { - "epoch": 0.8500884547342512, - "learning_rate": 0.00016330102704980204, - "loss": 0.986, - "step": 11052 - }, - { - "epoch": 0.8501653718944696, - "learning_rate": 0.00016313660080859643, - "loss": 0.9656, - "step": 11053 - }, - { - "epoch": 0.8502422890546881, - "learning_rate": 0.00016297225262823745, - "loss": 1.2177, - "step": 11054 - }, - { - "epoch": 0.8503192062149065, - "learning_rate": 0.0001628079825183224, - "loss": 0.8681, - "step": 11055 - }, - { - "epoch": 0.8503961233751249, - "learning_rate": 0.00016264379048844263, - "loss": 1.1863, - "step": 11056 - }, - { - "epoch": 0.8504730405353434, - "learning_rate": 0.00016247967654818595, - "loss": 1.0493, - "step": 11057 - }, - { - "epoch": 0.8505499576955619, - "learning_rate": 0.00016231564070713473, - "loss": 1.2471, - "step": 11058 - }, - { - "epoch": 0.8506268748557804, - "learning_rate": 0.0001621516829748671, - "loss": 1.0571, - "step": 11059 - }, - { - "epoch": 0.8507037920159988, - "learning_rate": 0.00016198780336095726, - "loss": 1.2245, - "step": 11060 - }, - { - "epoch": 0.8507807091762172, - "learning_rate": 0.00016182400187497386, - "loss": 1.179, - "step": 11061 - }, - { - "epoch": 0.8508576263364357, - "learning_rate": 0.0001616602785264818, - "loss": 1.7001, - "step": 11062 - }, - { - "epoch": 0.8509345434966541, - "learning_rate": 0.0001614966333250409, - "loss": 0.9951, - "step": 11063 - }, - { - "epoch": 0.8510114606568725, - "learning_rate": 0.00016133306628020633, - "loss": 1.1903, - "step": 11064 - }, - { - "epoch": 0.851088377817091, - "learning_rate": 0.00016116957740152898, - "loss": 1.2345, - "step": 11065 - }, - { - "epoch": 0.8511652949773094, - "learning_rate": 0.0001610061666985559, - "loss": 1.3622, - "step": 11066 - }, - { - "epoch": 0.8512422121375279, - "learning_rate": 0.00016084283418082757, - "loss": 0.9611, - "step": 11067 - }, - { - "epoch": 0.8513191292977463, - "learning_rate": 0.00016067957985788222, - "loss": 1.3876, - "step": 11068 - }, - { - "epoch": 0.8513960464579647, - "learning_rate": 0.00016051640373925192, - "loss": 1.1933, - "step": 11069 - }, - { - "epoch": 0.8514729636181833, - "learning_rate": 0.00016035330583446467, - "loss": 0.8288, - "step": 11070 - }, - { - "epoch": 0.8515498807784017, - "learning_rate": 0.0001601902861530442, - "loss": 1.235, - "step": 11071 - }, - { - "epoch": 0.8516267979386201, - "learning_rate": 0.00016002734470450908, - "loss": 0.9444, - "step": 11072 - }, - { - "epoch": 0.8517037150988386, - "learning_rate": 0.00015986448149837368, - "loss": 1.292, - "step": 11073 - }, - { - "epoch": 0.851780632259057, - "learning_rate": 0.00015970169654414807, - "loss": 1.448, - "step": 11074 - }, - { - "epoch": 0.8518575494192755, - "learning_rate": 0.00015953898985133713, - "loss": 1.6343, - "step": 11075 - }, - { - "epoch": 0.8519344665794939, - "learning_rate": 0.0001593763614294416, - "loss": 0.9717, - "step": 11076 - }, - { - "epoch": 0.8520113837397123, - "learning_rate": 0.00015921381128795759, - "loss": 1.1789, - "step": 11077 - }, - { - "epoch": 0.8520883008999308, - "learning_rate": 0.00015905133943637612, - "loss": 1.2542, - "step": 11078 - }, - { - "epoch": 0.8521652180601492, - "learning_rate": 0.00015888894588418512, - "loss": 1.2433, - "step": 11079 - }, - { - "epoch": 0.8522421352203676, - 
"learning_rate": 0.00015872663064086585, - "loss": 1.2715, - "step": 11080 - }, - { - "epoch": 0.8523190523805861, - "learning_rate": 0.0001585643937158962, - "loss": 1.5103, - "step": 11081 - }, - { - "epoch": 0.8523959695408045, - "learning_rate": 0.00015840223511874997, - "loss": 1.3577, - "step": 11082 - }, - { - "epoch": 0.852472886701023, - "learning_rate": 0.00015824015485889521, - "loss": 1.3692, - "step": 11083 - }, - { - "epoch": 0.8525498038612415, - "learning_rate": 0.00015807815294579635, - "loss": 1.246, - "step": 11084 - }, - { - "epoch": 0.8526267210214599, - "learning_rate": 0.00015791622938891254, - "loss": 1.1344, - "step": 11085 - }, - { - "epoch": 0.8527036381816784, - "learning_rate": 0.0001577543841976985, - "loss": 1.0745, - "step": 11086 - }, - { - "epoch": 0.8527805553418968, - "learning_rate": 0.0001575926173816052, - "loss": 1.0883, - "step": 11087 - }, - { - "epoch": 0.8528574725021152, - "learning_rate": 0.0001574309289500782, - "loss": 0.9141, - "step": 11088 - }, - { - "epoch": 0.8529343896623337, - "learning_rate": 0.000157269318912558, - "loss": 1.255, - "step": 11089 - }, - { - "epoch": 0.8530113068225521, - "learning_rate": 0.00015710778727848174, - "loss": 1.2988, - "step": 11090 - }, - { - "epoch": 0.8530882239827705, - "learning_rate": 0.00015694633405728147, - "loss": 1.0386, - "step": 11091 - }, - { - "epoch": 0.853165141142989, - "learning_rate": 0.00015678495925838438, - "loss": 1.1427, - "step": 11092 - }, - { - "epoch": 0.8532420583032074, - "learning_rate": 0.00015662366289121338, - "loss": 1.1574, - "step": 11093 - }, - { - "epoch": 0.8533189754634259, - "learning_rate": 0.00015646244496518648, - "loss": 1.198, - "step": 11094 - }, - { - "epoch": 0.8533958926236443, - "learning_rate": 0.00015630130548971777, - "loss": 1.2668, - "step": 11095 - }, - { - "epoch": 0.8534728097838628, - "learning_rate": 0.00015614024447421627, - "loss": 1.2075, - "step": 11096 - }, - { - "epoch": 0.8535497269440813, - "learning_rate": 0.00015597926192808658, - "loss": 1.3358, - "step": 11097 - }, - { - "epoch": 0.8536266441042997, - "learning_rate": 0.0001558183578607284, - "loss": 1.3366, - "step": 11098 - }, - { - "epoch": 0.8537035612645181, - "learning_rate": 0.00015565753228153717, - "loss": 1.1686, - "step": 11099 - }, - { - "epoch": 0.8537804784247366, - "learning_rate": 0.00015549678519990323, - "loss": 1.1919, - "step": 11100 - }, - { - "epoch": 0.853857395584955, - "learning_rate": 0.00015533611662521408, - "loss": 0.9732, - "step": 11101 - }, - { - "epoch": 0.8539343127451734, - "learning_rate": 0.00015517552656684985, - "loss": 1.2336, - "step": 11102 - }, - { - "epoch": 0.8540112299053919, - "learning_rate": 0.00015501501503418808, - "loss": 1.2513, - "step": 11103 - }, - { - "epoch": 0.8540881470656103, - "learning_rate": 0.00015485458203660156, - "loss": 1.1273, - "step": 11104 - }, - { - "epoch": 0.8541650642258288, - "learning_rate": 0.00015469422758345786, - "loss": 1.333, - "step": 11105 - }, - { - "epoch": 0.8542419813860472, - "learning_rate": 0.00015453395168412026, - "loss": 0.917, - "step": 11106 - }, - { - "epoch": 0.8543188985462656, - "learning_rate": 0.0001543737543479475, - "loss": 1.2608, - "step": 11107 - }, - { - "epoch": 0.8543958157064842, - "learning_rate": 0.0001542136355842932, - "loss": 1.4238, - "step": 11108 - }, - { - "epoch": 0.8544727328667026, - "learning_rate": 0.00015405359540250757, - "loss": 0.9659, - "step": 11109 - }, - { - "epoch": 0.854549650026921, - "learning_rate": 0.0001538936338119355, - "loss": 1.1839, - 
"step": 11110 - }, - { - "epoch": 0.8546265671871395, - "learning_rate": 0.0001537337508219165, - "loss": 1.2577, - "step": 11111 - }, - { - "epoch": 0.8547034843473579, - "learning_rate": 0.00015357394644178706, - "loss": 0.7599, - "step": 11112 - }, - { - "epoch": 0.8547804015075764, - "learning_rate": 0.00015341422068087778, - "loss": 1.1779, - "step": 11113 - }, - { - "epoch": 0.8548573186677948, - "learning_rate": 0.00015325457354851618, - "loss": 1.4574, - "step": 11114 - }, - { - "epoch": 0.8549342358280132, - "learning_rate": 0.0001530950050540233, - "loss": 0.805, - "step": 11115 - }, - { - "epoch": 0.8550111529882317, - "learning_rate": 0.0001529355152067165, - "loss": 0.7373, - "step": 11116 - }, - { - "epoch": 0.8550880701484501, - "learning_rate": 0.0001527761040159092, - "loss": 1.3628, - "step": 11117 - }, - { - "epoch": 0.8551649873086685, - "learning_rate": 0.00015261677149090908, - "loss": 1.4228, - "step": 11118 - }, - { - "epoch": 0.855241904468887, - "learning_rate": 0.00015245751764101989, - "loss": 1.4497, - "step": 11119 - }, - { - "epoch": 0.8553188216291054, - "learning_rate": 0.0001522983424755407, - "loss": 1.2204, - "step": 11120 - }, - { - "epoch": 0.8553957387893238, - "learning_rate": 0.0001521392460037659, - "loss": 1.2439, - "step": 11121 - }, - { - "epoch": 0.8554726559495424, - "learning_rate": 0.00015198022823498486, - "loss": 1.0578, - "step": 11122 - }, - { - "epoch": 0.8555495731097608, - "learning_rate": 0.0001518212891784837, - "loss": 1.174, - "step": 11123 - }, - { - "epoch": 0.8556264902699793, - "learning_rate": 0.00015166242884354198, - "loss": 1.1494, - "step": 11124 - }, - { - "epoch": 0.8557034074301977, - "learning_rate": 0.00015150364723943643, - "loss": 1.0044, - "step": 11125 - }, - { - "epoch": 0.8557803245904161, - "learning_rate": 0.00015134494437543834, - "loss": 1.2293, - "step": 11126 - }, - { - "epoch": 0.8558572417506346, - "learning_rate": 0.00015118632026081457, - "loss": 1.3714, - "step": 11127 - }, - { - "epoch": 0.855934158910853, - "learning_rate": 0.00015102777490482727, - "loss": 1.324, - "step": 11128 - }, - { - "epoch": 0.8560110760710714, - "learning_rate": 0.00015086930831673402, - "loss": 1.3615, - "step": 11129 - }, - { - "epoch": 0.8560879932312899, - "learning_rate": 0.00015071092050578756, - "loss": 1.0112, - "step": 11130 - }, - { - "epoch": 0.8561649103915083, - "learning_rate": 0.00015055261148123682, - "loss": 1.5117, - "step": 11131 - }, - { - "epoch": 0.8562418275517268, - "learning_rate": 0.00015039438125232562, - "loss": 0.9133, - "step": 11132 - }, - { - "epoch": 0.8563187447119452, - "learning_rate": 0.00015023622982829305, - "loss": 0.9773, - "step": 11133 - }, - { - "epoch": 0.8563956618721636, - "learning_rate": 0.00015007815721837354, - "loss": 1.2601, - "step": 11134 - }, - { - "epoch": 0.8564725790323822, - "learning_rate": 0.0001499201634317972, - "loss": 1.1649, - "step": 11135 - }, - { - "epoch": 0.8565494961926006, - "learning_rate": 0.00014976224847779003, - "loss": 1.318, - "step": 11136 - }, - { - "epoch": 0.856626413352819, - "learning_rate": 0.00014960441236557193, - "loss": 1.1077, - "step": 11137 - }, - { - "epoch": 0.8567033305130375, - "learning_rate": 0.0001494466551043594, - "loss": 1.1102, - "step": 11138 - }, - { - "epoch": 0.8567802476732559, - "learning_rate": 0.00014928897670336456, - "loss": 1.279, - "step": 11139 - }, - { - "epoch": 0.8568571648334743, - "learning_rate": 0.00014913137717179387, - "loss": 1.0541, - "step": 11140 - }, - { - "epoch": 0.8569340819936928, - 
"learning_rate": 0.00014897385651884998, - "loss": 1.0891, - "step": 11141 - }, - { - "epoch": 0.8570109991539112, - "learning_rate": 0.00014881641475373054, - "loss": 1.2631, - "step": 11142 - }, - { - "epoch": 0.8570879163141297, - "learning_rate": 0.00014865905188562867, - "loss": 1.088, - "step": 11143 - }, - { - "epoch": 0.8571648334743481, - "learning_rate": 0.00014850176792373338, - "loss": 1.1013, - "step": 11144 - }, - { - "epoch": 0.8572417506345665, - "learning_rate": 0.00014834456287722848, - "loss": 1.132, - "step": 11145 - }, - { - "epoch": 0.857318667794785, - "learning_rate": 0.00014818743675529278, - "loss": 1.3541, - "step": 11146 - }, - { - "epoch": 0.8573955849550035, - "learning_rate": 0.00014803038956710162, - "loss": 1.2979, - "step": 11147 - }, - { - "epoch": 0.8574725021152219, - "learning_rate": 0.0001478734213218252, - "loss": 0.6047, - "step": 11148 - }, - { - "epoch": 0.8575494192754404, - "learning_rate": 0.00014771653202862877, - "loss": 1.199, - "step": 11149 - }, - { - "epoch": 0.8576263364356588, - "learning_rate": 0.00014755972169667325, - "loss": 1.1863, - "step": 11150 - }, - { - "epoch": 0.8577032535958773, - "learning_rate": 0.00014740299033511495, - "loss": 1.0097, - "step": 11151 - }, - { - "epoch": 0.8577801707560957, - "learning_rate": 0.00014724633795310587, - "loss": 1.2456, - "step": 11152 - }, - { - "epoch": 0.8578570879163141, - "learning_rate": 0.0001470897645597929, - "loss": 0.9478, - "step": 11153 - }, - { - "epoch": 0.8579340050765326, - "learning_rate": 0.00014693327016431868, - "loss": 1.3032, - "step": 11154 - }, - { - "epoch": 0.858010922236751, - "learning_rate": 0.00014677685477582075, - "loss": 1.2877, - "step": 11155 - }, - { - "epoch": 0.8580878393969694, - "learning_rate": 0.00014662051840343265, - "loss": 1.2861, - "step": 11156 - }, - { - "epoch": 0.8581647565571879, - "learning_rate": 0.00014646426105628275, - "loss": 1.3966, - "step": 11157 - }, - { - "epoch": 0.8582416737174063, - "learning_rate": 0.00014630808274349573, - "loss": 1.1975, - "step": 11158 - }, - { - "epoch": 0.8583185908776247, - "learning_rate": 0.0001461519834741903, - "loss": 1.2797, - "step": 11159 - }, - { - "epoch": 0.8583955080378433, - "learning_rate": 0.00014599596325748116, - "loss": 0.9818, - "step": 11160 - }, - { - "epoch": 0.8584724251980617, - "learning_rate": 0.00014584002210247922, - "loss": 1.2435, - "step": 11161 - }, - { - "epoch": 0.8585493423582802, - "learning_rate": 0.00014568416001828964, - "loss": 1.1772, - "step": 11162 - }, - { - "epoch": 0.8586262595184986, - "learning_rate": 0.0001455283770140134, - "loss": 0.9049, - "step": 11163 - }, - { - "epoch": 0.858703176678717, - "learning_rate": 0.00014537267309874664, - "loss": 1.3119, - "step": 11164 - }, - { - "epoch": 0.8587800938389355, - "learning_rate": 0.0001452170482815811, - "loss": 0.9825, - "step": 11165 - }, - { - "epoch": 0.8588570109991539, - "learning_rate": 0.00014506150257160438, - "loss": 1.3915, - "step": 11166 - }, - { - "epoch": 0.8589339281593723, - "learning_rate": 0.00014490603597789854, - "loss": 1.8288, - "step": 11167 - }, - { - "epoch": 0.8590108453195908, - "learning_rate": 0.0001447506485095416, - "loss": 1.139, - "step": 11168 - }, - { - "epoch": 0.8590877624798092, - "learning_rate": 0.00014459534017560654, - "loss": 1.1336, - "step": 11169 - }, - { - "epoch": 0.8591646796400277, - "learning_rate": 0.00014444011098516192, - "loss": 1.4344, - "step": 11170 - }, - { - "epoch": 0.8592415968002461, - "learning_rate": 0.00014428496094727228, - "loss": 
1.0494, - "step": 11171 - }, - { - "epoch": 0.8593185139604645, - "learning_rate": 0.00014412989007099692, - "loss": 1.2346, - "step": 11172 - }, - { - "epoch": 0.8593954311206831, - "learning_rate": 0.00014397489836538973, - "loss": 1.1324, - "step": 11173 - }, - { - "epoch": 0.8594723482809015, - "learning_rate": 0.00014381998583950186, - "loss": 1.3367, - "step": 11174 - }, - { - "epoch": 0.8595492654411199, - "learning_rate": 0.00014366515250237817, - "loss": 1.2786, - "step": 11175 - }, - { - "epoch": 0.8596261826013384, - "learning_rate": 0.00014351039836305995, - "loss": 0.9845, - "step": 11176 - }, - { - "epoch": 0.8597030997615568, - "learning_rate": 0.00014335572343058316, - "loss": 1.1422, - "step": 11177 - }, - { - "epoch": 0.8597800169217753, - "learning_rate": 0.00014320112771397926, - "loss": 1.3816, - "step": 11178 - }, - { - "epoch": 0.8598569340819937, - "learning_rate": 0.00014304661122227597, - "loss": 1.5254, - "step": 11179 - }, - { - "epoch": 0.8599338512422121, - "learning_rate": 0.0001428921739644951, - "loss": 1.329, - "step": 11180 - }, - { - "epoch": 0.8600107684024306, - "learning_rate": 0.00014273781594965445, - "loss": 0.9533, - "step": 11181 - }, - { - "epoch": 0.860087685562649, - "learning_rate": 0.00014258353718676748, - "loss": 0.9062, - "step": 11182 - }, - { - "epoch": 0.8601646027228674, - "learning_rate": 0.0001424293376848423, - "loss": 0.987, - "step": 11183 - }, - { - "epoch": 0.860241519883086, - "learning_rate": 0.00014227521745288253, - "loss": 1.0842, - "step": 11184 - }, - { - "epoch": 0.8603184370433044, - "learning_rate": 0.00014212117649988848, - "loss": 1.2322, - "step": 11185 - }, - { - "epoch": 0.8603953542035228, - "learning_rate": 0.0001419672148348538, - "loss": 0.941, - "step": 11186 - }, - { - "epoch": 0.8604722713637413, - "learning_rate": 0.00014181333246676846, - "loss": 1.1537, - "step": 11187 - }, - { - "epoch": 0.8605491885239597, - "learning_rate": 0.0001416595294046183, - "loss": 1.3088, - "step": 11188 - }, - { - "epoch": 0.8606261056841782, - "learning_rate": 0.0001415058056573839, - "loss": 1.2471, - "step": 11189 - }, - { - "epoch": 0.8607030228443966, - "learning_rate": 0.0001413521612340415, - "loss": 1.3289, - "step": 11190 - }, - { - "epoch": 0.860779940004615, - "learning_rate": 0.00014119859614356218, - "loss": 1.1891, - "step": 11191 - }, - { - "epoch": 0.8608568571648335, - "learning_rate": 0.00014104511039491263, - "loss": 1.2171, - "step": 11192 - }, - { - "epoch": 0.8609337743250519, - "learning_rate": 0.00014089170399705553, - "loss": 0.9272, - "step": 11193 - }, - { - "epoch": 0.8610106914852703, - "learning_rate": 0.0001407383769589487, - "loss": 1.0157, - "step": 11194 - }, - { - "epoch": 0.8610876086454888, - "learning_rate": 0.00014058512928954414, - "loss": 1.2028, - "step": 11195 - }, - { - "epoch": 0.8611645258057072, - "learning_rate": 0.0001404319609977907, - "loss": 1.1606, - "step": 11196 - }, - { - "epoch": 0.8612414429659258, - "learning_rate": 0.00014027887209263192, - "loss": 0.9443, - "step": 11197 - }, - { - "epoch": 0.8613183601261442, - "learning_rate": 0.00014012586258300725, - "loss": 0.9861, - "step": 11198 - }, - { - "epoch": 0.8613952772863626, - "learning_rate": 0.00013997293247785042, - "loss": 0.9838, - "step": 11199 - }, - { - "epoch": 0.8614721944465811, - "learning_rate": 0.0001398200817860913, - "loss": 1.4646, - "step": 11200 - }, - { - "epoch": 0.8615491116067995, - "learning_rate": 0.0001396673105166552, - "loss": 0.9187, - "step": 11201 - }, - { - "epoch": 
0.8616260287670179, - "learning_rate": 0.00013951461867846271, - "loss": 1.0571, - "step": 11202 - }, - { - "epoch": 0.8617029459272364, - "learning_rate": 0.0001393620062804295, - "loss": 0.979, - "step": 11203 - }, - { - "epoch": 0.8617798630874548, - "learning_rate": 0.00013920947333146677, - "loss": 1.1068, - "step": 11204 - }, - { - "epoch": 0.8618567802476732, - "learning_rate": 0.00013905701984048109, - "loss": 1.1729, - "step": 11205 - }, - { - "epoch": 0.8619336974078917, - "learning_rate": 0.00013890464581637395, - "loss": 1.2125, - "step": 11206 - }, - { - "epoch": 0.8620106145681101, - "learning_rate": 0.0001387523512680438, - "loss": 1.3013, - "step": 11207 - }, - { - "epoch": 0.8620875317283286, - "learning_rate": 0.00013860013620438182, - "loss": 1.1944, - "step": 11208 - }, - { - "epoch": 0.862164448888547, - "learning_rate": 0.00013844800063427692, - "loss": 1.2554, - "step": 11209 - }, - { - "epoch": 0.8622413660487654, - "learning_rate": 0.00013829594456661232, - "loss": 1.2009, - "step": 11210 - }, - { - "epoch": 0.862318283208984, - "learning_rate": 0.00013814396801026675, - "loss": 1.027, - "step": 11211 - }, - { - "epoch": 0.8623952003692024, - "learning_rate": 0.0001379920709741141, - "loss": 0.7939, - "step": 11212 - }, - { - "epoch": 0.8624721175294208, - "learning_rate": 0.00013784025346702379, - "loss": 1.0146, - "step": 11213 - }, - { - "epoch": 0.8625490346896393, - "learning_rate": 0.00013768851549786056, - "loss": 1.5782, - "step": 11214 - }, - { - "epoch": 0.8626259518498577, - "learning_rate": 0.0001375368570754848, - "loss": 1.0979, - "step": 11215 - }, - { - "epoch": 0.8627028690100762, - "learning_rate": 0.00013738527820875198, - "loss": 1.163, - "step": 11216 - }, - { - "epoch": 0.8627797861702946, - "learning_rate": 0.00013723377890651278, - "loss": 0.9345, - "step": 11217 - }, - { - "epoch": 0.862856703330513, - "learning_rate": 0.00013708235917761353, - "loss": 1.1542, - "step": 11218 - }, - { - "epoch": 0.8629336204907315, - "learning_rate": 0.00013693101903089545, - "loss": 1.1283, - "step": 11219 - }, - { - "epoch": 0.8630105376509499, - "learning_rate": 0.0001367797584751963, - "loss": 0.9504, - "step": 11220 - }, - { - "epoch": 0.8630874548111683, - "learning_rate": 0.00013662857751934755, - "loss": 0.9247, - "step": 11221 - }, - { - "epoch": 0.8631643719713868, - "learning_rate": 0.0001364774761721766, - "loss": 1.3241, - "step": 11222 - }, - { - "epoch": 0.8632412891316052, - "learning_rate": 0.00013632645444250723, - "loss": 1.2632, - "step": 11223 - }, - { - "epoch": 0.8633182062918237, - "learning_rate": 0.00013617551233915725, - "loss": 0.9872, - "step": 11224 - }, - { - "epoch": 0.8633951234520422, - "learning_rate": 0.0001360246498709406, - "loss": 1.1072, - "step": 11225 - }, - { - "epoch": 0.8634720406122606, - "learning_rate": 0.00013587386704666605, - "loss": 1.4621, - "step": 11226 - }, - { - "epoch": 0.8635489577724791, - "learning_rate": 0.00013572316387513772, - "loss": 1.1754, - "step": 11227 - }, - { - "epoch": 0.8636258749326975, - "learning_rate": 0.00013557254036515609, - "loss": 1.1917, - "step": 11228 - }, - { - "epoch": 0.8637027920929159, - "learning_rate": 0.00013542199652551596, - "loss": 1.0406, - "step": 11229 - }, - { - "epoch": 0.8637797092531344, - "learning_rate": 0.0001352715323650071, - "loss": 1.1595, - "step": 11230 - }, - { - "epoch": 0.8638566264133528, - "learning_rate": 0.00013512114789241604, - "loss": 1.4831, - "step": 11231 - }, - { - "epoch": 0.8639335435735712, - "learning_rate": 
0.00013497084311652353, - "loss": 1.4832, - "step": 11232 - }, - { - "epoch": 0.8640104607337897, - "learning_rate": 0.00013482061804610622, - "loss": 1.3306, - "step": 11233 - }, - { - "epoch": 0.8640873778940081, - "learning_rate": 0.00013467047268993565, - "loss": 1.2282, - "step": 11234 - }, - { - "epoch": 0.8641642950542266, - "learning_rate": 0.00013452040705677875, - "loss": 1.1446, - "step": 11235 - }, - { - "epoch": 0.864241212214445, - "learning_rate": 0.0001343704211553989, - "loss": 1.041, - "step": 11236 - }, - { - "epoch": 0.8643181293746635, - "learning_rate": 0.00013422051499455319, - "loss": 1.1237, - "step": 11237 - }, - { - "epoch": 0.864395046534882, - "learning_rate": 0.00013407068858299498, - "loss": 0.8111, - "step": 11238 - }, - { - "epoch": 0.8644719636951004, - "learning_rate": 0.0001339209419294728, - "loss": 1.0908, - "step": 11239 - }, - { - "epoch": 0.8645488808553188, - "learning_rate": 0.00013377127504273062, - "loss": 1.1345, - "step": 11240 - }, - { - "epoch": 0.8646257980155373, - "learning_rate": 0.00013362168793150714, - "loss": 1.269, - "step": 11241 - }, - { - "epoch": 0.8647027151757557, - "learning_rate": 0.00013347218060453787, - "loss": 1.4384, - "step": 11242 - }, - { - "epoch": 0.8647796323359741, - "learning_rate": 0.00013332275307055197, - "loss": 1.1863, - "step": 11243 - }, - { - "epoch": 0.8648565494961926, - "learning_rate": 0.0001331734053382745, - "loss": 1.0937, - "step": 11244 - }, - { - "epoch": 0.864933466656411, - "learning_rate": 0.0001330241374164266, - "loss": 1.4534, - "step": 11245 - }, - { - "epoch": 0.8650103838166295, - "learning_rate": 0.00013287494931372401, - "loss": 1.105, - "step": 11246 - }, - { - "epoch": 0.8650873009768479, - "learning_rate": 0.00013272584103887774, - "loss": 1.1691, - "step": 11247 - }, - { - "epoch": 0.8651642181370663, - "learning_rate": 0.00013257681260059483, - "loss": 1.1537, - "step": 11248 - }, - { - "epoch": 0.8652411352972849, - "learning_rate": 0.00013242786400757651, - "loss": 1.097, - "step": 11249 - }, - { - "epoch": 0.8653180524575033, - "learning_rate": 0.00013227899526852062, - "loss": 0.9412, - "step": 11250 - }, - { - "epoch": 0.8653949696177217, - "learning_rate": 0.00013213020639212003, - "loss": 1.1851, - "step": 11251 - }, - { - "epoch": 0.8654718867779402, - "learning_rate": 0.00013198149738706167, - "loss": 1.1797, - "step": 11252 - }, - { - "epoch": 0.8655488039381586, - "learning_rate": 0.00013183286826202973, - "loss": 1.1292, - "step": 11253 - }, - { - "epoch": 0.8656257210983771, - "learning_rate": 0.0001316843190257021, - "loss": 1.3159, - "step": 11254 - }, - { - "epoch": 0.8657026382585955, - "learning_rate": 0.0001315358496867537, - "loss": 0.8295, - "step": 11255 - }, - { - "epoch": 0.8657795554188139, - "learning_rate": 0.0001313874602538529, - "loss": 1.0835, - "step": 11256 - }, - { - "epoch": 0.8658564725790324, - "learning_rate": 0.00013123915073566427, - "loss": 1.1864, - "step": 11257 - }, - { - "epoch": 0.8659333897392508, - "learning_rate": 0.0001310909211408486, - "loss": 1.0999, - "step": 11258 - }, - { - "epoch": 0.8660103068994692, - "learning_rate": 0.00013094277147806055, - "loss": 1.0201, - "step": 11259 - }, - { - "epoch": 0.8660872240596877, - "learning_rate": 0.00013079470175595092, - "loss": 1.3484, - "step": 11260 - }, - { - "epoch": 0.8661641412199061, - "learning_rate": 0.00013064671198316564, - "loss": 1.1714, - "step": 11261 - }, - { - "epoch": 0.8662410583801246, - "learning_rate": 0.00013049880216834593, - "loss": 1.1316, - "step": 11262 - 
}, - { - "epoch": 0.8663179755403431, - "learning_rate": 0.00013035097232012805, - "loss": 1.0454, - "step": 11263 - }, - { - "epoch": 0.8663948927005615, - "learning_rate": 0.00013020322244714495, - "loss": 1.5866, - "step": 11264 - }, - { - "epoch": 0.86647180986078, - "learning_rate": 0.0001300555525580227, - "loss": 0.8978, - "step": 11265 - }, - { - "epoch": 0.8665487270209984, - "learning_rate": 0.00012990796266138476, - "loss": 1.2143, - "step": 11266 - }, - { - "epoch": 0.8666256441812168, - "learning_rate": 0.00012976045276584874, - "loss": 1.3761, - "step": 11267 - }, - { - "epoch": 0.8667025613414353, - "learning_rate": 0.00012961302288002792, - "loss": 0.7814, - "step": 11268 - }, - { - "epoch": 0.8667794785016537, - "learning_rate": 0.00012946567301253087, - "loss": 1.6697, - "step": 11269 - }, - { - "epoch": 0.8668563956618721, - "learning_rate": 0.00012931840317196143, - "loss": 0.7685, - "step": 11270 - }, - { - "epoch": 0.8669333128220906, - "learning_rate": 0.00012917121336691883, - "loss": 0.9561, - "step": 11271 - }, - { - "epoch": 0.867010229982309, - "learning_rate": 0.00012902410360599777, - "loss": 1.2734, - "step": 11272 - }, - { - "epoch": 0.8670871471425275, - "learning_rate": 0.0001288770738977883, - "loss": 1.241, - "step": 11273 - }, - { - "epoch": 0.867164064302746, - "learning_rate": 0.00012873012425087526, - "loss": 0.8868, - "step": 11274 - }, - { - "epoch": 0.8672409814629644, - "learning_rate": 0.00012858325467383946, - "loss": 1.0719, - "step": 11275 - }, - { - "epoch": 0.8673178986231829, - "learning_rate": 0.00012843646517525636, - "loss": 1.1167, - "step": 11276 - }, - { - "epoch": 0.8673948157834013, - "learning_rate": 0.00012828975576369812, - "loss": 0.6641, - "step": 11277 - }, - { - "epoch": 0.8674717329436197, - "learning_rate": 0.00012814312644773019, - "loss": 0.9545, - "step": 11278 - }, - { - "epoch": 0.8675486501038382, - "learning_rate": 0.00012799657723591473, - "loss": 1.1492, - "step": 11279 - }, - { - "epoch": 0.8676255672640566, - "learning_rate": 0.0001278501081368091, - "loss": 1.3582, - "step": 11280 - }, - { - "epoch": 0.8677024844242751, - "learning_rate": 0.00012770371915896573, - "loss": 1.2481, - "step": 11281 - }, - { - "epoch": 0.8677794015844935, - "learning_rate": 0.00012755741031093232, - "loss": 1.2525, - "step": 11282 - }, - { - "epoch": 0.8678563187447119, - "learning_rate": 0.0001274111816012522, - "loss": 1.2596, - "step": 11283 - }, - { - "epoch": 0.8679332359049304, - "learning_rate": 0.0001272650330384632, - "loss": 0.8427, - "step": 11284 - }, - { - "epoch": 0.8680101530651488, - "learning_rate": 0.0001271189646311, - "loss": 1.0808, - "step": 11285 - }, - { - "epoch": 0.8680870702253672, - "learning_rate": 0.00012697297638769145, - "loss": 1.0008, - "step": 11286 - }, - { - "epoch": 0.8681639873855858, - "learning_rate": 0.00012682706831676106, - "loss": 1.4559, - "step": 11287 - }, - { - "epoch": 0.8682409045458042, - "learning_rate": 0.00012668124042682967, - "loss": 1.5305, - "step": 11288 - }, - { - "epoch": 0.8683178217060226, - "learning_rate": 0.0001265354927264118, - "loss": 1.0053, - "step": 11289 - }, - { - "epoch": 0.8683947388662411, - "learning_rate": 0.000126389825224018, - "loss": 1.183, - "step": 11290 - }, - { - "epoch": 0.8684716560264595, - "learning_rate": 0.00012624423792815376, - "loss": 1.0886, - "step": 11291 - }, - { - "epoch": 0.868548573186678, - "learning_rate": 0.00012609873084732, - "loss": 1.2597, - "step": 11292 - }, - { - "epoch": 0.8686254903468964, - "learning_rate": 
0.00012595330399001336, - "loss": 0.8537, - "step": 11293 - }, - { - "epoch": 0.8687024075071148, - "learning_rate": 0.00012580795736472528, - "loss": 0.8841, - "step": 11294 - }, - { - "epoch": 0.8687793246673333, - "learning_rate": 0.00012566269097994277, - "loss": 1.1812, - "step": 11295 - }, - { - "epoch": 0.8688562418275517, - "learning_rate": 0.00012551750484414802, - "loss": 0.7635, - "step": 11296 - }, - { - "epoch": 0.8689331589877701, - "learning_rate": 0.00012537239896581864, - "loss": 0.9661, - "step": 11297 - }, - { - "epoch": 0.8690100761479886, - "learning_rate": 0.00012522737335342717, - "loss": 1.2354, - "step": 11298 - }, - { - "epoch": 0.869086993308207, - "learning_rate": 0.0001250824280154426, - "loss": 1.0257, - "step": 11299 - }, - { - "epoch": 0.8691639104684256, - "learning_rate": 0.0001249375629603276, - "loss": 1.1796, - "step": 11300 - }, - { - "epoch": 0.869240827628644, - "learning_rate": 0.0001247927781965415, - "loss": 1.1895, - "step": 11301 - }, - { - "epoch": 0.8693177447888624, - "learning_rate": 0.0001246480737325382, - "loss": 1.1967, - "step": 11302 - }, - { - "epoch": 0.8693946619490809, - "learning_rate": 0.0001245034495767675, - "loss": 1.1796, - "step": 11303 - }, - { - "epoch": 0.8694715791092993, - "learning_rate": 0.00012435890573767361, - "loss": 1.1002, - "step": 11304 - }, - { - "epoch": 0.8695484962695177, - "learning_rate": 0.0001242144422236971, - "loss": 1.2278, - "step": 11305 - }, - { - "epoch": 0.8696254134297362, - "learning_rate": 0.00012407005904327274, - "loss": 0.9246, - "step": 11306 - }, - { - "epoch": 0.8697023305899546, - "learning_rate": 0.00012392575620483192, - "loss": 0.9358, - "step": 11307 - }, - { - "epoch": 0.869779247750173, - "learning_rate": 0.0001237815337168001, - "loss": 1.2992, - "step": 11308 - }, - { - "epoch": 0.8698561649103915, - "learning_rate": 0.00012363739158759908, - "loss": 1.2913, - "step": 11309 - }, - { - "epoch": 0.8699330820706099, - "learning_rate": 0.00012349332982564503, - "loss": 1.1436, - "step": 11310 - }, - { - "epoch": 0.8700099992308284, - "learning_rate": 0.0001233493484393498, - "loss": 0.8496, - "step": 11311 - }, - { - "epoch": 0.8700869163910468, - "learning_rate": 0.0001232054474371213, - "loss": 1.3898, - "step": 11312 - }, - { - "epoch": 0.8701638335512653, - "learning_rate": 0.00012306162682736133, - "loss": 1.2717, - "step": 11313 - }, - { - "epoch": 0.8702407507114838, - "learning_rate": 0.00012291788661846764, - "loss": 1.4172, - "step": 11314 - }, - { - "epoch": 0.8703176678717022, - "learning_rate": 0.00012277422681883403, - "loss": 0.8232, - "step": 11315 - }, - { - "epoch": 0.8703945850319206, - "learning_rate": 0.00012263064743684866, - "loss": 1.4226, - "step": 11316 - }, - { - "epoch": 0.8704715021921391, - "learning_rate": 0.00012248714848089526, - "loss": 1.2452, - "step": 11317 - }, - { - "epoch": 0.8705484193523575, - "learning_rate": 0.00012234372995935283, - "loss": 1.2566, - "step": 11318 - }, - { - "epoch": 0.870625336512576, - "learning_rate": 0.0001222003918805955, - "loss": 0.9356, - "step": 11319 - }, - { - "epoch": 0.8707022536727944, - "learning_rate": 0.00012205713425299358, - "loss": 1.2432, - "step": 11320 - }, - { - "epoch": 0.8707791708330128, - "learning_rate": 0.00012191395708491154, - "loss": 1.3988, - "step": 11321 - }, - { - "epoch": 0.8708560879932313, - "learning_rate": 0.0001217708603847097, - "loss": 1.4929, - "step": 11322 - }, - { - "epoch": 0.8709330051534497, - "learning_rate": 0.00012162784416074369, - "loss": 1.0432, - "step": 11323 - 
}, - { - "epoch": 0.8710099223136681, - "learning_rate": 0.0001214849084213645, - "loss": 1.2808, - "step": 11324 - }, - { - "epoch": 0.8710868394738867, - "learning_rate": 0.00012134205317491798, - "loss": 1.1407, - "step": 11325 - }, - { - "epoch": 0.8711637566341051, - "learning_rate": 0.00012119927842974592, - "loss": 1.1389, - "step": 11326 - }, - { - "epoch": 0.8712406737943235, - "learning_rate": 0.00012105658419418486, - "loss": 1.197, - "step": 11327 - }, - { - "epoch": 0.871317590954542, - "learning_rate": 0.00012091397047656677, - "loss": 1.1079, - "step": 11328 - }, - { - "epoch": 0.8713945081147604, - "learning_rate": 0.00012077143728521933, - "loss": 1.0327, - "step": 11329 - }, - { - "epoch": 0.8714714252749789, - "learning_rate": 0.00012062898462846523, - "loss": 1.0879, - "step": 11330 - }, - { - "epoch": 0.8715483424351973, - "learning_rate": 0.00012048661251462212, - "loss": 0.8193, - "step": 11331 - }, - { - "epoch": 0.8716252595954157, - "learning_rate": 0.00012034432095200354, - "loss": 1.2071, - "step": 11332 - }, - { - "epoch": 0.8717021767556342, - "learning_rate": 0.00012020210994891751, - "loss": 1.6704, - "step": 11333 - }, - { - "epoch": 0.8717790939158526, - "learning_rate": 0.00012005997951366854, - "loss": 1.1841, - "step": 11334 - }, - { - "epoch": 0.871856011076071, - "learning_rate": 0.00011991792965455584, - "loss": 1.469, - "step": 11335 - }, - { - "epoch": 0.8719329282362895, - "learning_rate": 0.00011977596037987293, - "loss": 1.0647, - "step": 11336 - }, - { - "epoch": 0.8720098453965079, - "learning_rate": 0.00011963407169791052, - "loss": 1.1584, - "step": 11337 - }, - { - "epoch": 0.8720867625567265, - "learning_rate": 0.00011949226361695297, - "loss": 1.1649, - "step": 11338 - }, - { - "epoch": 0.8721636797169449, - "learning_rate": 0.00011935053614528118, - "loss": 1.0068, - "step": 11339 - }, - { - "epoch": 0.8722405968771633, - "learning_rate": 0.00011920888929117035, - "loss": 1.155, - "step": 11340 - }, - { - "epoch": 0.8723175140373818, - "learning_rate": 0.0001190673230628912, - "loss": 0.931, - "step": 11341 - }, - { - "epoch": 0.8723944311976002, - "learning_rate": 0.00011892583746871045, - "loss": 0.879, - "step": 11342 - }, - { - "epoch": 0.8724713483578186, - "learning_rate": 0.00011878443251688947, - "loss": 0.7196, - "step": 11343 - }, - { - "epoch": 0.8725482655180371, - "learning_rate": 0.00011864310821568486, - "loss": 1.265, - "step": 11344 - }, - { - "epoch": 0.8726251826782555, - "learning_rate": 0.00011850186457334882, - "loss": 1.2083, - "step": 11345 - }, - { - "epoch": 0.8727020998384739, - "learning_rate": 0.00011836070159812878, - "loss": 1.1793, - "step": 11346 - }, - { - "epoch": 0.8727790169986924, - "learning_rate": 0.0001182196192982668, - "loss": 1.3855, - "step": 11347 - }, - { - "epoch": 0.8728559341589108, - "learning_rate": 0.00011807861768200196, - "loss": 1.7267, - "step": 11348 - }, - { - "epoch": 0.8729328513191293, - "learning_rate": 0.00011793769675756632, - "loss": 1.0257, - "step": 11349 - }, - { - "epoch": 0.8730097684793477, - "learning_rate": 0.00011779685653318916, - "loss": 1.1233, - "step": 11350 - }, - { - "epoch": 0.8730866856395662, - "learning_rate": 0.00011765609701709407, - "loss": 0.7911, - "step": 11351 - }, - { - "epoch": 0.8731636027997847, - "learning_rate": 0.00011751541821750012, - "loss": 1.1124, - "step": 11352 - }, - { - "epoch": 0.8732405199600031, - "learning_rate": 0.00011737482014262157, - "loss": 0.952, - "step": 11353 - }, - { - "epoch": 0.8733174371202215, - "learning_rate": 
0.00011723430280066838, - "loss": 1.2934, - "step": 11354 - }, - { - "epoch": 0.87339435428044, - "learning_rate": 0.00011709386619984496, - "loss": 1.0059, - "step": 11355 - }, - { - "epoch": 0.8734712714406584, - "learning_rate": 0.00011695351034835227, - "loss": 1.4836, - "step": 11356 - }, - { - "epoch": 0.8735481886008769, - "learning_rate": 0.00011681323525438542, - "loss": 1.098, - "step": 11357 - }, - { - "epoch": 0.8736251057610953, - "learning_rate": 0.00011667304092613534, - "loss": 0.8327, - "step": 11358 - }, - { - "epoch": 0.8737020229213137, - "learning_rate": 0.000116532927371788, - "loss": 0.9604, - "step": 11359 - }, - { - "epoch": 0.8737789400815322, - "learning_rate": 0.00011639289459952467, - "loss": 1.1297, - "step": 11360 - }, - { - "epoch": 0.8738558572417506, - "learning_rate": 0.00011625294261752268, - "loss": 1.4265, - "step": 11361 - }, - { - "epoch": 0.873932774401969, - "learning_rate": 0.0001161130714339531, - "loss": 1.3408, - "step": 11362 - }, - { - "epoch": 0.8740096915621876, - "learning_rate": 0.00011597328105698329, - "loss": 1.0941, - "step": 11363 - }, - { - "epoch": 0.874086608722406, - "learning_rate": 0.00011583357149477636, - "loss": 0.908, - "step": 11364 - }, - { - "epoch": 0.8741635258826244, - "learning_rate": 0.00011569394275548978, - "loss": 1.6905, - "step": 11365 - }, - { - "epoch": 0.8742404430428429, - "learning_rate": 0.00011555439484727654, - "loss": 0.9637, - "step": 11366 - }, - { - "epoch": 0.8743173602030613, - "learning_rate": 0.00011541492777828493, - "loss": 1.2012, - "step": 11367 - }, - { - "epoch": 0.8743942773632798, - "learning_rate": 0.00011527554155665848, - "loss": 0.9998, - "step": 11368 - }, - { - "epoch": 0.8744711945234982, - "learning_rate": 0.00011513623619053648, - "loss": 1.4104, - "step": 11369 - }, - { - "epoch": 0.8745481116837166, - "learning_rate": 0.00011499701168805326, - "loss": 1.2468, - "step": 11370 - }, - { - "epoch": 0.8746250288439351, - "learning_rate": 0.00011485786805733733, - "loss": 0.9052, - "step": 11371 - }, - { - "epoch": 0.8747019460041535, - "learning_rate": 0.00011471880530651435, - "loss": 1.192, - "step": 11372 - }, - { - "epoch": 0.8747788631643719, - "learning_rate": 0.00011457982344370399, - "loss": 1.2228, - "step": 11373 - }, - { - "epoch": 0.8748557803245904, - "learning_rate": 0.00011444092247702159, - "loss": 1.0131, - "step": 11374 - }, - { - "epoch": 0.8749326974848088, - "learning_rate": 0.00011430210241457784, - "loss": 1.2772, - "step": 11375 - }, - { - "epoch": 0.8750096146450274, - "learning_rate": 0.00011416336326447807, - "loss": 1.3133, - "step": 11376 - }, - { - "epoch": 0.8750865318052458, - "learning_rate": 0.00011402470503482415, - "loss": 1.3592, - "step": 11377 - }, - { - "epoch": 0.8751634489654642, - "learning_rate": 0.0001138861277337121, - "loss": 1.0208, - "step": 11378 - }, - { - "epoch": 0.8752403661256827, - "learning_rate": 0.00011374763136923377, - "loss": 1.2365, - "step": 11379 - }, - { - "epoch": 0.8753172832859011, - "learning_rate": 0.00011360921594947571, - "loss": 1.454, - "step": 11380 - }, - { - "epoch": 0.8753942004461195, - "learning_rate": 0.0001134708814825206, - "loss": 1.2529, - "step": 11381 - }, - { - "epoch": 0.875471117606338, - "learning_rate": 0.00011333262797644533, - "loss": 1.3665, - "step": 11382 - }, - { - "epoch": 0.8755480347665564, - "learning_rate": 0.00011319445543932361, - "loss": 1.1139, - "step": 11383 - }, - { - "epoch": 0.8756249519267748, - "learning_rate": 0.00011305636387922263, - "loss": 1.2085, - "step": 11384 
- }, - { - "epoch": 0.8757018690869933, - "learning_rate": 0.00011291835330420596, - "loss": 1.2508, - "step": 11385 - }, - { - "epoch": 0.8757787862472117, - "learning_rate": 0.00011278042372233233, - "loss": 1.2565, - "step": 11386 - }, - { - "epoch": 0.8758557034074302, - "learning_rate": 0.00011264257514165543, - "loss": 1.2357, - "step": 11387 - }, - { - "epoch": 0.8759326205676486, - "learning_rate": 0.00011250480757022452, - "loss": 1.2803, - "step": 11388 - }, - { - "epoch": 0.876009537727867, - "learning_rate": 0.00011236712101608382, - "loss": 1.3607, - "step": 11389 - }, - { - "epoch": 0.8760864548880856, - "learning_rate": 0.00011222951548727289, - "loss": 0.8425, - "step": 11390 - }, - { - "epoch": 0.876163372048304, - "learning_rate": 0.00011209199099182715, - "loss": 1.1194, - "step": 11391 - }, - { - "epoch": 0.8762402892085224, - "learning_rate": 0.00011195454753777668, - "loss": 1.0913, - "step": 11392 - }, - { - "epoch": 0.8763172063687409, - "learning_rate": 0.0001118171851331462, - "loss": 1.1493, - "step": 11393 - }, - { - "epoch": 0.8763941235289593, - "learning_rate": 0.00011167990378595733, - "loss": 1.5724, - "step": 11394 - }, - { - "epoch": 0.8764710406891778, - "learning_rate": 0.00011154270350422547, - "loss": 1.3541, - "step": 11395 - }, - { - "epoch": 0.8765479578493962, - "learning_rate": 0.00011140558429596271, - "loss": 1.1884, - "step": 11396 - }, - { - "epoch": 0.8766248750096146, - "learning_rate": 0.00011126854616917481, - "loss": 1.22, - "step": 11397 - }, - { - "epoch": 0.8767017921698331, - "learning_rate": 0.00011113158913186355, - "loss": 1.2094, - "step": 11398 - }, - { - "epoch": 0.8767787093300515, - "learning_rate": 0.00011099471319202653, - "loss": 1.1837, - "step": 11399 - }, - { - "epoch": 0.8768556264902699, - "learning_rate": 0.00011085791835765568, - "loss": 1.4606, - "step": 11400 - }, - { - "epoch": 0.8769325436504884, - "learning_rate": 0.00011072120463673879, - "loss": 0.9544, - "step": 11401 - }, - { - "epoch": 0.8770094608107069, - "learning_rate": 0.00011058457203725864, - "loss": 0.907, - "step": 11402 - }, - { - "epoch": 0.8770863779709254, - "learning_rate": 0.00011044802056719299, - "loss": 1.0953, - "step": 11403 - }, - { - "epoch": 0.8771632951311438, - "learning_rate": 0.00011031155023451584, - "loss": 1.5112, - "step": 11404 - }, - { - "epoch": 0.8772402122913622, - "learning_rate": 0.00011017516104719593, - "loss": 1.0694, - "step": 11405 - }, - { - "epoch": 0.8773171294515807, - "learning_rate": 0.00011003885301319627, - "loss": 0.8511, - "step": 11406 - }, - { - "epoch": 0.8773940466117991, - "learning_rate": 0.00010990262614047681, - "loss": 0.9416, - "step": 11407 - }, - { - "epoch": 0.8774709637720175, - "learning_rate": 0.00010976648043699167, - "loss": 1.1338, - "step": 11408 - }, - { - "epoch": 0.877547880932236, - "learning_rate": 0.00010963041591069067, - "loss": 0.8955, - "step": 11409 - }, - { - "epoch": 0.8776247980924544, - "learning_rate": 0.00010949443256951864, - "loss": 1.1223, - "step": 11410 - }, - { - "epoch": 0.8777017152526728, - "learning_rate": 0.00010935853042141586, - "loss": 0.9867, - "step": 11411 - }, - { - "epoch": 0.8777786324128913, - "learning_rate": 0.00010922270947431767, - "loss": 0.6568, - "step": 11412 - }, - { - "epoch": 0.8778555495731097, - "learning_rate": 0.00010908696973615506, - "loss": 1.3113, - "step": 11413 - }, - { - "epoch": 0.8779324667333283, - "learning_rate": 0.00010895131121485369, - "loss": 1.2506, - "step": 11414 - }, - { - "epoch": 0.8780093838935467, - 
"learning_rate": 0.00010881573391833521, - "loss": 1.4292, - "step": 11415 - }, - { - "epoch": 0.8780863010537651, - "learning_rate": 0.00010868023785451581, - "loss": 0.8695, - "step": 11416 - }, - { - "epoch": 0.8781632182139836, - "learning_rate": 0.00010854482303130698, - "loss": 1.0954, - "step": 11417 - }, - { - "epoch": 0.878240135374202, - "learning_rate": 0.00010840948945661671, - "loss": 1.2705, - "step": 11418 - }, - { - "epoch": 0.8783170525344204, - "learning_rate": 0.00010827423713834638, - "loss": 1.3253, - "step": 11419 - }, - { - "epoch": 0.8783939696946389, - "learning_rate": 0.00010813906608439333, - "loss": 0.9382, - "step": 11420 - }, - { - "epoch": 0.8784708868548573, - "learning_rate": 0.00010800397630265125, - "loss": 1.2581, - "step": 11421 - }, - { - "epoch": 0.8785478040150758, - "learning_rate": 0.00010786896780100763, - "loss": 1.7604, - "step": 11422 - }, - { - "epoch": 0.8786247211752942, - "learning_rate": 0.00010773404058734571, - "loss": 1.1944, - "step": 11423 - }, - { - "epoch": 0.8787016383355126, - "learning_rate": 0.00010759919466954432, - "loss": 1.0712, - "step": 11424 - }, - { - "epoch": 0.8787785554957311, - "learning_rate": 0.00010746443005547668, - "loss": 1.0611, - "step": 11425 - }, - { - "epoch": 0.8788554726559495, - "learning_rate": 0.0001073297467530125, - "loss": 1.0331, - "step": 11426 - }, - { - "epoch": 0.878932389816168, - "learning_rate": 0.00010719514477001613, - "loss": 0.9628, - "step": 11427 - }, - { - "epoch": 0.8790093069763865, - "learning_rate": 0.00010706062411434631, - "loss": 1.0546, - "step": 11428 - }, - { - "epoch": 0.8790862241366049, - "learning_rate": 0.00010692618479385857, - "loss": 1.1125, - "step": 11429 - }, - { - "epoch": 0.8791631412968233, - "learning_rate": 0.00010679182681640264, - "loss": 1.4027, - "step": 11430 - }, - { - "epoch": 0.8792400584570418, - "learning_rate": 0.00010665755018982392, - "loss": 1.3157, - "step": 11431 - }, - { - "epoch": 0.8793169756172602, - "learning_rate": 0.00010652335492196314, - "loss": 0.9309, - "step": 11432 - }, - { - "epoch": 0.8793938927774787, - "learning_rate": 0.0001063892410206555, - "loss": 1.253, - "step": 11433 - }, - { - "epoch": 0.8794708099376971, - "learning_rate": 0.00010625520849373277, - "loss": 0.9708, - "step": 11434 - }, - { - "epoch": 0.8795477270979155, - "learning_rate": 0.00010612125734902083, - "loss": 1.188, - "step": 11435 - }, - { - "epoch": 0.879624644258134, - "learning_rate": 0.00010598738759434146, - "loss": 1.3302, - "step": 11436 - }, - { - "epoch": 0.8797015614183524, - "learning_rate": 0.00010585359923751137, - "loss": 1.0724, - "step": 11437 - }, - { - "epoch": 0.8797784785785708, - "learning_rate": 0.00010571989228634249, - "loss": 1.0742, - "step": 11438 - }, - { - "epoch": 0.8798553957387893, - "learning_rate": 0.00010558626674864191, - "loss": 0.8101, - "step": 11439 - }, - { - "epoch": 0.8799323128990078, - "learning_rate": 0.00010545272263221289, - "loss": 0.9729, - "step": 11440 - }, - { - "epoch": 0.8800092300592263, - "learning_rate": 0.00010531925994485236, - "loss": 1.0727, - "step": 11441 - }, - { - "epoch": 0.8800861472194447, - "learning_rate": 0.00010518587869435392, - "loss": 1.1956, - "step": 11442 - }, - { - "epoch": 0.8801630643796631, - "learning_rate": 0.00010505257888850584, - "loss": 1.2086, - "step": 11443 - }, - { - "epoch": 0.8802399815398816, - "learning_rate": 0.00010491936053509122, - "loss": 1.3478, - "step": 11444 - }, - { - "epoch": 0.8803168987001, - "learning_rate": 0.00010478622364188917, - "loss": 
1.0753, - "step": 11445 - }, - { - "epoch": 0.8803938158603184, - "learning_rate": 0.00010465316821667364, - "loss": 1.152, - "step": 11446 - }, - { - "epoch": 0.8804707330205369, - "learning_rate": 0.00010452019426721344, - "loss": 1.4289, - "step": 11447 - }, - { - "epoch": 0.8805476501807553, - "learning_rate": 0.00010438730180127382, - "loss": 1.1304, - "step": 11448 - }, - { - "epoch": 0.8806245673409737, - "learning_rate": 0.00010425449082661425, - "loss": 1.1484, - "step": 11449 - }, - { - "epoch": 0.8807014845011922, - "learning_rate": 0.00010412176135098905, - "loss": 1.1992, - "step": 11450 - }, - { - "epoch": 0.8807784016614106, - "learning_rate": 0.00010398911338214934, - "loss": 0.9903, - "step": 11451 - }, - { - "epoch": 0.8808553188216292, - "learning_rate": 0.00010385654692783991, - "loss": 1.2172, - "step": 11452 - }, - { - "epoch": 0.8809322359818476, - "learning_rate": 0.00010372406199580209, - "loss": 0.9563, - "step": 11453 - }, - { - "epoch": 0.881009153142066, - "learning_rate": 0.00010359165859377135, - "loss": 1.0242, - "step": 11454 - }, - { - "epoch": 0.8810860703022845, - "learning_rate": 0.00010345933672947866, - "loss": 1.1362, - "step": 11455 - }, - { - "epoch": 0.8811629874625029, - "learning_rate": 0.00010332709641065102, - "loss": 1.1323, - "step": 11456 - }, - { - "epoch": 0.8812399046227213, - "learning_rate": 0.00010319493764500976, - "loss": 1.2717, - "step": 11457 - }, - { - "epoch": 0.8813168217829398, - "learning_rate": 0.00010306286044027168, - "loss": 1.0881, - "step": 11458 - }, - { - "epoch": 0.8813937389431582, - "learning_rate": 0.00010293086480414932, - "loss": 0.8135, - "step": 11459 - }, - { - "epoch": 0.8814706561033767, - "learning_rate": 0.00010279895074434931, - "loss": 1.1207, - "step": 11460 - }, - { - "epoch": 0.8815475732635951, - "learning_rate": 0.000102667118268575, - "loss": 1.0214, - "step": 11461 - }, - { - "epoch": 0.8816244904238135, - "learning_rate": 0.0001025353673845239, - "loss": 1.2395, - "step": 11462 - }, - { - "epoch": 0.881701407584032, - "learning_rate": 0.00010240369809988903, - "loss": 1.1762, - "step": 11463 - }, - { - "epoch": 0.8817783247442504, - "learning_rate": 0.0001022721104223589, - "loss": 0.9741, - "step": 11464 - }, - { - "epoch": 0.8818552419044688, - "learning_rate": 0.00010214060435961686, - "loss": 1.0985, - "step": 11465 - }, - { - "epoch": 0.8819321590646874, - "learning_rate": 0.00010200917991934177, - "loss": 1.1406, - "step": 11466 - }, - { - "epoch": 0.8820090762249058, - "learning_rate": 0.0001018778371092075, - "loss": 1.1153, - "step": 11467 - }, - { - "epoch": 0.8820859933851242, - "learning_rate": 0.00010174657593688358, - "loss": 0.7835, - "step": 11468 - }, - { - "epoch": 0.8821629105453427, - "learning_rate": 0.00010161539641003404, - "loss": 1.6216, - "step": 11469 - }, - { - "epoch": 0.8822398277055611, - "learning_rate": 0.00010148429853631907, - "loss": 1.2589, - "step": 11470 - }, - { - "epoch": 0.8823167448657796, - "learning_rate": 0.00010135328232339341, - "loss": 1.2109, - "step": 11471 - }, - { - "epoch": 0.882393662025998, - "learning_rate": 0.0001012223477789071, - "loss": 0.8953, - "step": 11472 - }, - { - "epoch": 0.8824705791862164, - "learning_rate": 0.00010109149491050585, - "loss": 1.0965, - "step": 11473 - }, - { - "epoch": 0.8825474963464349, - "learning_rate": 0.00010096072372582987, - "loss": 0.8941, - "step": 11474 - }, - { - "epoch": 0.8826244135066533, - "learning_rate": 0.0001008300342325154, - "loss": 0.9229, - "step": 11475 - }, - { - "epoch": 
0.8827013306668717, - "learning_rate": 0.00010069942643819368, - "loss": 1.3066, - "step": 11476 - }, - { - "epoch": 0.8827782478270902, - "learning_rate": 0.00010056890035049043, - "loss": 1.253, - "step": 11477 - }, - { - "epoch": 0.8828551649873086, - "learning_rate": 0.00010043845597702756, - "loss": 1.2333, - "step": 11478 - }, - { - "epoch": 0.8829320821475272, - "learning_rate": 0.00010030809332542195, - "loss": 1.166, - "step": 11479 - }, - { - "epoch": 0.8830089993077456, - "learning_rate": 0.00010017781240328555, - "loss": 1.142, - "step": 11480 - }, - { - "epoch": 0.883085916467964, - "learning_rate": 0.00010004761321822559, - "loss": 1.2751, - "step": 11481 - }, - { - "epoch": 0.8831628336281825, - "learning_rate": 9.991749577784415e-05, - "loss": 0.9672, - "step": 11482 - }, - { - "epoch": 0.8832397507884009, - "learning_rate": 9.978746008973949e-05, - "loss": 0.7696, - "step": 11483 - }, - { - "epoch": 0.8833166679486193, - "learning_rate": 9.96575061615042e-05, - "loss": 0.9291, - "step": 11484 - }, - { - "epoch": 0.8833935851088378, - "learning_rate": 9.952763400072673e-05, - "loss": 1.2045, - "step": 11485 - }, - { - "epoch": 0.8834705022690562, - "learning_rate": 9.939784361499016e-05, - "loss": 1.2746, - "step": 11486 - }, - { - "epoch": 0.8835474194292746, - "learning_rate": 9.92681350118731e-05, - "loss": 0.9339, - "step": 11487 - }, - { - "epoch": 0.8836243365894931, - "learning_rate": 9.913850819894931e-05, - "loss": 0.9674, - "step": 11488 - }, - { - "epoch": 0.8837012537497115, - "learning_rate": 9.900896318378844e-05, - "loss": 1.3145, - "step": 11489 - }, - { - "epoch": 0.88377817090993, - "learning_rate": 9.887949997395373e-05, - "loss": 1.014, - "step": 11490 - }, - { - "epoch": 0.8838550880701485, - "learning_rate": 9.87501185770055e-05, - "loss": 0.8902, - "step": 11491 - }, - { - "epoch": 0.8839320052303669, - "learning_rate": 9.862081900049819e-05, - "loss": 0.862, - "step": 11492 - }, - { - "epoch": 0.8840089223905854, - "learning_rate": 9.849160125198192e-05, - "loss": 1.4363, - "step": 11493 - }, - { - "epoch": 0.8840858395508038, - "learning_rate": 9.836246533900151e-05, - "loss": 1.3161, - "step": 11494 - }, - { - "epoch": 0.8841627567110222, - "learning_rate": 9.823341126909741e-05, - "loss": 1.1359, - "step": 11495 - }, - { - "epoch": 0.8842396738712407, - "learning_rate": 9.810443904980525e-05, - "loss": 1.0161, - "step": 11496 - }, - { - "epoch": 0.8843165910314591, - "learning_rate": 9.797554868865604e-05, - "loss": 1.4108, - "step": 11497 - }, - { - "epoch": 0.8843935081916776, - "learning_rate": 9.784674019317574e-05, - "loss": 1.1903, - "step": 11498 - }, - { - "epoch": 0.884470425351896, - "learning_rate": 9.77180135708855e-05, - "loss": 1.2703, - "step": 11499 - }, - { - "epoch": 0.8845473425121144, - "learning_rate": 9.758936882930197e-05, - "loss": 1.2174, - "step": 11500 - }, - { - "epoch": 0.8846242596723329, - "learning_rate": 9.746080597593648e-05, - "loss": 1.3184, - "step": 11501 - }, - { - "epoch": 0.8847011768325513, - "learning_rate": 9.73323250182967e-05, - "loss": 1.0426, - "step": 11502 - }, - { - "epoch": 0.8847780939927697, - "learning_rate": 9.720392596388395e-05, - "loss": 1.0595, - "step": 11503 - }, - { - "epoch": 0.8848550111529883, - "learning_rate": 9.707560882019589e-05, - "loss": 1.3072, - "step": 11504 - }, - { - "epoch": 0.8849319283132067, - "learning_rate": 9.694737359472522e-05, - "loss": 1.1604, - "step": 11505 - }, - { - "epoch": 0.8850088454734252, - "learning_rate": 9.68192202949596e-05, - "loss": 1.2572, - 
"step": 11506 - }, - { - "epoch": 0.8850857626336436, - "learning_rate": 9.669114892838221e-05, - "loss": 1.3691, - "step": 11507 - }, - { - "epoch": 0.885162679793862, - "learning_rate": 9.656315950247108e-05, - "loss": 1.0542, - "step": 11508 - }, - { - "epoch": 0.8852395969540805, - "learning_rate": 9.643525202469938e-05, - "loss": 1.2763, - "step": 11509 - }, - { - "epoch": 0.8853165141142989, - "learning_rate": 9.630742650253632e-05, - "loss": 1.1088, - "step": 11510 - }, - { - "epoch": 0.8853934312745173, - "learning_rate": 9.617968294344576e-05, - "loss": 1.1865, - "step": 11511 - }, - { - "epoch": 0.8854703484347358, - "learning_rate": 9.605202135488605e-05, - "loss": 1.0677, - "step": 11512 - }, - { - "epoch": 0.8855472655949542, - "learning_rate": 9.592444174431226e-05, - "loss": 1.1806, - "step": 11513 - }, - { - "epoch": 0.8856241827551726, - "learning_rate": 9.579694411917373e-05, - "loss": 0.933, - "step": 11514 - }, - { - "epoch": 0.8857010999153911, - "learning_rate": 9.566952848691484e-05, - "loss": 1.099, - "step": 11515 - }, - { - "epoch": 0.8857780170756095, - "learning_rate": 9.554219485497584e-05, - "loss": 1.0346, - "step": 11516 - }, - { - "epoch": 0.8858549342358281, - "learning_rate": 9.541494323079159e-05, - "loss": 1.0292, - "step": 11517 - }, - { - "epoch": 0.8859318513960465, - "learning_rate": 9.52877736217928e-05, - "loss": 0.9372, - "step": 11518 - }, - { - "epoch": 0.8860087685562649, - "learning_rate": 9.516068603540473e-05, - "loss": 1.1542, - "step": 11519 - }, - { - "epoch": 0.8860856857164834, - "learning_rate": 9.503368047904842e-05, - "loss": 1.0551, - "step": 11520 - }, - { - "epoch": 0.8861626028767018, - "learning_rate": 9.490675696013978e-05, - "loss": 0.8332, - "step": 11521 - }, - { - "epoch": 0.8862395200369202, - "learning_rate": 9.477991548608989e-05, - "loss": 1.1166, - "step": 11522 - }, - { - "epoch": 0.8863164371971387, - "learning_rate": 9.465315606430513e-05, - "loss": 0.8214, - "step": 11523 - }, - { - "epoch": 0.8863933543573571, - "learning_rate": 9.452647870218745e-05, - "loss": 1.0597, - "step": 11524 - }, - { - "epoch": 0.8864702715175756, - "learning_rate": 9.439988340713306e-05, - "loss": 1.2656, - "step": 11525 - }, - { - "epoch": 0.886547188677794, - "learning_rate": 9.42733701865346e-05, - "loss": 1.2766, - "step": 11526 - }, - { - "epoch": 0.8866241058380124, - "learning_rate": 9.41469390477791e-05, - "loss": 1.1914, - "step": 11527 - }, - { - "epoch": 0.886701022998231, - "learning_rate": 9.402058999824886e-05, - "loss": 1.3831, - "step": 11528 - }, - { - "epoch": 0.8867779401584494, - "learning_rate": 9.389432304532181e-05, - "loss": 1.2206, - "step": 11529 - }, - { - "epoch": 0.8868548573186678, - "learning_rate": 9.376813819637054e-05, - "loss": 1.1589, - "step": 11530 - }, - { - "epoch": 0.8869317744788863, - "learning_rate": 9.364203545876298e-05, - "loss": 1.1878, - "step": 11531 - }, - { - "epoch": 0.8870086916391047, - "learning_rate": 9.351601483986294e-05, - "loss": 1.0108, - "step": 11532 - }, - { - "epoch": 0.8870856087993231, - "learning_rate": 9.339007634702867e-05, - "loss": 0.888, - "step": 11533 - }, - { - "epoch": 0.8871625259595416, - "learning_rate": 9.326421998761347e-05, - "loss": 1.2003, - "step": 11534 - }, - { - "epoch": 0.88723944311976, - "learning_rate": 9.313844576896662e-05, - "loss": 1.4812, - "step": 11535 - }, - { - "epoch": 0.8873163602799785, - "learning_rate": 9.30127536984321e-05, - "loss": 0.8595, - "step": 11536 - }, - { - "epoch": 0.8873932774401969, - "learning_rate": 
9.28871437833495e-05, - "loss": 1.125, - "step": 11537 - }, - { - "epoch": 0.8874701946004153, - "learning_rate": 9.276161603105282e-05, - "loss": 1.3038, - "step": 11538 - }, - { - "epoch": 0.8875471117606338, - "learning_rate": 9.263617044887185e-05, - "loss": 1.1381, - "step": 11539 - }, - { - "epoch": 0.8876240289208522, - "learning_rate": 9.251080704413173e-05, - "loss": 0.8655, - "step": 11540 - }, - { - "epoch": 0.8877009460810706, - "learning_rate": 9.23855258241526e-05, - "loss": 1.2277, - "step": 11541 - }, - { - "epoch": 0.8877778632412892, - "learning_rate": 9.226032679624946e-05, - "loss": 0.9171, - "step": 11542 - }, - { - "epoch": 0.8878547804015076, - "learning_rate": 9.213520996773295e-05, - "loss": 1.4198, - "step": 11543 - }, - { - "epoch": 0.8879316975617261, - "learning_rate": 9.201017534590855e-05, - "loss": 1.0412, - "step": 11544 - }, - { - "epoch": 0.8880086147219445, - "learning_rate": 9.18852229380776e-05, - "loss": 1.0927, - "step": 11545 - }, - { - "epoch": 0.8880855318821629, - "learning_rate": 9.176035275153626e-05, - "loss": 1.1522, - "step": 11546 - }, - { - "epoch": 0.8881624490423814, - "learning_rate": 9.163556479357504e-05, - "loss": 1.1021, - "step": 11547 - }, - { - "epoch": 0.8882393662025998, - "learning_rate": 9.151085907148127e-05, - "loss": 1.2182, - "step": 11548 - }, - { - "epoch": 0.8883162833628182, - "learning_rate": 9.138623559253612e-05, - "loss": 0.8963, - "step": 11549 - }, - { - "epoch": 0.8883932005230367, - "learning_rate": 9.126169436401693e-05, - "loss": 0.8112, - "step": 11550 - }, - { - "epoch": 0.8884701176832551, - "learning_rate": 9.11372353931954e-05, - "loss": 0.9991, - "step": 11551 - }, - { - "epoch": 0.8885470348434735, - "learning_rate": 9.101285868733905e-05, - "loss": 1.1995, - "step": 11552 - }, - { - "epoch": 0.888623952003692, - "learning_rate": 9.088856425371005e-05, - "loss": 1.1579, - "step": 11553 - }, - { - "epoch": 0.8887008691639104, - "learning_rate": 9.076435209956663e-05, - "loss": 0.8164, - "step": 11554 - }, - { - "epoch": 0.888777786324129, - "learning_rate": 9.064022223216129e-05, - "loss": 1.2062, - "step": 11555 - }, - { - "epoch": 0.8888547034843474, - "learning_rate": 9.051617465874223e-05, - "loss": 1.3546, - "step": 11556 - }, - { - "epoch": 0.8889316206445658, - "learning_rate": 9.039220938655268e-05, - "loss": 1.2306, - "step": 11557 - }, - { - "epoch": 0.8890085378047843, - "learning_rate": 9.026832642283067e-05, - "loss": 0.76, - "step": 11558 - }, - { - "epoch": 0.8890854549650027, - "learning_rate": 9.014452577481092e-05, - "loss": 1.4175, - "step": 11559 - }, - { - "epoch": 0.8891623721252211, - "learning_rate": 9.00208074497213e-05, - "loss": 1.4658, - "step": 11560 - }, - { - "epoch": 0.8892392892854396, - "learning_rate": 8.989717145478587e-05, - "loss": 1.3656, - "step": 11561 - }, - { - "epoch": 0.889316206445658, - "learning_rate": 8.977361779722453e-05, - "loss": 1.3049, - "step": 11562 - }, - { - "epoch": 0.8893931236058765, - "learning_rate": 8.965014648425135e-05, - "loss": 1.3541, - "step": 11563 - }, - { - "epoch": 0.8894700407660949, - "learning_rate": 8.952675752307588e-05, - "loss": 1.1139, - "step": 11564 - }, - { - "epoch": 0.8895469579263133, - "learning_rate": 8.940345092090318e-05, - "loss": 1.5293, - "step": 11565 - }, - { - "epoch": 0.8896238750865318, - "learning_rate": 8.928022668493269e-05, - "loss": 1.3007, - "step": 11566 - }, - { - "epoch": 0.8897007922467502, - "learning_rate": 8.915708482236013e-05, - "loss": 0.9564, - "step": 11567 - }, - { - "epoch": 
0.8897777094069687, - "learning_rate": 8.903402534037608e-05, - "loss": 1.2317, - "step": 11568 - }, - { - "epoch": 0.8898546265671872, - "learning_rate": 8.89110482461653e-05, - "loss": 1.1855, - "step": 11569 - }, - { - "epoch": 0.8899315437274056, - "learning_rate": 8.878815354690923e-05, - "loss": 1.3336, - "step": 11570 - }, - { - "epoch": 0.890008460887624, - "learning_rate": 8.866534124978359e-05, - "loss": 1.3226, - "step": 11571 - }, - { - "epoch": 0.8900853780478425, - "learning_rate": 8.854261136195952e-05, - "loss": 1.1299, - "step": 11572 - }, - { - "epoch": 0.8901622952080609, - "learning_rate": 8.841996389060325e-05, - "loss": 0.9352, - "step": 11573 - }, - { - "epoch": 0.8902392123682794, - "learning_rate": 8.829739884287641e-05, - "loss": 0.9524, - "step": 11574 - }, - { - "epoch": 0.8903161295284978, - "learning_rate": 8.817491622593576e-05, - "loss": 0.7863, - "step": 11575 - }, - { - "epoch": 0.8903930466887162, - "learning_rate": 8.805251604693326e-05, - "loss": 1.3396, - "step": 11576 - }, - { - "epoch": 0.8904699638489347, - "learning_rate": 8.793019831301569e-05, - "loss": 1.0705, - "step": 11577 - }, - { - "epoch": 0.8905468810091531, - "learning_rate": 8.780796303132548e-05, - "loss": 0.8763, - "step": 11578 - }, - { - "epoch": 0.8906237981693715, - "learning_rate": 8.768581020900012e-05, - "loss": 1.0995, - "step": 11579 - }, - { - "epoch": 0.89070071532959, - "learning_rate": 8.756373985317189e-05, - "loss": 1.2277, - "step": 11580 - }, - { - "epoch": 0.8907776324898085, - "learning_rate": 8.744175197096943e-05, - "loss": 1.2304, - "step": 11581 - }, - { - "epoch": 0.890854549650027, - "learning_rate": 8.73198465695147e-05, - "loss": 1.4348, - "step": 11582 - }, - { - "epoch": 0.8909314668102454, - "learning_rate": 8.719802365592667e-05, - "loss": 1.0804, - "step": 11583 - }, - { - "epoch": 0.8910083839704638, - "learning_rate": 8.707628323731848e-05, - "loss": 1.4491, - "step": 11584 - }, - { - "epoch": 0.8910853011306823, - "learning_rate": 8.695462532079846e-05, - "loss": 1.4276, - "step": 11585 - }, - { - "epoch": 0.8911622182909007, - "learning_rate": 8.683304991347057e-05, - "loss": 1.1899, - "step": 11586 - }, - { - "epoch": 0.8912391354511191, - "learning_rate": 8.671155702243366e-05, - "loss": 0.8688, - "step": 11587 - }, - { - "epoch": 0.8913160526113376, - "learning_rate": 8.659014665478171e-05, - "loss": 1.2816, - "step": 11588 - }, - { - "epoch": 0.891392969771556, - "learning_rate": 8.646881881760421e-05, - "loss": 0.819, - "step": 11589 - }, - { - "epoch": 0.8914698869317744, - "learning_rate": 8.634757351798583e-05, - "loss": 1.4842, - "step": 11590 - }, - { - "epoch": 0.8915468040919929, - "learning_rate": 8.622641076300541e-05, - "loss": 1.0664, - "step": 11591 - }, - { - "epoch": 0.8916237212522113, - "learning_rate": 8.610533055973847e-05, - "loss": 0.8228, - "step": 11592 - }, - { - "epoch": 0.8917006384124299, - "learning_rate": 8.598433291525464e-05, - "loss": 0.9508, - "step": 11593 - }, - { - "epoch": 0.8917775555726483, - "learning_rate": 8.586341783661966e-05, - "loss": 0.8014, - "step": 11594 - }, - { - "epoch": 0.8918544727328667, - "learning_rate": 8.574258533089335e-05, - "loss": 1.1235, - "step": 11595 - }, - { - "epoch": 0.8919313898930852, - "learning_rate": 8.562183540513107e-05, - "loss": 1.075, - "step": 11596 - }, - { - "epoch": 0.8920083070533036, - "learning_rate": 8.550116806638403e-05, - "loss": 1.0194, - "step": 11597 - }, - { - "epoch": 0.892085224213522, - "learning_rate": 8.538058332169807e-05, - "loss": 1.0437, - 
"step": 11598 - }, - { - "epoch": 0.8921621413737405, - "learning_rate": 8.526008117811407e-05, - "loss": 1.1943, - "step": 11599 - }, - { - "epoch": 0.8922390585339589, - "learning_rate": 8.51396616426684e-05, - "loss": 0.6849, - "step": 11600 - }, - { - "epoch": 0.8923159756941774, - "learning_rate": 8.50193247223921e-05, - "loss": 1.1121, - "step": 11601 - }, - { - "epoch": 0.8923928928543958, - "learning_rate": 8.489907042431238e-05, - "loss": 1.0803, - "step": 11602 - }, - { - "epoch": 0.8924698100146142, - "learning_rate": 8.47788987554508e-05, - "loss": 1.3998, - "step": 11603 - }, - { - "epoch": 0.8925467271748327, - "learning_rate": 8.465880972282391e-05, - "loss": 0.9228, - "step": 11604 - }, - { - "epoch": 0.8926236443350511, - "learning_rate": 8.453880333344444e-05, - "loss": 1.2237, - "step": 11605 - }, - { - "epoch": 0.8927005614952696, - "learning_rate": 8.441887959431926e-05, - "loss": 1.1221, - "step": 11606 - }, - { - "epoch": 0.8927774786554881, - "learning_rate": 8.429903851245114e-05, - "loss": 1.1364, - "step": 11607 - }, - { - "epoch": 0.8928543958157065, - "learning_rate": 8.417928009483744e-05, - "loss": 1.4891, - "step": 11608 - }, - { - "epoch": 0.8929313129759249, - "learning_rate": 8.405960434847109e-05, - "loss": 1.404, - "step": 11609 - }, - { - "epoch": 0.8930082301361434, - "learning_rate": 8.394001128034001e-05, - "loss": 1.1013, - "step": 11610 - }, - { - "epoch": 0.8930851472963618, - "learning_rate": 8.38205008974276e-05, - "loss": 0.9517, - "step": 11611 - }, - { - "epoch": 0.8931620644565803, - "learning_rate": 8.370107320671194e-05, - "loss": 0.8864, - "step": 11612 - }, - { - "epoch": 0.8932389816167987, - "learning_rate": 8.35817282151668e-05, - "loss": 0.9827, - "step": 11613 - }, - { - "epoch": 0.8933158987770171, - "learning_rate": 8.346246592976075e-05, - "loss": 1.2023, - "step": 11614 - }, - { - "epoch": 0.8933928159372356, - "learning_rate": 8.334328635745724e-05, - "loss": 0.9427, - "step": 11615 - }, - { - "epoch": 0.893469733097454, - "learning_rate": 8.322418950521605e-05, - "loss": 1.4364, - "step": 11616 - }, - { - "epoch": 0.8935466502576724, - "learning_rate": 8.310517537999091e-05, - "loss": 1.4936, - "step": 11617 - }, - { - "epoch": 0.893623567417891, - "learning_rate": 8.298624398873095e-05, - "loss": 1.0511, - "step": 11618 - }, - { - "epoch": 0.8937004845781094, - "learning_rate": 8.286739533838111e-05, - "loss": 1.1745, - "step": 11619 - }, - { - "epoch": 0.8937774017383279, - "learning_rate": 8.274862943588102e-05, - "loss": 1.005, - "step": 11620 - }, - { - "epoch": 0.8938543188985463, - "learning_rate": 8.262994628816545e-05, - "loss": 1.0272, - "step": 11621 - }, - { - "epoch": 0.8939312360587647, - "learning_rate": 8.251134590216452e-05, - "loss": 0.984, - "step": 11622 - }, - { - "epoch": 0.8940081532189832, - "learning_rate": 8.23928282848032e-05, - "loss": 1.2201, - "step": 11623 - }, - { - "epoch": 0.8940850703792016, - "learning_rate": 8.227439344300214e-05, - "loss": 1.3198, - "step": 11624 - }, - { - "epoch": 0.89416198753942, - "learning_rate": 8.215604138367678e-05, - "loss": 0.9837, - "step": 11625 - }, - { - "epoch": 0.8942389046996385, - "learning_rate": 8.203777211373792e-05, - "loss": 1.4332, - "step": 11626 - }, - { - "epoch": 0.8943158218598569, - "learning_rate": 8.191958564009122e-05, - "loss": 1.2763, - "step": 11627 - }, - { - "epoch": 0.8943927390200754, - "learning_rate": 8.18014819696375e-05, - "loss": 0.8833, - "step": 11628 - }, - { - "epoch": 0.8944696561802938, - "learning_rate": 
8.168346110927388e-05, - "loss": 1.1425, - "step": 11629 - }, - { - "epoch": 0.8945465733405122, - "learning_rate": 8.156552306589071e-05, - "loss": 1.0878, - "step": 11630 - }, - { - "epoch": 0.8946234905007308, - "learning_rate": 8.144766784637481e-05, - "loss": 0.8387, - "step": 11631 - }, - { - "epoch": 0.8947004076609492, - "learning_rate": 8.132989545760816e-05, - "loss": 1.0278, - "step": 11632 - }, - { - "epoch": 0.8947773248211676, - "learning_rate": 8.121220590646727e-05, - "loss": 1.01, - "step": 11633 - }, - { - "epoch": 0.8948542419813861, - "learning_rate": 8.109459919982448e-05, - "loss": 1.0479, - "step": 11634 - }, - { - "epoch": 0.8949311591416045, - "learning_rate": 8.097707534454678e-05, - "loss": 1.2038, - "step": 11635 - }, - { - "epoch": 0.8950080763018229, - "learning_rate": 8.085963434749639e-05, - "loss": 0.6738, - "step": 11636 - }, - { - "epoch": 0.8950849934620414, - "learning_rate": 8.074227621553076e-05, - "loss": 0.9903, - "step": 11637 - }, - { - "epoch": 0.8951619106222598, - "learning_rate": 8.062500095550313e-05, - "loss": 1.1692, - "step": 11638 - }, - { - "epoch": 0.8952388277824783, - "learning_rate": 8.050780857426082e-05, - "loss": 1.1571, - "step": 11639 - }, - { - "epoch": 0.8953157449426967, - "learning_rate": 8.039069907864688e-05, - "loss": 1.0482, - "step": 11640 - }, - { - "epoch": 0.8953926621029151, - "learning_rate": 8.027367247549949e-05, - "loss": 1.3399, - "step": 11641 - }, - { - "epoch": 0.8954695792631336, - "learning_rate": 8.015672877165187e-05, - "loss": 1.1453, - "step": 11642 - }, - { - "epoch": 0.895546496423352, - "learning_rate": 8.003986797393286e-05, - "loss": 1.1411, - "step": 11643 - }, - { - "epoch": 0.8956234135835704, - "learning_rate": 7.99230900891657e-05, - "loss": 1.2096, - "step": 11644 - }, - { - "epoch": 0.895700330743789, - "learning_rate": 7.980639512416893e-05, - "loss": 0.8934, - "step": 11645 - }, - { - "epoch": 0.8957772479040074, - "learning_rate": 7.968978308575725e-05, - "loss": 0.8719, - "step": 11646 - }, - { - "epoch": 0.8958541650642259, - "learning_rate": 7.957325398073922e-05, - "loss": 1.1567, - "step": 11647 - }, - { - "epoch": 0.8959310822244443, - "learning_rate": 7.945680781591924e-05, - "loss": 1.2281, - "step": 11648 - }, - { - "epoch": 0.8960079993846627, - "learning_rate": 7.93404445980967e-05, - "loss": 0.8987, - "step": 11649 - }, - { - "epoch": 0.8960849165448812, - "learning_rate": 7.922416433406598e-05, - "loss": 1.1464, - "step": 11650 - }, - { - "epoch": 0.8961618337050996, - "learning_rate": 7.910796703061734e-05, - "loss": 1.4386, - "step": 11651 - }, - { - "epoch": 0.896238750865318, - "learning_rate": 7.899185269453534e-05, - "loss": 1.4999, - "step": 11652 - }, - { - "epoch": 0.8963156680255365, - "learning_rate": 7.887582133259974e-05, - "loss": 1.1821, - "step": 11653 - }, - { - "epoch": 0.8963925851857549, - "learning_rate": 7.87598729515861e-05, - "loss": 1.2184, - "step": 11654 - }, - { - "epoch": 0.8964695023459733, - "learning_rate": 7.864400755826467e-05, - "loss": 1.1649, - "step": 11655 - }, - { - "epoch": 0.8965464195061918, - "learning_rate": 7.85282251594009e-05, - "loss": 1.0316, - "step": 11656 - }, - { - "epoch": 0.8966233366664103, - "learning_rate": 7.841252576175551e-05, - "loss": 1.9651, - "step": 11657 - }, - { - "epoch": 0.8967002538266288, - "learning_rate": 7.829690937208412e-05, - "loss": 1.2488, - "step": 11658 - }, - { - "epoch": 0.8967771709868472, - "learning_rate": 7.818137599713798e-05, - "loss": 0.976, - "step": 11659 - }, - { - "epoch": 
0.8968540881470656, - "learning_rate": 7.806592564366322e-05, - "loss": 0.83, - "step": 11660 - }, - { - "epoch": 0.8969310053072841, - "learning_rate": 7.795055831840092e-05, - "loss": 1.1996, - "step": 11661 - }, - { - "epoch": 0.8970079224675025, - "learning_rate": 7.783527402808737e-05, - "loss": 1.0164, - "step": 11662 - }, - { - "epoch": 0.8970848396277209, - "learning_rate": 7.772007277945454e-05, - "loss": 1.0251, - "step": 11663 - }, - { - "epoch": 0.8971617567879394, - "learning_rate": 7.760495457922868e-05, - "loss": 1.5952, - "step": 11664 - }, - { - "epoch": 0.8972386739481578, - "learning_rate": 7.748991943413225e-05, - "loss": 0.7782, - "step": 11665 - }, - { - "epoch": 0.8973155911083763, - "learning_rate": 7.737496735088156e-05, - "loss": 1.1199, - "step": 11666 - }, - { - "epoch": 0.8973925082685947, - "learning_rate": 7.726009833618925e-05, - "loss": 1.1331, - "step": 11667 - }, - { - "epoch": 0.8974694254288131, - "learning_rate": 7.714531239676259e-05, - "loss": 1.1998, - "step": 11668 - }, - { - "epoch": 0.8975463425890317, - "learning_rate": 7.703060953930408e-05, - "loss": 1.2644, - "step": 11669 - }, - { - "epoch": 0.8976232597492501, - "learning_rate": 7.691598977051117e-05, - "loss": 1.1396, - "step": 11670 - }, - { - "epoch": 0.8977001769094685, - "learning_rate": 7.680145309707687e-05, - "loss": 1.3195, - "step": 11671 - }, - { - "epoch": 0.897777094069687, - "learning_rate": 7.668699952568863e-05, - "loss": 1.2649, - "step": 11672 - }, - { - "epoch": 0.8978540112299054, - "learning_rate": 7.657262906302998e-05, - "loss": 0.6796, - "step": 11673 - }, - { - "epoch": 0.8979309283901238, - "learning_rate": 7.645834171577936e-05, - "loss": 1.3104, - "step": 11674 - }, - { - "epoch": 0.8980078455503423, - "learning_rate": 7.634413749060915e-05, - "loss": 1.3297, - "step": 11675 - }, - { - "epoch": 0.8980847627105607, - "learning_rate": 7.62300163941888e-05, - "loss": 1.1521, - "step": 11676 - }, - { - "epoch": 0.8981616798707792, - "learning_rate": 7.611597843318118e-05, - "loss": 1.1726, - "step": 11677 - }, - { - "epoch": 0.8982385970309976, - "learning_rate": 7.600202361424613e-05, - "loss": 1.2127, - "step": 11678 - }, - { - "epoch": 0.898315514191216, - "learning_rate": 7.588815194403681e-05, - "loss": 0.9094, - "step": 11679 - }, - { - "epoch": 0.8983924313514345, - "learning_rate": 7.577436342920208e-05, - "loss": 1.1092, - "step": 11680 - }, - { - "epoch": 0.8984693485116529, - "learning_rate": 7.566065807638694e-05, - "loss": 1.2423, - "step": 11681 - }, - { - "epoch": 0.8985462656718713, - "learning_rate": 7.554703589223028e-05, - "loss": 1.2819, - "step": 11682 - }, - { - "epoch": 0.8986231828320899, - "learning_rate": 7.543349688336676e-05, - "loss": 0.9137, - "step": 11683 - }, - { - "epoch": 0.8987000999923083, - "learning_rate": 7.532004105642609e-05, - "loss": 0.9771, - "step": 11684 - }, - { - "epoch": 0.8987770171525268, - "learning_rate": 7.520666841803265e-05, - "loss": 1.3371, - "step": 11685 - }, - { - "epoch": 0.8988539343127452, - "learning_rate": 7.509337897480695e-05, - "loss": 1.3606, - "step": 11686 - }, - { - "epoch": 0.8989308514729636, - "learning_rate": 7.498017273336405e-05, - "loss": 1.3945, - "step": 11687 - }, - { - "epoch": 0.8990077686331821, - "learning_rate": 7.486704970031366e-05, - "loss": 0.8943, - "step": 11688 - }, - { - "epoch": 0.8990846857934005, - "learning_rate": 7.475400988226166e-05, - "loss": 1.0878, - "step": 11689 - }, - { - "epoch": 0.8991616029536189, - "learning_rate": 7.464105328580828e-05, - "loss": 1.2263, 
- "step": 11690 - }, - { - "epoch": 0.8992385201138374, - "learning_rate": 7.45281799175494e-05, - "loss": 1.3543, - "step": 11691 - }, - { - "epoch": 0.8993154372740558, - "learning_rate": 7.441538978407558e-05, - "loss": 0.594, - "step": 11692 - }, - { - "epoch": 0.8993923544342742, - "learning_rate": 7.430268289197289e-05, - "loss": 1.0057, - "step": 11693 - }, - { - "epoch": 0.8994692715944927, - "learning_rate": 7.419005924782224e-05, - "loss": 1.1454, - "step": 11694 - }, - { - "epoch": 0.8995461887547112, - "learning_rate": 7.407751885820002e-05, - "loss": 1.2633, - "step": 11695 - }, - { - "epoch": 0.8996231059149297, - "learning_rate": 7.396506172967766e-05, - "loss": 1.1363, - "step": 11696 - }, - { - "epoch": 0.8997000230751481, - "learning_rate": 7.385268786882154e-05, - "loss": 1.1124, - "step": 11697 - }, - { - "epoch": 0.8997769402353665, - "learning_rate": 7.374039728219329e-05, - "loss": 1.1851, - "step": 11698 - }, - { - "epoch": 0.899853857395585, - "learning_rate": 7.362818997634929e-05, - "loss": 1.4856, - "step": 11699 - }, - { - "epoch": 0.8999307745558034, - "learning_rate": 7.351606595784233e-05, - "loss": 0.9974, - "step": 11700 - }, - { - "epoch": 0.9000076917160218, - "learning_rate": 7.340402523321865e-05, - "loss": 1.0596, - "step": 11701 - }, - { - "epoch": 0.9000846088762403, - "learning_rate": 7.329206780902054e-05, - "loss": 1.3564, - "step": 11702 - }, - { - "epoch": 0.9001615260364587, - "learning_rate": 7.318019369178574e-05, - "loss": 1.1683, - "step": 11703 - }, - { - "epoch": 0.9002384431966772, - "learning_rate": 7.30684028880464e-05, - "loss": 1.2756, - "step": 11704 - }, - { - "epoch": 0.9003153603568956, - "learning_rate": 7.295669540433009e-05, - "loss": 1.3974, - "step": 11705 - }, - { - "epoch": 0.900392277517114, - "learning_rate": 7.28450712471596e-05, - "loss": 1.5267, - "step": 11706 - }, - { - "epoch": 0.9004691946773326, - "learning_rate": 7.273353042305241e-05, - "loss": 1.3843, - "step": 11707 - }, - { - "epoch": 0.900546111837551, - "learning_rate": 7.262207293852214e-05, - "loss": 0.9572, - "step": 11708 - }, - { - "epoch": 0.9006230289977694, - "learning_rate": 7.251069880007688e-05, - "loss": 1.2914, - "step": 11709 - }, - { - "epoch": 0.9006999461579879, - "learning_rate": 7.239940801421913e-05, - "loss": 1.0633, - "step": 11710 - }, - { - "epoch": 0.9007768633182063, - "learning_rate": 7.2288200587448e-05, - "loss": 0.968, - "step": 11711 - }, - { - "epoch": 0.9008537804784247, - "learning_rate": 7.217707652625666e-05, - "loss": 1.3555, - "step": 11712 - }, - { - "epoch": 0.9009306976386432, - "learning_rate": 7.206603583713405e-05, - "loss": 0.9122, - "step": 11713 - }, - { - "epoch": 0.9010076147988616, - "learning_rate": 7.19550785265637e-05, - "loss": 1.3127, - "step": 11714 - }, - { - "epoch": 0.9010845319590801, - "learning_rate": 7.18442046010242e-05, - "loss": 1.1678, - "step": 11715 - }, - { - "epoch": 0.9011614491192985, - "learning_rate": 7.173341406699041e-05, - "loss": 1.2193, - "step": 11716 - }, - { - "epoch": 0.9012383662795169, - "learning_rate": 7.162270693093098e-05, - "loss": 1.2674, - "step": 11717 - }, - { - "epoch": 0.9013152834397354, - "learning_rate": 7.151208319931024e-05, - "loss": 1.3066, - "step": 11718 - }, - { - "epoch": 0.9013922005999538, - "learning_rate": 7.140154287858786e-05, - "loss": 1.194, - "step": 11719 - }, - { - "epoch": 0.9014691177601722, - "learning_rate": 7.129108597521816e-05, - "loss": 1.1326, - "step": 11720 - }, - { - "epoch": 0.9015460349203908, - "learning_rate": 
7.118071249565067e-05, - "loss": 1.4602, - "step": 11721 - }, - { - "epoch": 0.9016229520806092, - "learning_rate": 7.107042244633088e-05, - "loss": 1.2625, - "step": 11722 - }, - { - "epoch": 0.9016998692408277, - "learning_rate": 7.0960215833698e-05, - "loss": 1.1909, - "step": 11723 - }, - { - "epoch": 0.9017767864010461, - "learning_rate": 7.085009266418752e-05, - "loss": 0.9271, - "step": 11724 - }, - { - "epoch": 0.9018537035612645, - "learning_rate": 7.074005294422964e-05, - "loss": 0.9104, - "step": 11725 - }, - { - "epoch": 0.901930620721483, - "learning_rate": 7.063009668024956e-05, - "loss": 1.2579, - "step": 11726 - }, - { - "epoch": 0.9020075378817014, - "learning_rate": 7.05202238786678e-05, - "loss": 0.9298, - "step": 11727 - }, - { - "epoch": 0.9020844550419198, - "learning_rate": 7.04104345458999e-05, - "loss": 1.3275, - "step": 11728 - }, - { - "epoch": 0.9021613722021383, - "learning_rate": 7.030072868835641e-05, - "loss": 1.109, - "step": 11729 - }, - { - "epoch": 0.9022382893623567, - "learning_rate": 7.019110631244369e-05, - "loss": 1.2055, - "step": 11730 - }, - { - "epoch": 0.9023152065225752, - "learning_rate": 7.008156742456229e-05, - "loss": 0.8952, - "step": 11731 - }, - { - "epoch": 0.9023921236827936, - "learning_rate": 6.997211203110825e-05, - "loss": 1.0172, - "step": 11732 - }, - { - "epoch": 0.902469040843012, - "learning_rate": 6.986274013847316e-05, - "loss": 0.9172, - "step": 11733 - }, - { - "epoch": 0.9025459580032306, - "learning_rate": 6.975345175304287e-05, - "loss": 1.2287, - "step": 11734 - }, - { - "epoch": 0.902622875163449, - "learning_rate": 6.964424688119947e-05, - "loss": 1.2064, - "step": 11735 - }, - { - "epoch": 0.9026997923236674, - "learning_rate": 6.953512552931901e-05, - "loss": 1.1301, - "step": 11736 - }, - { - "epoch": 0.9027767094838859, - "learning_rate": 6.942608770377307e-05, - "loss": 1.4835, - "step": 11737 - }, - { - "epoch": 0.9028536266441043, - "learning_rate": 6.931713341092905e-05, - "loss": 1.1486, - "step": 11738 - }, - { - "epoch": 0.9029305438043227, - "learning_rate": 6.920826265714852e-05, - "loss": 1.1691, - "step": 11739 - }, - { - "epoch": 0.9030074609645412, - "learning_rate": 6.909947544878875e-05, - "loss": 1.2209, - "step": 11740 - }, - { - "epoch": 0.9030843781247596, - "learning_rate": 6.899077179220198e-05, - "loss": 1.6455, - "step": 11741 - }, - { - "epoch": 0.9031612952849781, - "learning_rate": 6.888215169373512e-05, - "loss": 1.069, - "step": 11742 - }, - { - "epoch": 0.9032382124451965, - "learning_rate": 6.877361515973096e-05, - "loss": 1.3897, - "step": 11743 - }, - { - "epoch": 0.9033151296054149, - "learning_rate": 6.86651621965274e-05, - "loss": 1.392, - "step": 11744 - }, - { - "epoch": 0.9033920467656334, - "learning_rate": 6.855679281045624e-05, - "loss": 1.4806, - "step": 11745 - }, - { - "epoch": 0.9034689639258519, - "learning_rate": 6.844850700784588e-05, - "loss": 1.0319, - "step": 11746 - }, - { - "epoch": 0.9035458810860703, - "learning_rate": 6.834030479501929e-05, - "loss": 1.1348, - "step": 11747 - }, - { - "epoch": 0.9036227982462888, - "learning_rate": 6.823218617829424e-05, - "loss": 1.0737, - "step": 11748 - }, - { - "epoch": 0.9036997154065072, - "learning_rate": 6.812415116398401e-05, - "loss": 1.2461, - "step": 11749 - }, - { - "epoch": 0.9037766325667257, - "learning_rate": 6.801619975839673e-05, - "loss": 0.7658, - "step": 11750 - }, - { - "epoch": 0.9038535497269441, - "learning_rate": 6.790833196783602e-05, - "loss": 1.1775, - "step": 11751 - }, - { - "epoch": 
0.9039304668871625, - "learning_rate": 6.780054779860034e-05, - "loss": 1.1126, - "step": 11752 - }, - { - "epoch": 0.904007384047381, - "learning_rate": 6.769284725698316e-05, - "loss": 1.1478, - "step": 11753 - }, - { - "epoch": 0.9040843012075994, - "learning_rate": 6.758523034927344e-05, - "loss": 1.1646, - "step": 11754 - }, - { - "epoch": 0.9041612183678178, - "learning_rate": 6.7477697081755e-05, - "loss": 1.466, - "step": 11755 - }, - { - "epoch": 0.9042381355280363, - "learning_rate": 6.737024746070647e-05, - "loss": 1.5361, - "step": 11756 - }, - { - "epoch": 0.9043150526882547, - "learning_rate": 6.726288149240284e-05, - "loss": 0.8641, - "step": 11757 - }, - { - "epoch": 0.9043919698484731, - "learning_rate": 6.715559918311226e-05, - "loss": 1.3044, - "step": 11758 - }, - { - "epoch": 0.9044688870086917, - "learning_rate": 6.704840053909955e-05, - "loss": 1.1893, - "step": 11759 - }, - { - "epoch": 0.9045458041689101, - "learning_rate": 6.694128556662422e-05, - "loss": 0.8801, - "step": 11760 - }, - { - "epoch": 0.9046227213291286, - "learning_rate": 6.683425427194057e-05, - "loss": 1.366, - "step": 11761 - }, - { - "epoch": 0.904699638489347, - "learning_rate": 6.672730666129861e-05, - "loss": 1.1066, - "step": 11762 - }, - { - "epoch": 0.9047765556495654, - "learning_rate": 6.662044274094286e-05, - "loss": 1.1511, - "step": 11763 - }, - { - "epoch": 0.9048534728097839, - "learning_rate": 6.651366251711316e-05, - "loss": 1.4392, - "step": 11764 - }, - { - "epoch": 0.9049303899700023, - "learning_rate": 6.640696599604485e-05, - "loss": 1.2444, - "step": 11765 - }, - { - "epoch": 0.9050073071302207, - "learning_rate": 6.630035318396776e-05, - "loss": 1.4095, - "step": 11766 - }, - { - "epoch": 0.9050842242904392, - "learning_rate": 6.619382408710728e-05, - "loss": 0.9544, - "step": 11767 - }, - { - "epoch": 0.9051611414506576, - "learning_rate": 6.608737871168357e-05, - "loss": 1.0499, - "step": 11768 - }, - { - "epoch": 0.9052380586108761, - "learning_rate": 6.598101706391219e-05, - "loss": 1.0724, - "step": 11769 - }, - { - "epoch": 0.9053149757710945, - "learning_rate": 6.587473915000397e-05, - "loss": 1.0502, - "step": 11770 - }, - { - "epoch": 0.905391892931313, - "learning_rate": 6.57685449761643e-05, - "loss": 0.8725, - "step": 11771 - }, - { - "epoch": 0.9054688100915315, - "learning_rate": 6.566243454859372e-05, - "loss": 1.0236, - "step": 11772 - }, - { - "epoch": 0.9055457272517499, - "learning_rate": 6.55564078734886e-05, - "loss": 0.8038, - "step": 11773 - }, - { - "epoch": 0.9056226444119683, - "learning_rate": 6.545046495703966e-05, - "loss": 1.3477, - "step": 11774 - }, - { - "epoch": 0.9056995615721868, - "learning_rate": 6.534460580543327e-05, - "loss": 1.1728, - "step": 11775 - }, - { - "epoch": 0.9057764787324052, - "learning_rate": 6.523883042485052e-05, - "loss": 1.3801, - "step": 11776 - }, - { - "epoch": 0.9058533958926236, - "learning_rate": 6.51331388214676e-05, - "loss": 1.0779, - "step": 11777 - }, - { - "epoch": 0.9059303130528421, - "learning_rate": 6.502753100145609e-05, - "loss": 1.2253, - "step": 11778 - }, - { - "epoch": 0.9060072302130605, - "learning_rate": 6.492200697098255e-05, - "loss": 0.7492, - "step": 11779 - }, - { - "epoch": 0.906084147373279, - "learning_rate": 6.481656673620873e-05, - "loss": 1.4086, - "step": 11780 - }, - { - "epoch": 0.9061610645334974, - "learning_rate": 6.471121030329119e-05, - "loss": 1.1758, - "step": 11781 - }, - { - "epoch": 0.9062379816937158, - "learning_rate": 6.460593767838203e-05, - "loss": 1.1186, - 
"step": 11782 - }, - { - "epoch": 0.9063148988539343, - "learning_rate": 6.450074886762813e-05, - "loss": 1.094, - "step": 11783 - }, - { - "epoch": 0.9063918160141528, - "learning_rate": 6.439564387717145e-05, - "loss": 1.0366, - "step": 11784 - }, - { - "epoch": 0.9064687331743712, - "learning_rate": 6.429062271314923e-05, - "loss": 1.0036, - "step": 11785 - }, - { - "epoch": 0.9065456503345897, - "learning_rate": 6.418568538169372e-05, - "loss": 1.1917, - "step": 11786 - }, - { - "epoch": 0.9066225674948081, - "learning_rate": 6.408083188893255e-05, - "loss": 1.2543, - "step": 11787 - }, - { - "epoch": 0.9066994846550266, - "learning_rate": 6.397606224098812e-05, - "loss": 1.7143, - "step": 11788 - }, - { - "epoch": 0.906776401815245, - "learning_rate": 6.387137644397789e-05, - "loss": 1.221, - "step": 11789 - }, - { - "epoch": 0.9068533189754634, - "learning_rate": 6.376677450401481e-05, - "loss": 1.5333, - "step": 11790 - }, - { - "epoch": 0.9069302361356819, - "learning_rate": 6.366225642720629e-05, - "loss": 1.2567, - "step": 11791 - }, - { - "epoch": 0.9070071532959003, - "learning_rate": 6.355782221965583e-05, - "loss": 1.2335, - "step": 11792 - }, - { - "epoch": 0.9070840704561187, - "learning_rate": 6.34534718874612e-05, - "loss": 1.305, - "step": 11793 - }, - { - "epoch": 0.9071609876163372, - "learning_rate": 6.334920543671519e-05, - "loss": 0.6841, - "step": 11794 - }, - { - "epoch": 0.9072379047765556, - "learning_rate": 6.324502287350659e-05, - "loss": 1.4732, - "step": 11795 - }, - { - "epoch": 0.907314821936774, - "learning_rate": 6.314092420391837e-05, - "loss": 1.0183, - "step": 11796 - }, - { - "epoch": 0.9073917390969926, - "learning_rate": 6.303690943402902e-05, - "loss": 1.175, - "step": 11797 - }, - { - "epoch": 0.907468656257211, - "learning_rate": 6.293297856991214e-05, - "loss": 1.0436, - "step": 11798 - }, - { - "epoch": 0.9075455734174295, - "learning_rate": 6.282913161763609e-05, - "loss": 0.7594, - "step": 11799 - }, - { - "epoch": 0.9076224905776479, - "learning_rate": 6.272536858326499e-05, - "loss": 0.7875, - "step": 11800 - }, - { - "epoch": 0.9076994077378663, - "learning_rate": 6.262168947285768e-05, - "loss": 1.0691, - "step": 11801 - }, - { - "epoch": 0.9077763248980848, - "learning_rate": 6.25180942924678e-05, - "loss": 1.4212, - "step": 11802 - }, - { - "epoch": 0.9078532420583032, - "learning_rate": 6.241458304814434e-05, - "loss": 0.9927, - "step": 11803 - }, - { - "epoch": 0.9079301592185216, - "learning_rate": 6.231115574593182e-05, - "loss": 1.0773, - "step": 11804 - }, - { - "epoch": 0.9080070763787401, - "learning_rate": 6.220781239186906e-05, - "loss": 1.1155, - "step": 11805 - }, - { - "epoch": 0.9080839935389585, - "learning_rate": 6.210455299199075e-05, - "loss": 1.5424, - "step": 11806 - }, - { - "epoch": 0.908160910699177, - "learning_rate": 6.200137755232587e-05, - "loss": 1.1167, - "step": 11807 - }, - { - "epoch": 0.9082378278593954, - "learning_rate": 6.189828607889947e-05, - "loss": 1.1444, - "step": 11808 - }, - { - "epoch": 0.9083147450196138, - "learning_rate": 6.179527857773071e-05, - "loss": 0.7555, - "step": 11809 - }, - { - "epoch": 0.9083916621798324, - "learning_rate": 6.169235505483462e-05, - "loss": 1.2785, - "step": 11810 - }, - { - "epoch": 0.9084685793400508, - "learning_rate": 6.158951551622088e-05, - "loss": 1.3092, - "step": 11811 - }, - { - "epoch": 0.9085454965002692, - "learning_rate": 6.148675996789437e-05, - "loss": 1.1576, - "step": 11812 - }, - { - "epoch": 0.9086224136604877, - "learning_rate": 
6.138408841585497e-05, - "loss": 1.1901, - "step": 11813 - }, - { - "epoch": 0.9086993308207061, - "learning_rate": 6.128150086609819e-05, - "loss": 1.3538, - "step": 11814 - }, - { - "epoch": 0.9087762479809245, - "learning_rate": 6.117899732461408e-05, - "loss": 0.8202, - "step": 11815 - }, - { - "epoch": 0.908853165141143, - "learning_rate": 6.107657779738735e-05, - "loss": 0.7978, - "step": 11816 - }, - { - "epoch": 0.9089300823013614, - "learning_rate": 6.097424229039922e-05, - "loss": 1.192, - "step": 11817 - }, - { - "epoch": 0.9090069994615799, - "learning_rate": 6.087199080962458e-05, - "loss": 1.1199, - "step": 11818 - }, - { - "epoch": 0.9090839166217983, - "learning_rate": 6.0769823361034484e-05, - "loss": 1.6453, - "step": 11819 - }, - { - "epoch": 0.9091608337820167, - "learning_rate": 6.066773995059416e-05, - "loss": 1.581, - "step": 11820 - }, - { - "epoch": 0.9092377509422352, - "learning_rate": 6.056574058426451e-05, - "loss": 1.4239, - "step": 11821 - }, - { - "epoch": 0.9093146681024536, - "learning_rate": 6.0463825268001425e-05, - "loss": 1.0648, - "step": 11822 - }, - { - "epoch": 0.909391585262672, - "learning_rate": 6.0361994007755826e-05, - "loss": 1.4888, - "step": 11823 - }, - { - "epoch": 0.9094685024228906, - "learning_rate": 6.026024680947379e-05, - "loss": 0.6561, - "step": 11824 - }, - { - "epoch": 0.909545419583109, - "learning_rate": 6.0158583679096547e-05, - "loss": 1.6584, - "step": 11825 - }, - { - "epoch": 0.9096223367433275, - "learning_rate": 6.005700462255986e-05, - "loss": 1.0234, - "step": 11826 - }, - { - "epoch": 0.9096992539035459, - "learning_rate": 5.995550964579549e-05, - "loss": 0.9035, - "step": 11827 - }, - { - "epoch": 0.9097761710637643, - "learning_rate": 5.985409875472986e-05, - "loss": 1.1757, - "step": 11828 - }, - { - "epoch": 0.9098530882239828, - "learning_rate": 5.975277195528406e-05, - "loss": 1.0951, - "step": 11829 - }, - { - "epoch": 0.9099300053842012, - "learning_rate": 5.965152925337503e-05, - "loss": 1.3039, - "step": 11830 - }, - { - "epoch": 0.9100069225444196, - "learning_rate": 5.9550370654914195e-05, - "loss": 1.0342, - "step": 11831 - }, - { - "epoch": 0.9100838397046381, - "learning_rate": 5.944929616580852e-05, - "loss": 1.0479, - "step": 11832 - }, - { - "epoch": 0.9101607568648565, - "learning_rate": 5.934830579195977e-05, - "loss": 0.9659, - "step": 11833 - }, - { - "epoch": 0.9102376740250749, - "learning_rate": 5.924739953926472e-05, - "loss": 1.0638, - "step": 11834 - }, - { - "epoch": 0.9103145911852935, - "learning_rate": 5.914657741361551e-05, - "loss": 1.254, - "step": 11835 - }, - { - "epoch": 0.9103915083455119, - "learning_rate": 5.904583942089925e-05, - "loss": 1.3537, - "step": 11836 - }, - { - "epoch": 0.9104684255057304, - "learning_rate": 5.8945185566998405e-05, - "loss": 1.1721, - "step": 11837 - }, - { - "epoch": 0.9105453426659488, - "learning_rate": 5.8844615857789774e-05, - "loss": 1.312, - "step": 11838 - }, - { - "epoch": 0.9106222598261672, - "learning_rate": 5.874413029914616e-05, - "loss": 0.9252, - "step": 11839 - }, - { - "epoch": 0.9106991769863857, - "learning_rate": 5.864372889693454e-05, - "loss": 1.2886, - "step": 11840 - }, - { - "epoch": 0.9107760941466041, - "learning_rate": 5.854341165701804e-05, - "loss": 1.248, - "step": 11841 - }, - { - "epoch": 0.9108530113068225, - "learning_rate": 5.844317858525383e-05, - "loss": 1.3042, - "step": 11842 - }, - { - "epoch": 0.910929928467041, - "learning_rate": 5.834302968749472e-05, - "loss": 1.1956, - "step": 11843 - }, - { - "epoch": 
0.9110068456272594, - "learning_rate": 5.824296496958853e-05, - "loss": 1.3257, - "step": 11844 - }, - { - "epoch": 0.9110837627874779, - "learning_rate": 5.8142984437378424e-05, - "loss": 1.1969, - "step": 11845 - }, - { - "epoch": 0.9111606799476963, - "learning_rate": 5.804308809670189e-05, - "loss": 1.0234, - "step": 11846 - }, - { - "epoch": 0.9112375971079147, - "learning_rate": 5.794327595339227e-05, - "loss": 1.3848, - "step": 11847 - }, - { - "epoch": 0.9113145142681333, - "learning_rate": 5.784354801327757e-05, - "loss": 1.2557, - "step": 11848 - }, - { - "epoch": 0.9113914314283517, - "learning_rate": 5.7743904282181293e-05, - "loss": 1.4429, - "step": 11849 - }, - { - "epoch": 0.9114683485885701, - "learning_rate": 5.764434476592162e-05, - "loss": 1.1427, - "step": 11850 - }, - { - "epoch": 0.9115452657487886, - "learning_rate": 5.75448694703114e-05, - "loss": 0.9271, - "step": 11851 - }, - { - "epoch": 0.911622182909007, - "learning_rate": 5.744547840115982e-05, - "loss": 1.1305, - "step": 11852 - }, - { - "epoch": 0.9116991000692255, - "learning_rate": 5.7346171564269914e-05, - "loss": 1.236, - "step": 11853 - }, - { - "epoch": 0.9117760172294439, - "learning_rate": 5.7246948965441025e-05, - "loss": 1.1155, - "step": 11854 - }, - { - "epoch": 0.9118529343896623, - "learning_rate": 5.714781061046603e-05, - "loss": 1.3916, - "step": 11855 - }, - { - "epoch": 0.9119298515498808, - "learning_rate": 5.704875650513414e-05, - "loss": 1.1753, - "step": 11856 - }, - { - "epoch": 0.9120067687100992, - "learning_rate": 5.694978665522921e-05, - "loss": 1.5762, - "step": 11857 - }, - { - "epoch": 0.9120836858703176, - "learning_rate": 5.6850901066530126e-05, - "loss": 1.3602, - "step": 11858 - }, - { - "epoch": 0.9121606030305361, - "learning_rate": 5.6752099744810937e-05, - "loss": 1.1882, - "step": 11859 - }, - { - "epoch": 0.9122375201907545, - "learning_rate": 5.6653382695840696e-05, - "loss": 1.444, - "step": 11860 - }, - { - "epoch": 0.912314437350973, - "learning_rate": 5.655474992538378e-05, - "loss": 0.9654, - "step": 11861 - }, - { - "epoch": 0.9123913545111915, - "learning_rate": 5.6456201439199105e-05, - "loss": 1.1835, - "step": 11862 - }, - { - "epoch": 0.9124682716714099, - "learning_rate": 5.635773724304172e-05, - "loss": 1.0858, - "step": 11863 - }, - { - "epoch": 0.9125451888316284, - "learning_rate": 5.6259357342660024e-05, - "loss": 0.9491, - "step": 11864 - }, - { - "epoch": 0.9126221059918468, - "learning_rate": 5.6161061743799424e-05, - "loss": 1.1276, - "step": 11865 - }, - { - "epoch": 0.9126990231520652, - "learning_rate": 5.606285045219917e-05, - "loss": 1.0894, - "step": 11866 - }, - { - "epoch": 0.9127759403122837, - "learning_rate": 5.5964723473593826e-05, - "loss": 1.0319, - "step": 11867 - }, - { - "epoch": 0.9128528574725021, - "learning_rate": 5.5866680813713324e-05, - "loss": 1.1197, - "step": 11868 - }, - { - "epoch": 0.9129297746327205, - "learning_rate": 5.576872247828224e-05, - "loss": 1.1269, - "step": 11869 - }, - { - "epoch": 0.913006691792939, - "learning_rate": 5.567084847302051e-05, - "loss": 1.0615, - "step": 11870 - }, - { - "epoch": 0.9130836089531574, - "learning_rate": 5.557305880364322e-05, - "loss": 1.0588, - "step": 11871 - }, - { - "epoch": 0.913160526113376, - "learning_rate": 5.5475353475860466e-05, - "loss": 1.1178, - "step": 11872 - }, - { - "epoch": 0.9132374432735944, - "learning_rate": 5.5377732495377364e-05, - "loss": 1.5532, - "step": 11873 - }, - { - "epoch": 0.9133143604338128, - "learning_rate": 5.5280195867893843e-05, - 
"loss": 1.2373, - "step": 11874 - }, - { - "epoch": 0.9133912775940313, - "learning_rate": 5.518274359910519e-05, - "loss": 1.2619, - "step": 11875 - }, - { - "epoch": 0.9134681947542497, - "learning_rate": 5.508537569470234e-05, - "loss": 1.2821, - "step": 11876 - }, - { - "epoch": 0.9135451119144681, - "learning_rate": 5.49880921603701e-05, - "loss": 1.1162, - "step": 11877 - }, - { - "epoch": 0.9136220290746866, - "learning_rate": 5.489089300178873e-05, - "loss": 0.8705, - "step": 11878 - }, - { - "epoch": 0.913698946234905, - "learning_rate": 5.479377822463455e-05, - "loss": 1.3552, - "step": 11879 - }, - { - "epoch": 0.9137758633951234, - "learning_rate": 5.4696747834577846e-05, - "loss": 0.9738, - "step": 11880 - }, - { - "epoch": 0.9138527805553419, - "learning_rate": 5.459980183728408e-05, - "loss": 1.042, - "step": 11881 - }, - { - "epoch": 0.9139296977155603, - "learning_rate": 5.45029402384144e-05, - "loss": 1.2623, - "step": 11882 - }, - { - "epoch": 0.9140066148757788, - "learning_rate": 5.440616304362428e-05, - "loss": 1.2096, - "step": 11883 - }, - { - "epoch": 0.9140835320359972, - "learning_rate": 5.430947025856503e-05, - "loss": 1.2831, - "step": 11884 - }, - { - "epoch": 0.9141604491962156, - "learning_rate": 5.42128618888828e-05, - "loss": 1.2196, - "step": 11885 - }, - { - "epoch": 0.9142373663564342, - "learning_rate": 5.4116337940217756e-05, - "loss": 0.8838, - "step": 11886 - }, - { - "epoch": 0.9143142835166526, - "learning_rate": 5.401989841820687e-05, - "loss": 1.0429, - "step": 11887 - }, - { - "epoch": 0.914391200676871, - "learning_rate": 5.392354332848115e-05, - "loss": 1.365, - "step": 11888 - }, - { - "epoch": 0.9144681178370895, - "learning_rate": 5.382727267666676e-05, - "loss": 0.9185, - "step": 11889 - }, - { - "epoch": 0.9145450349973079, - "learning_rate": 5.3731086468385035e-05, - "loss": 1.2586, - "step": 11890 - }, - { - "epoch": 0.9146219521575264, - "learning_rate": 5.3634984709252156e-05, - "loss": 1.1925, - "step": 11891 - }, - { - "epoch": 0.9146988693177448, - "learning_rate": 5.353896740488029e-05, - "loss": 1.3063, - "step": 11892 - }, - { - "epoch": 0.9147757864779632, - "learning_rate": 5.344303456087546e-05, - "loss": 1.0249, - "step": 11893 - }, - { - "epoch": 0.9148527036381817, - "learning_rate": 5.3347186182839344e-05, - "loss": 1.1116, - "step": 11894 - }, - { - "epoch": 0.9149296207984001, - "learning_rate": 5.325142227636881e-05, - "loss": 1.2574, - "step": 11895 - }, - { - "epoch": 0.9150065379586185, - "learning_rate": 5.3155742847055544e-05, - "loss": 0.8567, - "step": 11896 - }, - { - "epoch": 0.915083455118837, - "learning_rate": 5.306014790048608e-05, - "loss": 1.3659, - "step": 11897 - }, - { - "epoch": 0.9151603722790554, - "learning_rate": 5.296463744224278e-05, - "loss": 1.1399, - "step": 11898 - }, - { - "epoch": 0.9152372894392738, - "learning_rate": 5.2869211477902354e-05, - "loss": 1.3039, - "step": 11899 - }, - { - "epoch": 0.9153142065994924, - "learning_rate": 5.277387001303652e-05, - "loss": 0.8714, - "step": 11900 - }, - { - "epoch": 0.9153911237597108, - "learning_rate": 5.267861305321298e-05, - "loss": 1.0065, - "step": 11901 - }, - { - "epoch": 0.9154680409199293, - "learning_rate": 5.258344060399345e-05, - "loss": 1.0591, - "step": 11902 - }, - { - "epoch": 0.9155449580801477, - "learning_rate": 5.2488352670935325e-05, - "loss": 1.2966, - "step": 11903 - }, - { - "epoch": 0.9156218752403661, - "learning_rate": 5.239334925959099e-05, - "loss": 1.0625, - "step": 11904 - }, - { - "epoch": 0.9156987924005846, - 
"learning_rate": 5.2298430375507354e-05, - "loss": 1.1937, - "step": 11905 - }, - { - "epoch": 0.915775709560803, - "learning_rate": 5.220359602422731e-05, - "loss": 0.8867, - "step": 11906 - }, - { - "epoch": 0.9158526267210214, - "learning_rate": 5.210884621128825e-05, - "loss": 1.236, - "step": 11907 - }, - { - "epoch": 0.9159295438812399, - "learning_rate": 5.2014180942222435e-05, - "loss": 1.1658, - "step": 11908 - }, - { - "epoch": 0.9160064610414583, - "learning_rate": 5.191960022255776e-05, - "loss": 1.125, - "step": 11909 - }, - { - "epoch": 0.9160833782016768, - "learning_rate": 5.182510405781649e-05, - "loss": 1.4948, - "step": 11910 - }, - { - "epoch": 0.9161602953618952, - "learning_rate": 5.173069245351719e-05, - "loss": 1.0294, - "step": 11911 - }, - { - "epoch": 0.9162372125221137, - "learning_rate": 5.163636541517181e-05, - "loss": 1.3724, - "step": 11912 - }, - { - "epoch": 0.9163141296823322, - "learning_rate": 5.154212294828825e-05, - "loss": 0.9865, - "step": 11913 - }, - { - "epoch": 0.9163910468425506, - "learning_rate": 5.144796505836996e-05, - "loss": 1.3347, - "step": 11914 - }, - { - "epoch": 0.916467964002769, - "learning_rate": 5.13538917509147e-05, - "loss": 1.3131, - "step": 11915 - }, - { - "epoch": 0.9165448811629875, - "learning_rate": 5.1259903031415254e-05, - "loss": 1.2446, - "step": 11916 - }, - { - "epoch": 0.9166217983232059, - "learning_rate": 5.1165998905360055e-05, - "loss": 1.2039, - "step": 11917 - }, - { - "epoch": 0.9166987154834243, - "learning_rate": 5.107217937823205e-05, - "loss": 1.0967, - "step": 11918 - }, - { - "epoch": 0.9167756326436428, - "learning_rate": 5.097844445550953e-05, - "loss": 1.3681, - "step": 11919 - }, - { - "epoch": 0.9168525498038612, - "learning_rate": 5.0884794142665944e-05, - "loss": 0.7595, - "step": 11920 - }, - { - "epoch": 0.9169294669640797, - "learning_rate": 5.079122844516909e-05, - "loss": 1.2948, - "step": 11921 - }, - { - "epoch": 0.9170063841242981, - "learning_rate": 5.069774736848309e-05, - "loss": 1.2925, - "step": 11922 - }, - { - "epoch": 0.9170833012845165, - "learning_rate": 5.060435091806592e-05, - "loss": 0.9102, - "step": 11923 - }, - { - "epoch": 0.917160218444735, - "learning_rate": 5.051103909937121e-05, - "loss": 1.0557, - "step": 11924 - }, - { - "epoch": 0.9172371356049535, - "learning_rate": 5.041781191784761e-05, - "loss": 1.4557, - "step": 11925 - }, - { - "epoch": 0.9173140527651719, - "learning_rate": 5.032466937893859e-05, - "loss": 0.9919, - "step": 11926 - }, - { - "epoch": 0.9173909699253904, - "learning_rate": 5.0231611488082806e-05, - "loss": 1.1075, - "step": 11927 - }, - { - "epoch": 0.9174678870856088, - "learning_rate": 5.0138638250714406e-05, - "loss": 1.0142, - "step": 11928 - }, - { - "epoch": 0.9175448042458273, - "learning_rate": 5.004574967226172e-05, - "loss": 0.9889, - "step": 11929 - }, - { - "epoch": 0.9176217214060457, - "learning_rate": 4.995294575814907e-05, - "loss": 0.9621, - "step": 11930 - }, - { - "epoch": 0.9176986385662641, - "learning_rate": 4.986022651379496e-05, - "loss": 1.2588, - "step": 11931 - }, - { - "epoch": 0.9177755557264826, - "learning_rate": 4.976759194461339e-05, - "loss": 0.7398, - "step": 11932 - }, - { - "epoch": 0.917852472886701, - "learning_rate": 4.9675042056013695e-05, - "loss": 1.2111, - "step": 11933 - }, - { - "epoch": 0.9179293900469194, - "learning_rate": 4.9582576853399894e-05, - "loss": 1.2539, - "step": 11934 - }, - { - "epoch": 0.9180063072071379, - "learning_rate": 4.9490196342170836e-05, - "loss": 1.0612, - "step": 
11935 - }, - { - "epoch": 0.9180832243673563, - "learning_rate": 4.939790052772103e-05, - "loss": 1.5774, - "step": 11936 - }, - { - "epoch": 0.9181601415275747, - "learning_rate": 4.93056894154395e-05, - "loss": 0.9043, - "step": 11937 - }, - { - "epoch": 0.9182370586877933, - "learning_rate": 4.9213563010710614e-05, - "loss": 1.3238, - "step": 11938 - }, - { - "epoch": 0.9183139758480117, - "learning_rate": 4.91215213189139e-05, - "loss": 1.0755, - "step": 11939 - }, - { - "epoch": 0.9183908930082302, - "learning_rate": 4.902956434542338e-05, - "loss": 1.2284, - "step": 11940 - }, - { - "epoch": 0.9184678101684486, - "learning_rate": 4.893769209560894e-05, - "loss": 0.9964, - "step": 11941 - }, - { - "epoch": 0.918544727328667, - "learning_rate": 4.8845904574834956e-05, - "loss": 1.2363, - "step": 11942 - }, - { - "epoch": 0.9186216444888855, - "learning_rate": 4.875420178846079e-05, - "loss": 1.29, - "step": 11943 - }, - { - "epoch": 0.9186985616491039, - "learning_rate": 4.8662583741841515e-05, - "loss": 0.6637, - "step": 11944 - }, - { - "epoch": 0.9187754788093223, - "learning_rate": 4.8571050440326333e-05, - "loss": 0.9039, - "step": 11945 - }, - { - "epoch": 0.9188523959695408, - "learning_rate": 4.8479601889259984e-05, - "loss": 1.3058, - "step": 11946 - }, - { - "epoch": 0.9189293131297592, - "learning_rate": 4.838823809398285e-05, - "loss": 1.5151, - "step": 11947 - }, - { - "epoch": 0.9190062302899777, - "learning_rate": 4.829695905982884e-05, - "loss": 0.7713, - "step": 11948 - }, - { - "epoch": 0.9190831474501961, - "learning_rate": 4.820576479212868e-05, - "loss": 0.9511, - "step": 11949 - }, - { - "epoch": 0.9191600646104146, - "learning_rate": 4.811465529620679e-05, - "loss": 1.5938, - "step": 11950 - }, - { - "epoch": 0.9192369817706331, - "learning_rate": 4.8023630577383404e-05, - "loss": 0.6984, - "step": 11951 - }, - { - "epoch": 0.9193138989308515, - "learning_rate": 4.793269064097344e-05, - "loss": 1.1737, - "step": 11952 - }, - { - "epoch": 0.9193908160910699, - "learning_rate": 4.7841835492286823e-05, - "loss": 1.3361, - "step": 11953 - }, - { - "epoch": 0.9194677332512884, - "learning_rate": 4.77510651366288e-05, - "loss": 1.1423, - "step": 11954 - }, - { - "epoch": 0.9195446504115068, - "learning_rate": 4.76603795792998e-05, - "loss": 1.0514, - "step": 11955 - }, - { - "epoch": 0.9196215675717253, - "learning_rate": 4.7569778825594604e-05, - "loss": 0.7971, - "step": 11956 - }, - { - "epoch": 0.9196984847319437, - "learning_rate": 4.7479262880803955e-05, - "loss": 1.0356, - "step": 11957 - }, - { - "epoch": 0.9197754018921621, - "learning_rate": 4.73888317502128e-05, - "loss": 1.0134, - "step": 11958 - }, - { - "epoch": 0.9198523190523806, - "learning_rate": 4.7298485439101426e-05, - "loss": 1.2458, - "step": 11959 - }, - { - "epoch": 0.919929236212599, - "learning_rate": 4.7208223952745935e-05, - "loss": 1.1579, - "step": 11960 - }, - { - "epoch": 0.9200061533728174, - "learning_rate": 4.711804729641611e-05, - "loss": 1.464, - "step": 11961 - }, - { - "epoch": 0.920083070533036, - "learning_rate": 4.7027955475377416e-05, - "loss": 1.223, - "step": 11962 - }, - { - "epoch": 0.9201599876932544, - "learning_rate": 4.6937948494890805e-05, - "loss": 1.3812, - "step": 11963 - }, - { - "epoch": 0.9202369048534728, - "learning_rate": 4.684802636021174e-05, - "loss": 1.089, - "step": 11964 - }, - { - "epoch": 0.9203138220136913, - "learning_rate": 4.6758189076591015e-05, - "loss": 1.0512, - "step": 11965 - }, - { - "epoch": 0.9203907391739097, - "learning_rate": 
4.666843664927395e-05, - "loss": 0.7737, - "step": 11966 - }, - { - "epoch": 0.9204676563341282, - "learning_rate": 4.65787690835015e-05, - "loss": 0.8502, - "step": 11967 - }, - { - "epoch": 0.9205445734943466, - "learning_rate": 4.64891863845095e-05, - "loss": 1.0651, - "step": 11968 - }, - { - "epoch": 0.920621490654565, - "learning_rate": 4.6399688557528907e-05, - "loss": 1.2638, - "step": 11969 - }, - { - "epoch": 0.9206984078147835, - "learning_rate": 4.631027560778506e-05, - "loss": 1.1519, - "step": 11970 - }, - { - "epoch": 0.9207753249750019, - "learning_rate": 4.6220947540499433e-05, - "loss": 1.3492, - "step": 11971 - }, - { - "epoch": 0.9208522421352203, - "learning_rate": 4.6131704360887706e-05, - "loss": 1.1754, - "step": 11972 - }, - { - "epoch": 0.9209291592954388, - "learning_rate": 4.604254607416103e-05, - "loss": 1.3276, - "step": 11973 - }, - { - "epoch": 0.9210060764556572, - "learning_rate": 4.595347268552524e-05, - "loss": 1.0822, - "step": 11974 - }, - { - "epoch": 0.9210829936158758, - "learning_rate": 4.586448420018152e-05, - "loss": 1.2038, - "step": 11975 - }, - { - "epoch": 0.9211599107760942, - "learning_rate": 4.577558062332604e-05, - "loss": 1.0735, - "step": 11976 - }, - { - "epoch": 0.9212368279363126, - "learning_rate": 4.568676196015015e-05, - "loss": 0.9305, - "step": 11977 - }, - { - "epoch": 0.9213137450965311, - "learning_rate": 4.559802821583969e-05, - "loss": 1.2265, - "step": 11978 - }, - { - "epoch": 0.9213906622567495, - "learning_rate": 4.55093793955762e-05, - "loss": 1.3947, - "step": 11979 - }, - { - "epoch": 0.9214675794169679, - "learning_rate": 4.542081550453603e-05, - "loss": 1.1411, - "step": 11980 - }, - { - "epoch": 0.9215444965771864, - "learning_rate": 4.533233654789004e-05, - "loss": 1.0006, - "step": 11981 - }, - { - "epoch": 0.9216214137374048, - "learning_rate": 4.524394253080527e-05, - "loss": 0.8626, - "step": 11982 - }, - { - "epoch": 0.9216983308976232, - "learning_rate": 4.515563345844259e-05, - "loss": 0.9308, - "step": 11983 - }, - { - "epoch": 0.9217752480578417, - "learning_rate": 4.506740933595854e-05, - "loss": 0.9866, - "step": 11984 - }, - { - "epoch": 0.9218521652180601, - "learning_rate": 4.4979270168505004e-05, - "loss": 1.2695, - "step": 11985 - }, - { - "epoch": 0.9219290823782786, - "learning_rate": 4.489121596122819e-05, - "loss": 1.0283, - "step": 11986 - }, - { - "epoch": 0.922005999538497, - "learning_rate": 4.480324671926983e-05, - "loss": 1.3385, - "step": 11987 - }, - { - "epoch": 0.9220829166987154, - "learning_rate": 4.471536244776647e-05, - "loss": 0.9881, - "step": 11988 - }, - { - "epoch": 0.922159833858934, - "learning_rate": 4.462756315184951e-05, - "loss": 1.2224, - "step": 11989 - }, - { - "epoch": 0.9222367510191524, - "learning_rate": 4.453984883664603e-05, - "loss": 1.3394, - "step": 11990 - }, - { - "epoch": 0.9223136681793708, - "learning_rate": 4.44522195072779e-05, - "loss": 1.0757, - "step": 11991 - }, - { - "epoch": 0.9223905853395893, - "learning_rate": 4.4364675168861226e-05, - "loss": 1.2237, - "step": 11992 - }, - { - "epoch": 0.9224675024998077, - "learning_rate": 4.42772158265084e-05, - "loss": 0.7563, - "step": 11993 - }, - { - "epoch": 0.9225444196600262, - "learning_rate": 4.4189841485325844e-05, - "loss": 1.928, - "step": 11994 - }, - { - "epoch": 0.9226213368202446, - "learning_rate": 4.410255215041598e-05, - "loss": 1.156, - "step": 11995 - }, - { - "epoch": 0.922698253980463, - "learning_rate": 4.401534782687522e-05, - "loss": 1.1637, - "step": 11996 - }, - { - "epoch": 
0.9227751711406815, - "learning_rate": 4.39282285197955e-05, - "loss": 0.9752, - "step": 11997 - }, - { - "epoch": 0.9228520883008999, - "learning_rate": 4.384119423426425e-05, - "loss": 1.1092, - "step": 11998 - }, - { - "epoch": 0.9229290054611183, - "learning_rate": 4.375424497536323e-05, - "loss": 1.0258, - "step": 11999 - }, - { - "epoch": 0.9230059226213368, - "learning_rate": 4.3667380748169396e-05, - "loss": 1.3356, - "step": 12000 - }, - { - "epoch": 0.9230828397815553, - "learning_rate": 4.3580601557755004e-05, - "loss": 0.8829, - "step": 12001 - }, - { - "epoch": 0.9231597569417737, - "learning_rate": 4.349390740918718e-05, - "loss": 1.0759, - "step": 12002 - }, - { - "epoch": 0.9232366741019922, - "learning_rate": 4.3407298307527876e-05, - "loss": 1.1812, - "step": 12003 - }, - { - "epoch": 0.9233135912622106, - "learning_rate": 4.3320774257834704e-05, - "loss": 1.2493, - "step": 12004 - }, - { - "epoch": 0.9233905084224291, - "learning_rate": 4.3234335265159466e-05, - "loss": 0.9774, - "step": 12005 - }, - { - "epoch": 0.9234674255826475, - "learning_rate": 4.314798133454961e-05, - "loss": 0.9717, - "step": 12006 - }, - { - "epoch": 0.9235443427428659, - "learning_rate": 4.306171247104746e-05, - "loss": 1.1435, - "step": 12007 - }, - { - "epoch": 0.9236212599030844, - "learning_rate": 4.297552867969029e-05, - "loss": 1.2968, - "step": 12008 - }, - { - "epoch": 0.9236981770633028, - "learning_rate": 4.28894299655106e-05, - "loss": 1.1827, - "step": 12009 - }, - { - "epoch": 0.9237750942235212, - "learning_rate": 4.2803416333535514e-05, - "loss": 1.2274, - "step": 12010 - }, - { - "epoch": 0.9238520113837397, - "learning_rate": 4.2717487788787536e-05, - "loss": 1.2271, - "step": 12011 - }, - { - "epoch": 0.9239289285439581, - "learning_rate": 4.2631644336284136e-05, - "loss": 1.1826, - "step": 12012 - }, - { - "epoch": 0.9240058457041767, - "learning_rate": 4.254588598103815e-05, - "loss": 1.0768, - "step": 12013 - }, - { - "epoch": 0.9240827628643951, - "learning_rate": 4.2460212728056555e-05, - "loss": 0.9962, - "step": 12014 - }, - { - "epoch": 0.9241596800246135, - "learning_rate": 4.2374624582342366e-05, - "loss": 1.0629, - "step": 12015 - }, - { - "epoch": 0.924236597184832, - "learning_rate": 4.228912154889258e-05, - "loss": 0.6099, - "step": 12016 - }, - { - "epoch": 0.9243135143450504, - "learning_rate": 4.2203703632700705e-05, - "loss": 0.9834, - "step": 12017 - }, - { - "epoch": 0.9243904315052688, - "learning_rate": 4.2118370838753584e-05, - "loss": 0.7054, - "step": 12018 - }, - { - "epoch": 0.9244673486654873, - "learning_rate": 4.2033123172033895e-05, - "loss": 1.1603, - "step": 12019 - }, - { - "epoch": 0.9245442658257057, - "learning_rate": 4.194796063751999e-05, - "loss": 0.8002, - "step": 12020 - }, - { - "epoch": 0.9246211829859241, - "learning_rate": 4.1862883240184055e-05, - "loss": 1.2631, - "step": 12021 - }, - { - "epoch": 0.9246981001461426, - "learning_rate": 4.177789098499396e-05, - "loss": 1.0248, - "step": 12022 - }, - { - "epoch": 0.924775017306361, - "learning_rate": 4.169298387691273e-05, - "loss": 0.9804, - "step": 12023 - }, - { - "epoch": 0.9248519344665795, - "learning_rate": 4.160816192089772e-05, - "loss": 1.2527, - "step": 12024 - }, - { - "epoch": 0.9249288516267979, - "learning_rate": 4.152342512190216e-05, - "loss": 0.9706, - "step": 12025 - }, - { - "epoch": 0.9250057687870163, - "learning_rate": 4.143877348487391e-05, - "loss": 1.1578, - "step": 12026 - }, - { - "epoch": 0.9250826859472349, - "learning_rate": 4.135420701475551e-05, - 
"loss": 0.9385, - "step": 12027 - }, - { - "epoch": 0.9251596031074533, - "learning_rate": 4.1269725716485184e-05, - "loss": 1.1869, - "step": 12028 - }, - { - "epoch": 0.9252365202676717, - "learning_rate": 4.118532959499599e-05, - "loss": 1.2315, - "step": 12029 - }, - { - "epoch": 0.9253134374278902, - "learning_rate": 4.110101865521565e-05, - "loss": 1.4298, - "step": 12030 - }, - { - "epoch": 0.9253903545881086, - "learning_rate": 4.101679290206722e-05, - "loss": 1.627, - "step": 12031 - }, - { - "epoch": 0.9254672717483271, - "learning_rate": 4.093265234046878e-05, - "loss": 0.9546, - "step": 12032 - }, - { - "epoch": 0.9255441889085455, - "learning_rate": 4.084859697533338e-05, - "loss": 1.1561, - "step": 12033 - }, - { - "epoch": 0.9256211060687639, - "learning_rate": 4.076462681156912e-05, - "loss": 0.9854, - "step": 12034 - }, - { - "epoch": 0.9256980232289824, - "learning_rate": 4.068074185407905e-05, - "loss": 0.8717, - "step": 12035 - }, - { - "epoch": 0.9257749403892008, - "learning_rate": 4.0596942107761435e-05, - "loss": 1.3097, - "step": 12036 - }, - { - "epoch": 0.9258518575494192, - "learning_rate": 4.0513227577509195e-05, - "loss": 1.1436, - "step": 12037 - }, - { - "epoch": 0.9259287747096377, - "learning_rate": 4.0429598268210575e-05, - "loss": 1.1507, - "step": 12038 - }, - { - "epoch": 0.9260056918698562, - "learning_rate": 4.0346054184749016e-05, - "loss": 1.0428, - "step": 12039 - }, - { - "epoch": 0.9260826090300746, - "learning_rate": 4.02625953320026e-05, - "loss": 1.1619, - "step": 12040 - }, - { - "epoch": 0.9261595261902931, - "learning_rate": 4.017922171484428e-05, - "loss": 1.3877, - "step": 12041 - }, - { - "epoch": 0.9262364433505115, - "learning_rate": 4.009593333814282e-05, - "loss": 1.326, - "step": 12042 - }, - { - "epoch": 0.92631336051073, - "learning_rate": 4.001273020676116e-05, - "loss": 1.2347, - "step": 12043 - }, - { - "epoch": 0.9263902776709484, - "learning_rate": 3.992961232555792e-05, - "loss": 1.2249, - "step": 12044 - }, - { - "epoch": 0.9264671948311668, - "learning_rate": 3.9846579699386046e-05, - "loss": 1.1475, - "step": 12045 - }, - { - "epoch": 0.9265441119913853, - "learning_rate": 3.9763632333094156e-05, - "loss": 1.2705, - "step": 12046 - }, - { - "epoch": 0.9266210291516037, - "learning_rate": 3.968077023152572e-05, - "loss": 1.0513, - "step": 12047 - }, - { - "epoch": 0.9266979463118221, - "learning_rate": 3.959799339951903e-05, - "loss": 1.1733, - "step": 12048 - }, - { - "epoch": 0.9267748634720406, - "learning_rate": 3.951530184190738e-05, - "loss": 1.2158, - "step": 12049 - }, - { - "epoch": 0.926851780632259, - "learning_rate": 3.943269556351942e-05, - "loss": 1.0553, - "step": 12050 - }, - { - "epoch": 0.9269286977924776, - "learning_rate": 3.9350174569178273e-05, - "loss": 1.2771, - "step": 12051 - }, - { - "epoch": 0.927005614952696, - "learning_rate": 3.9267738863703105e-05, - "loss": 1.1422, - "step": 12052 - }, - { - "epoch": 0.9270825321129144, - "learning_rate": 3.918538845190689e-05, - "loss": 0.8328, - "step": 12053 - }, - { - "epoch": 0.9271594492731329, - "learning_rate": 3.910312333859794e-05, - "loss": 1.0746, - "step": 12054 - }, - { - "epoch": 0.9272363664333513, - "learning_rate": 3.902094352858043e-05, - "loss": 1.0455, - "step": 12055 - }, - { - "epoch": 0.9273132835935697, - "learning_rate": 3.893884902665268e-05, - "loss": 1.1229, - "step": 12056 - }, - { - "epoch": 0.9273902007537882, - "learning_rate": 3.8856839837608015e-05, - "loss": 1.0004, - "step": 12057 - }, - { - "epoch": 0.9274671179140066, 
- "learning_rate": 3.877491596623545e-05, - "loss": 1.0069, - "step": 12058 - }, - { - "epoch": 0.9275440350742251, - "learning_rate": 3.8693077417318145e-05, - "loss": 0.9788, - "step": 12059 - }, - { - "epoch": 0.9276209522344435, - "learning_rate": 3.861132419563512e-05, - "loss": 1.388, - "step": 12060 - }, - { - "epoch": 0.9276978693946619, - "learning_rate": 3.852965630596006e-05, - "loss": 1.3543, - "step": 12061 - }, - { - "epoch": 0.9277747865548804, - "learning_rate": 3.844807375306114e-05, - "loss": 1.283, - "step": 12062 - }, - { - "epoch": 0.9278517037150988, - "learning_rate": 3.836657654170256e-05, - "loss": 1.1871, - "step": 12063 - }, - { - "epoch": 0.9279286208753172, - "learning_rate": 3.8285164676642836e-05, - "loss": 0.9242, - "step": 12064 - }, - { - "epoch": 0.9280055380355358, - "learning_rate": 3.820383816263584e-05, - "loss": 0.9492, - "step": 12065 - }, - { - "epoch": 0.9280824551957542, - "learning_rate": 3.81225970044301e-05, - "loss": 1.2078, - "step": 12066 - }, - { - "epoch": 0.9281593723559726, - "learning_rate": 3.804144120676933e-05, - "loss": 1.0509, - "step": 12067 - }, - { - "epoch": 0.9282362895161911, - "learning_rate": 3.7960370774392406e-05, - "loss": 0.9027, - "step": 12068 - }, - { - "epoch": 0.9283132066764095, - "learning_rate": 3.7879385712033196e-05, - "loss": 0.9869, - "step": 12069 - }, - { - "epoch": 0.928390123836628, - "learning_rate": 3.77984860244206e-05, - "loss": 0.9502, - "step": 12070 - }, - { - "epoch": 0.9284670409968464, - "learning_rate": 3.771767171627816e-05, - "loss": 1.239, - "step": 12071 - }, - { - "epoch": 0.9285439581570648, - "learning_rate": 3.763694279232477e-05, - "loss": 1.1828, - "step": 12072 - }, - { - "epoch": 0.9286208753172833, - "learning_rate": 3.755629925727433e-05, - "loss": 0.8113, - "step": 12073 - }, - { - "epoch": 0.9286977924775017, - "learning_rate": 3.7475741115835906e-05, - "loss": 1.0285, - "step": 12074 - }, - { - "epoch": 0.9287747096377201, - "learning_rate": 3.739526837271307e-05, - "loss": 1.148, - "step": 12075 - }, - { - "epoch": 0.9288516267979386, - "learning_rate": 3.7314881032604553e-05, - "loss": 1.3272, - "step": 12076 - }, - { - "epoch": 0.928928543958157, - "learning_rate": 3.723457910020478e-05, - "loss": 1.1356, - "step": 12077 - }, - { - "epoch": 0.9290054611183756, - "learning_rate": 3.715436258020233e-05, - "loss": 1.0096, - "step": 12078 - }, - { - "epoch": 0.929082378278594, - "learning_rate": 3.707423147728112e-05, - "loss": 1.2124, - "step": 12079 - }, - { - "epoch": 0.9291592954388124, - "learning_rate": 3.699418579612024e-05, - "loss": 0.8465, - "step": 12080 - }, - { - "epoch": 0.9292362125990309, - "learning_rate": 3.6914225541393456e-05, - "loss": 1.2877, - "step": 12081 - }, - { - "epoch": 0.9293131297592493, - "learning_rate": 3.683435071776986e-05, - "loss": 0.923, - "step": 12082 - }, - { - "epoch": 0.9293900469194677, - "learning_rate": 3.675456132991339e-05, - "loss": 1.2139, - "step": 12083 - }, - { - "epoch": 0.9294669640796862, - "learning_rate": 3.667485738248316e-05, - "loss": 1.0502, - "step": 12084 - }, - { - "epoch": 0.9295438812399046, - "learning_rate": 3.6595238880132944e-05, - "loss": 1.1556, - "step": 12085 - }, - { - "epoch": 0.929620798400123, - "learning_rate": 3.6515705827512016e-05, - "loss": 1.2585, - "step": 12086 - }, - { - "epoch": 0.9296977155603415, - "learning_rate": 3.6436258229263996e-05, - "loss": 0.7302, - "step": 12087 - }, - { - "epoch": 0.9297746327205599, - "learning_rate": 3.635689609002835e-05, - "loss": 1.4461, - "step": 12088 
- }, - { - "epoch": 0.9298515498807784, - "learning_rate": 3.627761941443869e-05, - "loss": 1.3529, - "step": 12089 - }, - { - "epoch": 0.9299284670409969, - "learning_rate": 3.619842820712449e-05, - "loss": 1.0811, - "step": 12090 - }, - { - "epoch": 0.9300053842012153, - "learning_rate": 3.611932247270955e-05, - "loss": 1.2806, - "step": 12091 - }, - { - "epoch": 0.9300823013614338, - "learning_rate": 3.6040302215813004e-05, - "loss": 1.1022, - "step": 12092 - }, - { - "epoch": 0.9301592185216522, - "learning_rate": 3.596136744104883e-05, - "loss": 1.0145, - "step": 12093 - }, - { - "epoch": 0.9302361356818706, - "learning_rate": 3.5882518153026344e-05, - "loss": 1.3617, - "step": 12094 - }, - { - "epoch": 0.9303130528420891, - "learning_rate": 3.580375435634919e-05, - "loss": 1.2117, - "step": 12095 - }, - { - "epoch": 0.9303899700023075, - "learning_rate": 3.572507605561703e-05, - "loss": 0.9597, - "step": 12096 - }, - { - "epoch": 0.930466887162526, - "learning_rate": 3.564648325542369e-05, - "loss": 0.8889, - "step": 12097 - }, - { - "epoch": 0.9305438043227444, - "learning_rate": 3.5567975960358326e-05, - "loss": 1.2961, - "step": 12098 - }, - { - "epoch": 0.9306207214829628, - "learning_rate": 3.5489554175004936e-05, - "loss": 1.1012, - "step": 12099 - }, - { - "epoch": 0.9306976386431813, - "learning_rate": 3.5411217903942696e-05, - "loss": 1.0175, - "step": 12100 - }, - { - "epoch": 0.9307745558033997, - "learning_rate": 3.533296715174611e-05, - "loss": 1.2612, - "step": 12101 - }, - { - "epoch": 0.9308514729636181, - "learning_rate": 3.525480192298369e-05, - "loss": 1.1085, - "step": 12102 - }, - { - "epoch": 0.9309283901238367, - "learning_rate": 3.517672222221996e-05, - "loss": 0.8248, - "step": 12103 - }, - { - "epoch": 0.9310053072840551, - "learning_rate": 3.50987280540141e-05, - "loss": 1.152, - "step": 12104 - }, - { - "epoch": 0.9310822244442735, - "learning_rate": 3.502081942292012e-05, - "loss": 1.1902, - "step": 12105 - }, - { - "epoch": 0.931159141604492, - "learning_rate": 3.494299633348741e-05, - "loss": 1.0832, - "step": 12106 - }, - { - "epoch": 0.9312360587647104, - "learning_rate": 3.486525879025998e-05, - "loss": 1.0917, - "step": 12107 - }, - { - "epoch": 0.9313129759249289, - "learning_rate": 3.47876067977767e-05, - "loss": 1.348, - "step": 12108 - }, - { - "epoch": 0.9313898930851473, - "learning_rate": 3.471004036057229e-05, - "loss": 1.3364, - "step": 12109 - }, - { - "epoch": 0.9314668102453657, - "learning_rate": 3.463255948317595e-05, - "loss": 1.5034, - "step": 12110 - }, - { - "epoch": 0.9315437274055842, - "learning_rate": 3.455516417011123e-05, - "loss": 1.4563, - "step": 12111 - }, - { - "epoch": 0.9316206445658026, - "learning_rate": 3.447785442589802e-05, - "loss": 1.2429, - "step": 12112 - }, - { - "epoch": 0.931697561726021, - "learning_rate": 3.440063025505002e-05, - "loss": 1.0759, - "step": 12113 - }, - { - "epoch": 0.9317744788862395, - "learning_rate": 3.4323491662076825e-05, - "loss": 0.9891, - "step": 12114 - }, - { - "epoch": 0.931851396046458, - "learning_rate": 3.42464386514823e-05, - "loss": 1.1364, - "step": 12115 - }, - { - "epoch": 0.9319283132066765, - "learning_rate": 3.41694712277657e-05, - "loss": 0.8841, - "step": 12116 - }, - { - "epoch": 0.9320052303668949, - "learning_rate": 3.4092589395421424e-05, - "loss": 0.8101, - "step": 12117 - }, - { - "epoch": 0.9320821475271133, - "learning_rate": 3.4015793158938544e-05, - "loss": 1.2486, - "step": 12118 - }, - { - "epoch": 0.9321590646873318, - "learning_rate": 
3.393908252280148e-05, - "loss": 1.1403, - "step": 12119 - }, - { - "epoch": 0.9322359818475502, - "learning_rate": 3.386245749148914e-05, - "loss": 0.958, - "step": 12120 - }, - { - "epoch": 0.9323128990077686, - "learning_rate": 3.3785918069475953e-05, - "loss": 1.4061, - "step": 12121 - }, - { - "epoch": 0.9323898161679871, - "learning_rate": 3.370946426123101e-05, - "loss": 1.1242, - "step": 12122 - }, - { - "epoch": 0.9324667333282055, - "learning_rate": 3.3633096071218724e-05, - "loss": 1.0106, - "step": 12123 - }, - { - "epoch": 0.9325436504884239, - "learning_rate": 3.3556813503898056e-05, - "loss": 0.9884, - "step": 12124 - }, - { - "epoch": 0.9326205676486424, - "learning_rate": 3.348061656372309e-05, - "loss": 0.9873, - "step": 12125 - }, - { - "epoch": 0.9326974848088608, - "learning_rate": 3.3404505255143605e-05, - "loss": 1.1096, - "step": 12126 - }, - { - "epoch": 0.9327744019690793, - "learning_rate": 3.3328479582603376e-05, - "loss": 1.4038, - "step": 12127 - }, - { - "epoch": 0.9328513191292978, - "learning_rate": 3.325253955054186e-05, - "loss": 1.0945, - "step": 12128 - }, - { - "epoch": 0.9329282362895162, - "learning_rate": 3.317668516339317e-05, - "loss": 1.1887, - "step": 12129 - }, - { - "epoch": 0.9330051534497347, - "learning_rate": 3.310091642558627e-05, - "loss": 1.9605, - "step": 12130 - }, - { - "epoch": 0.9330820706099531, - "learning_rate": 3.3025233341545945e-05, - "loss": 1.4124, - "step": 12131 - }, - { - "epoch": 0.9331589877701715, - "learning_rate": 3.2949635915691e-05, - "loss": 1.2597, - "step": 12132 - }, - { - "epoch": 0.93323590493039, - "learning_rate": 3.287412415243557e-05, - "loss": 0.9069, - "step": 12133 - }, - { - "epoch": 0.9333128220906084, - "learning_rate": 3.279869805618929e-05, - "loss": 1.1168, - "step": 12134 - }, - { - "epoch": 0.9333897392508269, - "learning_rate": 3.272335763135581e-05, - "loss": 0.9069, - "step": 12135 - }, - { - "epoch": 0.9334666564110453, - "learning_rate": 3.264810288233494e-05, - "loss": 1.2822, - "step": 12136 - }, - { - "epoch": 0.9335435735712637, - "learning_rate": 3.25729338135205e-05, - "loss": 1.0019, - "step": 12137 - }, - { - "epoch": 0.9336204907314822, - "learning_rate": 3.2497850429301643e-05, - "loss": 1.2547, - "step": 12138 - }, - { - "epoch": 0.9336974078917006, - "learning_rate": 3.24228527340627e-05, - "loss": 1.4441, - "step": 12139 - }, - { - "epoch": 0.933774325051919, - "learning_rate": 3.2347940732183e-05, - "loss": 1.1251, - "step": 12140 - }, - { - "epoch": 0.9338512422121376, - "learning_rate": 3.2273114428036546e-05, - "loss": 1.2194, - "step": 12141 - }, - { - "epoch": 0.933928159372356, - "learning_rate": 3.21983738259925e-05, - "loss": 1.4413, - "step": 12142 - }, - { - "epoch": 0.9340050765325744, - "learning_rate": 3.212371893041521e-05, - "loss": 1.2906, - "step": 12143 - }, - { - "epoch": 0.9340819936927929, - "learning_rate": 3.204914974566353e-05, - "loss": 1.2077, - "step": 12144 - }, - { - "epoch": 0.9341589108530113, - "learning_rate": 3.1974666276092135e-05, - "loss": 1.3196, - "step": 12145 - }, - { - "epoch": 0.9342358280132298, - "learning_rate": 3.190026852604971e-05, - "loss": 1.1493, - "step": 12146 - }, - { - "epoch": 0.9343127451734482, - "learning_rate": 3.182595649988079e-05, - "loss": 1.0146, - "step": 12147 - }, - { - "epoch": 0.9343896623336666, - "learning_rate": 3.175173020192424e-05, - "loss": 1.3035, - "step": 12148 - }, - { - "epoch": 0.9344665794938851, - "learning_rate": 3.16775896365144e-05, - "loss": 1.1528, - "step": 12149 - }, - { - "epoch": 
0.9345434966541035, - "learning_rate": 3.16035348079805e-05, - "loss": 1.3201, - "step": 12150 - }, - { - "epoch": 0.9346204138143219, - "learning_rate": 3.152956572064625e-05, - "loss": 1.0597, - "step": 12151 - }, - { - "epoch": 0.9346973309745404, - "learning_rate": 3.145568237883117e-05, - "loss": 1.223, - "step": 12152 - }, - { - "epoch": 0.9347742481347588, - "learning_rate": 3.138188478684917e-05, - "loss": 1.2984, - "step": 12153 - }, - { - "epoch": 0.9348511652949774, - "learning_rate": 3.130817294900962e-05, - "loss": 1.4366, - "step": 12154 - }, - { - "epoch": 0.9349280824551958, - "learning_rate": 3.123454686961657e-05, - "loss": 1.1027, - "step": 12155 - }, - { - "epoch": 0.9350049996154142, - "learning_rate": 3.116100655296894e-05, - "loss": 1.3012, - "step": 12156 - }, - { - "epoch": 0.9350819167756327, - "learning_rate": 3.1087552003360775e-05, - "loss": 1.1954, - "step": 12157 - }, - { - "epoch": 0.9351588339358511, - "learning_rate": 3.101418322508165e-05, - "loss": 1.4599, - "step": 12158 - }, - { - "epoch": 0.9352357510960695, - "learning_rate": 3.094090022241497e-05, - "loss": 0.9842, - "step": 12159 - }, - { - "epoch": 0.935312668256288, - "learning_rate": 3.086770299964031e-05, - "loss": 1.1034, - "step": 12160 - }, - { - "epoch": 0.9353895854165064, - "learning_rate": 3.0794591561031426e-05, - "loss": 1.6368, - "step": 12161 - }, - { - "epoch": 0.9354665025767248, - "learning_rate": 3.072156591085773e-05, - "loss": 1.0797, - "step": 12162 - }, - { - "epoch": 0.9355434197369433, - "learning_rate": 3.064862605338281e-05, - "loss": 1.1395, - "step": 12163 - }, - { - "epoch": 0.9356203368971617, - "learning_rate": 3.0575771992866095e-05, - "loss": 1.2444, - "step": 12164 - }, - { - "epoch": 0.9356972540573802, - "learning_rate": 3.050300373356135e-05, - "loss": 1.0648, - "step": 12165 - }, - { - "epoch": 0.9357741712175986, - "learning_rate": 3.043032127971784e-05, - "loss": 1.4334, - "step": 12166 - }, - { - "epoch": 0.935851088377817, - "learning_rate": 3.0357724635579342e-05, - "loss": 1.1942, - "step": 12167 - }, - { - "epoch": 0.9359280055380356, - "learning_rate": 3.0285213805384793e-05, - "loss": 1.1077, - "step": 12168 - }, - { - "epoch": 0.936004922698254, - "learning_rate": 3.0212788793368474e-05, - "loss": 0.946, - "step": 12169 - }, - { - "epoch": 0.9360818398584724, - "learning_rate": 3.0140449603759167e-05, - "loss": 1.0302, - "step": 12170 - }, - { - "epoch": 0.9361587570186909, - "learning_rate": 3.006819624078083e-05, - "loss": 1.0565, - "step": 12171 - }, - { - "epoch": 0.9362356741789093, - "learning_rate": 2.999602870865259e-05, - "loss": 0.9015, - "step": 12172 - }, - { - "epoch": 0.9363125913391278, - "learning_rate": 2.9923947011587904e-05, - "loss": 1.0329, - "step": 12173 - }, - { - "epoch": 0.9363895084993462, - "learning_rate": 2.985195115379624e-05, - "loss": 1.229, - "step": 12174 - }, - { - "epoch": 0.9364664256595646, - "learning_rate": 2.9780041139481405e-05, - "loss": 1.1141, - "step": 12175 - }, - { - "epoch": 0.9365433428197831, - "learning_rate": 2.9708216972842038e-05, - "loss": 1.3029, - "step": 12176 - }, - { - "epoch": 0.9366202599800015, - "learning_rate": 2.9636478658072287e-05, - "loss": 1.1784, - "step": 12177 - }, - { - "epoch": 0.9366971771402199, - "learning_rate": 2.9564826199360795e-05, - "loss": 1.3269, - "step": 12178 - }, - { - "epoch": 0.9367740943004385, - "learning_rate": 2.9493259600891555e-05, - "loss": 1.0073, - "step": 12179 - }, - { - "epoch": 0.9368510114606569, - "learning_rate": 2.942177886684355e-05, - 
"loss": 1.264, - "step": 12180 - }, - { - "epoch": 0.9369279286208754, - "learning_rate": 2.9350384001390285e-05, - "loss": 0.7756, - "step": 12181 - }, - { - "epoch": 0.9370048457810938, - "learning_rate": 2.9279075008700916e-05, - "loss": 0.9171, - "step": 12182 - }, - { - "epoch": 0.9370817629413122, - "learning_rate": 2.920785189293912e-05, - "loss": 1.203, - "step": 12183 - }, - { - "epoch": 0.9371586801015307, - "learning_rate": 2.9136714658263564e-05, - "loss": 1.1214, - "step": 12184 - }, - { - "epoch": 0.9372355972617491, - "learning_rate": 2.906566330882826e-05, - "loss": 1.3194, - "step": 12185 - }, - { - "epoch": 0.9373125144219675, - "learning_rate": 2.8994697848781725e-05, - "loss": 1.1999, - "step": 12186 - }, - { - "epoch": 0.937389431582186, - "learning_rate": 2.8923818282267812e-05, - "loss": 0.9972, - "step": 12187 - }, - { - "epoch": 0.9374663487424044, - "learning_rate": 2.8853024613425372e-05, - "loss": 1.1405, - "step": 12188 - }, - { - "epoch": 0.9375432659026228, - "learning_rate": 2.8782316846388102e-05, - "loss": 1.2609, - "step": 12189 - }, - { - "epoch": 0.9376201830628413, - "learning_rate": 2.8711694985284366e-05, - "loss": 0.7842, - "step": 12190 - }, - { - "epoch": 0.9376971002230597, - "learning_rate": 2.8641159034238363e-05, - "loss": 1.2734, - "step": 12191 - }, - { - "epoch": 0.9377740173832783, - "learning_rate": 2.85707089973683e-05, - "loss": 1.3886, - "step": 12192 - }, - { - "epoch": 0.9378509345434967, - "learning_rate": 2.8500344878788222e-05, - "loss": 1.0924, - "step": 12193 - }, - { - "epoch": 0.9379278517037151, - "learning_rate": 2.8430066682606337e-05, - "loss": 1.0277, - "step": 12194 - }, - { - "epoch": 0.9380047688639336, - "learning_rate": 2.8359874412926534e-05, - "loss": 1.2736, - "step": 12195 - }, - { - "epoch": 0.938081686024152, - "learning_rate": 2.828976807384753e-05, - "loss": 1.0204, - "step": 12196 - }, - { - "epoch": 0.9381586031843704, - "learning_rate": 2.8219747669462558e-05, - "loss": 1.2011, - "step": 12197 - }, - { - "epoch": 0.9382355203445889, - "learning_rate": 2.8149813203860507e-05, - "loss": 1.3007, - "step": 12198 - }, - { - "epoch": 0.9383124375048073, - "learning_rate": 2.807996468112478e-05, - "loss": 1.2816, - "step": 12199 - }, - { - "epoch": 0.9383893546650258, - "learning_rate": 2.8010202105333782e-05, - "loss": 0.9556, - "step": 12200 - }, - { - "epoch": 0.9384662718252442, - "learning_rate": 2.7940525480561252e-05, - "loss": 0.8778, - "step": 12201 - }, - { - "epoch": 0.9385431889854626, - "learning_rate": 2.787093481087577e-05, - "loss": 0.861, - "step": 12202 - }, - { - "epoch": 0.9386201061456811, - "learning_rate": 2.780143010034025e-05, - "loss": 1.3557, - "step": 12203 - }, - { - "epoch": 0.9386970233058995, - "learning_rate": 2.7732011353013786e-05, - "loss": 1.0216, - "step": 12204 - }, - { - "epoch": 0.938773940466118, - "learning_rate": 2.7662678572949462e-05, - "loss": 1.2426, - "step": 12205 - }, - { - "epoch": 0.9388508576263365, - "learning_rate": 2.7593431764195707e-05, - "loss": 0.9678, - "step": 12206 - }, - { - "epoch": 0.9389277747865549, - "learning_rate": 2.7524270930796125e-05, - "loss": 1.3174, - "step": 12207 - }, - { - "epoch": 0.9390046919467733, - "learning_rate": 2.7455196076788812e-05, - "loss": 0.9357, - "step": 12208 - }, - { - "epoch": 0.9390816091069918, - "learning_rate": 2.7386207206207214e-05, - "loss": 1.1123, - "step": 12209 - }, - { - "epoch": 0.9391585262672102, - "learning_rate": 2.7317304323079773e-05, - "loss": 1.0222, - "step": 12210 - }, - { - "epoch": 
0.9392354434274287, - "learning_rate": 2.724848743142977e-05, - "loss": 1.2877, - "step": 12211 - }, - { - "epoch": 0.9393123605876471, - "learning_rate": 2.7179756535275325e-05, - "loss": 1.5601, - "step": 12212 - }, - { - "epoch": 0.9393892777478655, - "learning_rate": 2.711111163863006e-05, - "loss": 1.0236, - "step": 12213 - }, - { - "epoch": 0.939466194908084, - "learning_rate": 2.7042552745501605e-05, - "loss": 1.4442, - "step": 12214 - }, - { - "epoch": 0.9395431120683024, - "learning_rate": 2.6974079859894084e-05, - "loss": 0.8903, - "step": 12215 - }, - { - "epoch": 0.9396200292285208, - "learning_rate": 2.690569298580481e-05, - "loss": 0.8188, - "step": 12216 - }, - { - "epoch": 0.9396969463887394, - "learning_rate": 2.683739212722741e-05, - "loss": 0.9695, - "step": 12217 - }, - { - "epoch": 0.9397738635489578, - "learning_rate": 2.6769177288150036e-05, - "loss": 1.0914, - "step": 12218 - }, - { - "epoch": 0.9398507807091763, - "learning_rate": 2.6701048472555834e-05, - "loss": 1.0811, - "step": 12219 - }, - { - "epoch": 0.9399276978693947, - "learning_rate": 2.6633005684422785e-05, - "loss": 1.045, - "step": 12220 - }, - { - "epoch": 0.9400046150296131, - "learning_rate": 2.6565048927724045e-05, - "loss": 0.731, - "step": 12221 - }, - { - "epoch": 0.9400815321898316, - "learning_rate": 2.6497178206427443e-05, - "loss": 1.2308, - "step": 12222 - }, - { - "epoch": 0.94015844935005, - "learning_rate": 2.642939352449647e-05, - "loss": 0.9671, - "step": 12223 - }, - { - "epoch": 0.9402353665102684, - "learning_rate": 2.6361694885888967e-05, - "loss": 1.104, - "step": 12224 - }, - { - "epoch": 0.9403122836704869, - "learning_rate": 2.6294082294557764e-05, - "loss": 1.1925, - "step": 12225 - }, - { - "epoch": 0.9403892008307053, - "learning_rate": 2.622655575445104e-05, - "loss": 1.2977, - "step": 12226 - }, - { - "epoch": 0.9404661179909237, - "learning_rate": 2.6159115269511812e-05, - "loss": 1.1346, - "step": 12227 - }, - { - "epoch": 0.9405430351511422, - "learning_rate": 2.6091760843677593e-05, - "loss": 0.9481, - "step": 12228 - }, - { - "epoch": 0.9406199523113606, - "learning_rate": 2.6024492480881735e-05, - "loss": 0.8181, - "step": 12229 - }, - { - "epoch": 0.9406968694715792, - "learning_rate": 2.5957310185051597e-05, - "loss": 0.9931, - "step": 12230 - }, - { - "epoch": 0.9407737866317976, - "learning_rate": 2.5890213960110542e-05, - "loss": 1.4884, - "step": 12231 - }, - { - "epoch": 0.940850703792016, - "learning_rate": 2.5823203809976105e-05, - "loss": 1.0598, - "step": 12232 - }, - { - "epoch": 0.9409276209522345, - "learning_rate": 2.5756279738561317e-05, - "loss": 1.04, - "step": 12233 - }, - { - "epoch": 0.9410045381124529, - "learning_rate": 2.5689441749773555e-05, - "loss": 1.4693, - "step": 12234 - }, - { - "epoch": 0.9410814552726713, - "learning_rate": 2.562268984751587e-05, - "loss": 1.1455, - "step": 12235 - }, - { - "epoch": 0.9411583724328898, - "learning_rate": 2.55560240356858e-05, - "loss": 0.9774, - "step": 12236 - }, - { - "epoch": 0.9412352895931082, - "learning_rate": 2.5489444318176235e-05, - "loss": 1.2256, - "step": 12237 - }, - { - "epoch": 0.9413122067533267, - "learning_rate": 2.5422950698874735e-05, - "loss": 1.2458, - "step": 12238 - }, - { - "epoch": 0.9413891239135451, - "learning_rate": 2.535654318166386e-05, - "loss": 1.0736, - "step": 12239 - }, - { - "epoch": 0.9414660410737635, - "learning_rate": 2.5290221770421173e-05, - "loss": 1.4101, - "step": 12240 - }, - { - "epoch": 0.941542958233982, - "learning_rate": 2.522398646901941e-05, - 
"loss": 0.8386, - "step": 12241 - }, - { - "epoch": 0.9416198753942004, - "learning_rate": 2.5157837281326147e-05, - "loss": 0.9314, - "step": 12242 - }, - { - "epoch": 0.9416967925544188, - "learning_rate": 2.5091774211203787e-05, - "loss": 1.0781, - "step": 12243 - }, - { - "epoch": 0.9417737097146374, - "learning_rate": 2.5025797262509587e-05, - "loss": 1.1547, - "step": 12244 - }, - { - "epoch": 0.9418506268748558, - "learning_rate": 2.4959906439096626e-05, - "loss": 0.8614, - "step": 12245 - }, - { - "epoch": 0.9419275440350742, - "learning_rate": 2.4894101744811824e-05, - "loss": 1.0271, - "step": 12246 - }, - { - "epoch": 0.9420044611952927, - "learning_rate": 2.4828383183497617e-05, - "loss": 0.7347, - "step": 12247 - }, - { - "epoch": 0.9420813783555111, - "learning_rate": 2.476275075899176e-05, - "loss": 1.1688, - "step": 12248 - }, - { - "epoch": 0.9421582955157296, - "learning_rate": 2.4697204475126024e-05, - "loss": 1.2839, - "step": 12249 - }, - { - "epoch": 0.942235212675948, - "learning_rate": 2.463174433572818e-05, - "loss": 0.9427, - "step": 12250 - }, - { - "epoch": 0.9423121298361664, - "learning_rate": 2.456637034462067e-05, - "loss": 1.2026, - "step": 12251 - }, - { - "epoch": 0.9423890469963849, - "learning_rate": 2.4501082505620108e-05, - "loss": 0.7465, - "step": 12252 - }, - { - "epoch": 0.9424659641566033, - "learning_rate": 2.4435880822539116e-05, - "loss": 1.1824, - "step": 12253 - }, - { - "epoch": 0.9425428813168217, - "learning_rate": 2.4370765299184805e-05, - "loss": 1.3512, - "step": 12254 - }, - { - "epoch": 0.9426197984770402, - "learning_rate": 2.4305735939359473e-05, - "loss": 1.1434, - "step": 12255 - }, - { - "epoch": 0.9426967156372587, - "learning_rate": 2.424079274686025e-05, - "loss": 1.169, - "step": 12256 - }, - { - "epoch": 0.9427736327974772, - "learning_rate": 2.4175935725478938e-05, - "loss": 1.2582, - "step": 12257 - }, - { - "epoch": 0.9428505499576956, - "learning_rate": 2.4111164879002832e-05, - "loss": 1.2718, - "step": 12258 - }, - { - "epoch": 0.942927467117914, - "learning_rate": 2.4046480211214082e-05, - "loss": 0.9659, - "step": 12259 - }, - { - "epoch": 0.9430043842781325, - "learning_rate": 2.3981881725889332e-05, - "loss": 1.3946, - "step": 12260 - }, - { - "epoch": 0.9430813014383509, - "learning_rate": 2.391736942680106e-05, - "loss": 1.2674, - "step": 12261 - }, - { - "epoch": 0.9431582185985693, - "learning_rate": 2.3852943317715757e-05, - "loss": 1.0531, - "step": 12262 - }, - { - "epoch": 0.9432351357587878, - "learning_rate": 2.3788603402395415e-05, - "loss": 0.9711, - "step": 12263 - }, - { - "epoch": 0.9433120529190062, - "learning_rate": 2.372434968459719e-05, - "loss": 1.3543, - "step": 12264 - }, - { - "epoch": 0.9433889700792246, - "learning_rate": 2.3660182168072585e-05, - "loss": 0.9723, - "step": 12265 - }, - { - "epoch": 0.9434658872394431, - "learning_rate": 2.3596100856568603e-05, - "loss": 0.8284, - "step": 12266 - }, - { - "epoch": 0.9435428043996615, - "learning_rate": 2.3532105753826917e-05, - "loss": 1.4464, - "step": 12267 - }, - { - "epoch": 0.94361972155988, - "learning_rate": 2.3468196863584367e-05, - "loss": 0.8575, - "step": 12268 - }, - { - "epoch": 0.9436966387200985, - "learning_rate": 2.3404374189572643e-05, - "loss": 1.0575, - "step": 12269 - }, - { - "epoch": 0.9437735558803169, - "learning_rate": 2.3340637735518254e-05, - "loss": 0.663, - "step": 12270 - }, - { - "epoch": 0.9438504730405354, - "learning_rate": 2.32769875051429e-05, - "loss": 0.9797, - "step": 12271 - }, - { - "epoch": 
0.9439273902007538, - "learning_rate": 2.3213423502163432e-05, - "loss": 0.8766, - "step": 12272 - }, - { - "epoch": 0.9440043073609722, - "learning_rate": 2.314994573029139e-05, - "loss": 0.9139, - "step": 12273 - }, - { - "epoch": 0.9440812245211907, - "learning_rate": 2.30865541932328e-05, - "loss": 1.2268, - "step": 12274 - }, - { - "epoch": 0.9441581416814091, - "learning_rate": 2.302324889468971e-05, - "loss": 1.0848, - "step": 12275 - }, - { - "epoch": 0.9442350588416276, - "learning_rate": 2.2960029838358154e-05, - "loss": 1.016, - "step": 12276 - }, - { - "epoch": 0.944311976001846, - "learning_rate": 2.2896897027930186e-05, - "loss": 1.0007, - "step": 12277 - }, - { - "epoch": 0.9443888931620644, - "learning_rate": 2.2833850467091523e-05, - "loss": 1.1234, - "step": 12278 - }, - { - "epoch": 0.9444658103222829, - "learning_rate": 2.2770890159523882e-05, - "loss": 1.1582, - "step": 12279 - }, - { - "epoch": 0.9445427274825013, - "learning_rate": 2.2708016108903494e-05, - "loss": 1.3735, - "step": 12280 - }, - { - "epoch": 0.9446196446427197, - "learning_rate": 2.264522831890159e-05, - "loss": 1.0893, - "step": 12281 - }, - { - "epoch": 0.9446965618029383, - "learning_rate": 2.2582526793184566e-05, - "loss": 1.427, - "step": 12282 - }, - { - "epoch": 0.9447734789631567, - "learning_rate": 2.251991153541333e-05, - "loss": 1.382, - "step": 12283 - }, - { - "epoch": 0.9448503961233752, - "learning_rate": 2.2457382549244288e-05, - "loss": 1.1619, - "step": 12284 - }, - { - "epoch": 0.9449273132835936, - "learning_rate": 2.239493983832869e-05, - "loss": 1.1343, - "step": 12285 - }, - { - "epoch": 0.945004230443812, - "learning_rate": 2.233258340631261e-05, - "loss": 0.9671, - "step": 12286 - }, - { - "epoch": 0.9450811476040305, - "learning_rate": 2.227031325683665e-05, - "loss": 0.9059, - "step": 12287 - }, - { - "epoch": 0.9451580647642489, - "learning_rate": 2.220812939353739e-05, - "loss": 1.4422, - "step": 12288 - }, - { - "epoch": 0.9452349819244673, - "learning_rate": 2.214603182004543e-05, - "loss": 1.3011, - "step": 12289 - }, - { - "epoch": 0.9453118990846858, - "learning_rate": 2.208402053998687e-05, - "loss": 1.0817, - "step": 12290 - }, - { - "epoch": 0.9453888162449042, - "learning_rate": 2.2022095556982645e-05, - "loss": 1.4671, - "step": 12291 - }, - { - "epoch": 0.9454657334051226, - "learning_rate": 2.1960256874648532e-05, - "loss": 0.9501, - "step": 12292 - }, - { - "epoch": 0.9455426505653411, - "learning_rate": 2.1898504496595306e-05, - "loss": 1.4825, - "step": 12293 - }, - { - "epoch": 0.9456195677255596, - "learning_rate": 2.1836838426429086e-05, - "loss": 1.1331, - "step": 12294 - }, - { - "epoch": 0.9456964848857781, - "learning_rate": 2.1775258667750153e-05, - "loss": 1.0971, - "step": 12295 - }, - { - "epoch": 0.9457734020459965, - "learning_rate": 2.1713765224154635e-05, - "loss": 1.2781, - "step": 12296 - }, - { - "epoch": 0.9458503192062149, - "learning_rate": 2.165235809923283e-05, - "loss": 1.1165, - "step": 12297 - }, - { - "epoch": 0.9459272363664334, - "learning_rate": 2.1591037296570526e-05, - "loss": 0.9566, - "step": 12298 - }, - { - "epoch": 0.9460041535266518, - "learning_rate": 2.1529802819748533e-05, - "loss": 1.3316, - "step": 12299 - }, - { - "epoch": 0.9460810706868702, - "learning_rate": 2.146865467234199e-05, - "loss": 1.2123, - "step": 12300 - }, - { - "epoch": 0.9461579878470887, - "learning_rate": 2.1407592857921542e-05, - "loss": 1.2362, - "step": 12301 - }, - { - "epoch": 0.9462349050073071, - "learning_rate": 2.1346617380052834e-05, 
- "loss": 1.0055, - "step": 12302 - }, - { - "epoch": 0.9463118221675256, - "learning_rate": 2.128572824229602e-05, - "loss": 1.2387, - "step": 12303 - }, - { - "epoch": 0.946388739327744, - "learning_rate": 2.1224925448206755e-05, - "loss": 1.1984, - "step": 12304 - }, - { - "epoch": 0.9464656564879624, - "learning_rate": 2.11642090013352e-05, - "loss": 1.3221, - "step": 12305 - }, - { - "epoch": 0.946542573648181, - "learning_rate": 2.110357890522652e-05, - "loss": 0.9596, - "step": 12306 - }, - { - "epoch": 0.9466194908083994, - "learning_rate": 2.1043035163421374e-05, - "loss": 1.3926, - "step": 12307 - }, - { - "epoch": 0.9466964079686178, - "learning_rate": 2.098257777945478e-05, - "loss": 1.5158, - "step": 12308 - }, - { - "epoch": 0.9467733251288363, - "learning_rate": 2.0922206756856577e-05, - "loss": 1.2633, - "step": 12309 - }, - { - "epoch": 0.9468502422890547, - "learning_rate": 2.0861922099152275e-05, - "loss": 1.2649, - "step": 12310 - }, - { - "epoch": 0.9469271594492731, - "learning_rate": 2.0801723809862062e-05, - "loss": 1.0729, - "step": 12311 - }, - { - "epoch": 0.9470040766094916, - "learning_rate": 2.0741611892500624e-05, - "loss": 0.7765, - "step": 12312 - }, - { - "epoch": 0.94708099376971, - "learning_rate": 2.0681586350578152e-05, - "loss": 1.2071, - "step": 12313 - }, - { - "epoch": 0.9471579109299285, - "learning_rate": 2.0621647187599347e-05, - "loss": 0.839, - "step": 12314 - }, - { - "epoch": 0.9472348280901469, - "learning_rate": 2.056179440706457e-05, - "loss": 1.1766, - "step": 12315 - }, - { - "epoch": 0.9473117452503653, - "learning_rate": 2.0502028012468367e-05, - "loss": 1.2616, - "step": 12316 - }, - { - "epoch": 0.9473886624105838, - "learning_rate": 2.044234800730077e-05, - "loss": 1.3573, - "step": 12317 - }, - { - "epoch": 0.9474655795708022, - "learning_rate": 2.0382754395046332e-05, - "loss": 1.0533, - "step": 12318 - }, - { - "epoch": 0.9475424967310206, - "learning_rate": 2.0323247179184935e-05, - "loss": 1.2565, - "step": 12319 - }, - { - "epoch": 0.9476194138912392, - "learning_rate": 2.0263826363190962e-05, - "loss": 1.0266, - "step": 12320 - }, - { - "epoch": 0.9476963310514576, - "learning_rate": 2.0204491950534642e-05, - "loss": 0.9547, - "step": 12321 - }, - { - "epoch": 0.9477732482116761, - "learning_rate": 2.0145243944680036e-05, - "loss": 1.3065, - "step": 12322 - }, - { - "epoch": 0.9478501653718945, - "learning_rate": 2.0086082349087043e-05, - "loss": 1.349, - "step": 12323 - }, - { - "epoch": 0.9479270825321129, - "learning_rate": 2.0027007167210066e-05, - "loss": 1.2137, - "step": 12324 - }, - { - "epoch": 0.9480039996923314, - "learning_rate": 1.9968018402498346e-05, - "loss": 1.0298, - "step": 12325 - }, - { - "epoch": 0.9480809168525498, - "learning_rate": 1.9909116058396626e-05, - "loss": 1.2058, - "step": 12326 - }, - { - "epoch": 0.9481578340127682, - "learning_rate": 1.985030013834416e-05, - "loss": 1.0441, - "step": 12327 - }, - { - "epoch": 0.9482347511729867, - "learning_rate": 1.9791570645775025e-05, - "loss": 0.878, - "step": 12328 - }, - { - "epoch": 0.9483116683332051, - "learning_rate": 1.973292758411882e-05, - "loss": 1.1512, - "step": 12329 - }, - { - "epoch": 0.9483885854934235, - "learning_rate": 1.96743709567998e-05, - "loss": 1.0311, - "step": 12330 - }, - { - "epoch": 0.948465502653642, - "learning_rate": 1.9615900767236904e-05, - "loss": 1.0456, - "step": 12331 - }, - { - "epoch": 0.9485424198138604, - "learning_rate": 1.9557517018844396e-05, - "loss": 1.007, - "step": 12332 - }, - { - "epoch": 
0.948619336974079, - "learning_rate": 1.949921971503138e-05, - "loss": 1.0526, - "step": 12333 - }, - { - "epoch": 0.9486962541342974, - "learning_rate": 1.944100885920197e-05, - "loss": 1.2026, - "step": 12334 - }, - { - "epoch": 0.9487731712945158, - "learning_rate": 1.9382884454754947e-05, - "loss": 1.0834, - "step": 12335 - }, - { - "epoch": 0.9488500884547343, - "learning_rate": 1.932484650508426e-05, - "loss": 1.0423, - "step": 12336 - }, - { - "epoch": 0.9489270056149527, - "learning_rate": 1.9266895013578866e-05, - "loss": 1.0567, - "step": 12337 - }, - { - "epoch": 0.9490039227751711, - "learning_rate": 1.9209029983622893e-05, - "loss": 0.9646, - "step": 12338 - }, - { - "epoch": 0.9490808399353896, - "learning_rate": 1.9151251418594806e-05, - "loss": 1.1774, - "step": 12339 - }, - { - "epoch": 0.949157757095608, - "learning_rate": 1.9093559321868404e-05, - "loss": 0.8241, - "step": 12340 - }, - { - "epoch": 0.9492346742558265, - "learning_rate": 1.903595369681249e-05, - "loss": 1.0441, - "step": 12341 - }, - { - "epoch": 0.9493115914160449, - "learning_rate": 1.8978434546790546e-05, - "loss": 1.4644, - "step": 12342 - }, - { - "epoch": 0.9493885085762633, - "learning_rate": 1.8921001875161546e-05, - "loss": 1.3055, - "step": 12343 - }, - { - "epoch": 0.9494654257364818, - "learning_rate": 1.886365568527848e-05, - "loss": 1.0192, - "step": 12344 - }, - { - "epoch": 0.9495423428967003, - "learning_rate": 1.88063959804905e-05, - "loss": 1.0575, - "step": 12345 - }, - { - "epoch": 0.9496192600569187, - "learning_rate": 1.8749222764140426e-05, - "loss": 0.9611, - "step": 12346 - }, - { - "epoch": 0.9496961772171372, - "learning_rate": 1.86921360395671e-05, - "loss": 1.447, - "step": 12347 - }, - { - "epoch": 0.9497730943773556, - "learning_rate": 1.8635135810103676e-05, - "loss": 1.3901, - "step": 12348 - }, - { - "epoch": 0.949850011537574, - "learning_rate": 1.8578222079078665e-05, - "loss": 0.9232, - "step": 12349 - }, - { - "epoch": 0.9499269286977925, - "learning_rate": 1.852139484981491e-05, - "loss": 1.4881, - "step": 12350 - }, - { - "epoch": 0.9500038458580109, - "learning_rate": 1.8464654125631087e-05, - "loss": 1.4199, - "step": 12351 - }, - { - "epoch": 0.9500807630182294, - "learning_rate": 1.8407999909840045e-05, - "loss": 1.2796, - "step": 12352 - }, - { - "epoch": 0.9501576801784478, - "learning_rate": 1.8351432205749974e-05, - "loss": 1.4685, - "step": 12353 - }, - { - "epoch": 0.9502345973386662, - "learning_rate": 1.8294951016663895e-05, - "loss": 0.8389, - "step": 12354 - }, - { - "epoch": 0.9503115144988847, - "learning_rate": 1.8238556345879675e-05, - "loss": 1.0521, - "step": 12355 - }, - { - "epoch": 0.9503884316591031, - "learning_rate": 1.8182248196690678e-05, - "loss": 1.272, - "step": 12356 - }, - { - "epoch": 0.9504653488193215, - "learning_rate": 1.8126026572384435e-05, - "loss": 1.0702, - "step": 12357 - }, - { - "epoch": 0.9505422659795401, - "learning_rate": 1.8069891476243827e-05, - "loss": 0.8724, - "step": 12358 - }, - { - "epoch": 0.9506191831397585, - "learning_rate": 1.801384291154673e-05, - "loss": 1.3949, - "step": 12359 - }, - { - "epoch": 0.950696100299977, - "learning_rate": 1.795788088156586e-05, - "loss": 1.252, - "step": 12360 - }, - { - "epoch": 0.9507730174601954, - "learning_rate": 1.7902005389568765e-05, - "loss": 1.1387, - "step": 12361 - }, - { - "epoch": 0.9508499346204138, - "learning_rate": 1.784621643881834e-05, - "loss": 1.2472, - "step": 12362 - }, - { - "epoch": 0.9509268517806323, - "learning_rate": 1.7790514032571815e-05, - 
"loss": 1.1041, - "step": 12363 - }, - { - "epoch": 0.9510037689408507, - "learning_rate": 1.7734898174081916e-05, - "loss": 0.9482, - "step": 12364 - }, - { - "epoch": 0.9510806861010691, - "learning_rate": 1.767936886659621e-05, - "loss": 1.2077, - "step": 12365 - }, - { - "epoch": 0.9511576032612876, - "learning_rate": 1.7623926113356946e-05, - "loss": 1.1982, - "step": 12366 - }, - { - "epoch": 0.951234520421506, - "learning_rate": 1.756856991760153e-05, - "loss": 1.4091, - "step": 12367 - }, - { - "epoch": 0.9513114375817244, - "learning_rate": 1.7513300282562206e-05, - "loss": 1.1945, - "step": 12368 - }, - { - "epoch": 0.9513883547419429, - "learning_rate": 1.7458117211466395e-05, - "loss": 1.4266, - "step": 12369 - }, - { - "epoch": 0.9514652719021613, - "learning_rate": 1.740302070753619e-05, - "loss": 1.3746, - "step": 12370 - }, - { - "epoch": 0.9515421890623799, - "learning_rate": 1.7348010773988676e-05, - "loss": 1.2154, - "step": 12371 - }, - { - "epoch": 0.9516191062225983, - "learning_rate": 1.7293087414035956e-05, - "loss": 1.1474, - "step": 12372 - }, - { - "epoch": 0.9516960233828167, - "learning_rate": 1.7238250630885132e-05, - "loss": 0.9347, - "step": 12373 - }, - { - "epoch": 0.9517729405430352, - "learning_rate": 1.7183500427738142e-05, - "loss": 0.8565, - "step": 12374 - }, - { - "epoch": 0.9518498577032536, - "learning_rate": 1.7128836807791926e-05, - "loss": 1.2474, - "step": 12375 - }, - { - "epoch": 0.951926774863472, - "learning_rate": 1.7074259774238432e-05, - "loss": 1.1588, - "step": 12376 - }, - { - "epoch": 0.9520036920236905, - "learning_rate": 1.701976933026428e-05, - "loss": 0.9822, - "step": 12377 - }, - { - "epoch": 0.9520806091839089, - "learning_rate": 1.696536547905142e-05, - "loss": 1.2391, - "step": 12378 - }, - { - "epoch": 0.9521575263441274, - "learning_rate": 1.6911048223776316e-05, - "loss": 0.9747, - "step": 12379 - }, - { - "epoch": 0.9522344435043458, - "learning_rate": 1.685681756761076e-05, - "loss": 1.4819, - "step": 12380 - }, - { - "epoch": 0.9523113606645642, - "learning_rate": 1.680267351372139e-05, - "loss": 1.3563, - "step": 12381 - }, - { - "epoch": 0.9523882778247827, - "learning_rate": 1.6748616065269673e-05, - "loss": 1.059, - "step": 12382 - }, - { - "epoch": 0.9524651949850012, - "learning_rate": 1.6694645225412087e-05, - "loss": 1.4287, - "step": 12383 - }, - { - "epoch": 0.9525421121452196, - "learning_rate": 1.6640760997299942e-05, - "loss": 1.0282, - "step": 12384 - }, - { - "epoch": 0.9526190293054381, - "learning_rate": 1.6586963384079722e-05, - "loss": 1.0989, - "step": 12385 - }, - { - "epoch": 0.9526959464656565, - "learning_rate": 1.6533252388892584e-05, - "loss": 1.3305, - "step": 12386 - }, - { - "epoch": 0.9527728636258749, - "learning_rate": 1.6479628014875014e-05, - "loss": 1.2509, - "step": 12387 - }, - { - "epoch": 0.9528497807860934, - "learning_rate": 1.642609026515801e-05, - "loss": 1.2583, - "step": 12388 - }, - { - "epoch": 0.9529266979463118, - "learning_rate": 1.637263914286774e-05, - "loss": 0.9596, - "step": 12389 - }, - { - "epoch": 0.9530036151065303, - "learning_rate": 1.6319274651125204e-05, - "loss": 0.7987, - "step": 12390 - }, - { - "epoch": 0.9530805322667487, - "learning_rate": 1.626599679304658e-05, - "loss": 1.459, - "step": 12391 - }, - { - "epoch": 0.9531574494269671, - "learning_rate": 1.621280557174287e-05, - "loss": 0.8505, - "step": 12392 - }, - { - "epoch": 0.9532343665871856, - "learning_rate": 1.6159700990319436e-05, - "loss": 1.2911, - "step": 12393 - }, - { - "epoch": 
0.953311283747404, - "learning_rate": 1.6106683051877623e-05, - "loss": 0.6452, - "step": 12394 - }, - { - "epoch": 0.9533882009076224, - "learning_rate": 1.6053751759513124e-05, - "loss": 1.2833, - "step": 12395 - }, - { - "epoch": 0.953465118067841, - "learning_rate": 1.600090711631663e-05, - "loss": 1.3633, - "step": 12396 - }, - { - "epoch": 0.9535420352280594, - "learning_rate": 1.5948149125373678e-05, - "loss": 0.9192, - "step": 12397 - }, - { - "epoch": 0.9536189523882779, - "learning_rate": 1.5895477789764966e-05, - "loss": 1.0924, - "step": 12398 - }, - { - "epoch": 0.9536958695484963, - "learning_rate": 1.584289311256587e-05, - "loss": 1.0657, - "step": 12399 - }, - { - "epoch": 0.9537727867087147, - "learning_rate": 1.5790395096847264e-05, - "loss": 1.6161, - "step": 12400 - }, - { - "epoch": 0.9538497038689332, - "learning_rate": 1.5737983745674035e-05, - "loss": 1.2523, - "step": 12401 - }, - { - "epoch": 0.9539266210291516, - "learning_rate": 1.568565906210706e-05, - "loss": 1.4484, - "step": 12402 - }, - { - "epoch": 0.95400353818937, - "learning_rate": 1.5633421049201236e-05, - "loss": 1.0825, - "step": 12403 - }, - { - "epoch": 0.9540804553495885, - "learning_rate": 1.5581269710006785e-05, - "loss": 1.1632, - "step": 12404 - }, - { - "epoch": 0.9541573725098069, - "learning_rate": 1.5529205047569273e-05, - "loss": 0.9711, - "step": 12405 - }, - { - "epoch": 0.9542342896700254, - "learning_rate": 1.547722706492827e-05, - "loss": 0.9484, - "step": 12406 - }, - { - "epoch": 0.9543112068302438, - "learning_rate": 1.542533576511934e-05, - "loss": 1.3789, - "step": 12407 - }, - { - "epoch": 0.9543881239904622, - "learning_rate": 1.5373531151172236e-05, - "loss": 0.9206, - "step": 12408 - }, - { - "epoch": 0.9544650411506808, - "learning_rate": 1.5321813226111703e-05, - "loss": 1.1812, - "step": 12409 - }, - { - "epoch": 0.9545419583108992, - "learning_rate": 1.5270181992957987e-05, - "loss": 0.9874, - "step": 12410 - }, - { - "epoch": 0.9546188754711176, - "learning_rate": 1.5218637454725682e-05, - "loss": 1.3513, - "step": 12411 - }, - { - "epoch": 0.9546957926313361, - "learning_rate": 1.5167179614424376e-05, - "loss": 1.4242, - "step": 12412 - }, - { - "epoch": 0.9547727097915545, - "learning_rate": 1.5115808475059001e-05, - "loss": 1.3357, - "step": 12413 - }, - { - "epoch": 0.9548496269517729, - "learning_rate": 1.5064524039629157e-05, - "loss": 1.1638, - "step": 12414 - }, - { - "epoch": 0.9549265441119914, - "learning_rate": 1.5013326311129117e-05, - "loss": 1.4263, - "step": 12415 - }, - { - "epoch": 0.9550034612722098, - "learning_rate": 1.496221529254882e-05, - "loss": 1.3826, - "step": 12416 - }, - { - "epoch": 0.9550803784324283, - "learning_rate": 1.4911190986872213e-05, - "loss": 1.1397, - "step": 12417 - }, - { - "epoch": 0.9551572955926467, - "learning_rate": 1.4860253397079082e-05, - "loss": 1.0621, - "step": 12418 - }, - { - "epoch": 0.9552342127528651, - "learning_rate": 1.4809402526143378e-05, - "loss": 1.0134, - "step": 12419 - }, - { - "epoch": 0.9553111299130836, - "learning_rate": 1.4758638377034562e-05, - "loss": 1.1756, - "step": 12420 - }, - { - "epoch": 0.955388047073302, - "learning_rate": 1.470796095271676e-05, - "loss": 1.0331, - "step": 12421 - }, - { - "epoch": 0.9554649642335205, - "learning_rate": 1.4657370256148939e-05, - "loss": 0.7614, - "step": 12422 - }, - { - "epoch": 0.955541881393739, - "learning_rate": 1.4606866290285404e-05, - "loss": 1.2887, - "step": 12423 - }, - { - "epoch": 0.9556187985539574, - "learning_rate": 
1.4556449058074794e-05, - "loss": 1.156, - "step": 12424 - }, - { - "epoch": 0.9556957157141759, - "learning_rate": 1.4506118562461257e-05, - "loss": 0.9547, - "step": 12425 - }, - { - "epoch": 0.9557726328743943, - "learning_rate": 1.4455874806383772e-05, - "loss": 0.8543, - "step": 12426 - }, - { - "epoch": 0.9558495500346127, - "learning_rate": 1.4405717792775996e-05, - "loss": 1.0461, - "step": 12427 - }, - { - "epoch": 0.9559264671948312, - "learning_rate": 1.4355647524566417e-05, - "loss": 0.9796, - "step": 12428 - }, - { - "epoch": 0.9560033843550496, - "learning_rate": 1.4305664004678865e-05, - "loss": 1.3172, - "step": 12429 - }, - { - "epoch": 0.956080301515268, - "learning_rate": 1.4255767236032002e-05, - "loss": 1.2666, - "step": 12430 - }, - { - "epoch": 0.9561572186754865, - "learning_rate": 1.4205957221539334e-05, - "loss": 1.1766, - "step": 12431 - }, - { - "epoch": 0.9562341358357049, - "learning_rate": 1.4156233964109366e-05, - "loss": 1.1884, - "step": 12432 - }, - { - "epoch": 0.9563110529959233, - "learning_rate": 1.4106597466645277e-05, - "loss": 0.9319, - "step": 12433 - }, - { - "epoch": 0.9563879701561419, - "learning_rate": 1.4057047732045414e-05, - "loss": 1.0771, - "step": 12434 - }, - { - "epoch": 0.9564648873163603, - "learning_rate": 1.4007584763203297e-05, - "loss": 1.1815, - "step": 12435 - }, - { - "epoch": 0.9565418044765788, - "learning_rate": 1.3958208563006947e-05, - "loss": 1.0333, - "step": 12436 - }, - { - "epoch": 0.9566187216367972, - "learning_rate": 1.390891913433956e-05, - "loss": 1.2589, - "step": 12437 - }, - { - "epoch": 0.9566956387970156, - "learning_rate": 1.3859716480079165e-05, - "loss": 1.2224, - "step": 12438 - }, - { - "epoch": 0.9567725559572341, - "learning_rate": 1.3810600603098466e-05, - "loss": 1.0726, - "step": 12439 - }, - { - "epoch": 0.9568494731174525, - "learning_rate": 1.3761571506266002e-05, - "loss": 0.8625, - "step": 12440 - }, - { - "epoch": 0.9569263902776709, - "learning_rate": 1.3712629192444314e-05, - "loss": 1.1425, - "step": 12441 - }, - { - "epoch": 0.9570033074378894, - "learning_rate": 1.3663773664490953e-05, - "loss": 1.7033, - "step": 12442 - }, - { - "epoch": 0.9570802245981078, - "learning_rate": 1.3615004925258968e-05, - "loss": 1.3728, - "step": 12443 - }, - { - "epoch": 0.9571571417583263, - "learning_rate": 1.3566322977595913e-05, - "loss": 1.5058, - "step": 12444 - }, - { - "epoch": 0.9572340589185447, - "learning_rate": 1.351772782434435e-05, - "loss": 1.179, - "step": 12445 - }, - { - "epoch": 0.9573109760787631, - "learning_rate": 1.3469219468341842e-05, - "loss": 1.303, - "step": 12446 - }, - { - "epoch": 0.9573878932389817, - "learning_rate": 1.342079791242079e-05, - "loss": 1.1784, - "step": 12447 - }, - { - "epoch": 0.9574648103992001, - "learning_rate": 1.3372463159408765e-05, - "loss": 1.2931, - "step": 12448 - }, - { - "epoch": 0.9575417275594185, - "learning_rate": 1.332421521212801e-05, - "loss": 1.0764, - "step": 12449 - }, - { - "epoch": 0.957618644719637, - "learning_rate": 1.3276054073395438e-05, - "loss": 0.9208, - "step": 12450 - }, - { - "epoch": 0.9576955618798554, - "learning_rate": 1.3227979746023633e-05, - "loss": 1.3162, - "step": 12451 - }, - { - "epoch": 0.9577724790400738, - "learning_rate": 1.3179992232819515e-05, - "loss": 1.0227, - "step": 12452 - }, - { - "epoch": 0.9578493962002923, - "learning_rate": 1.3132091536585344e-05, - "loss": 1.5541, - "step": 12453 - }, - { - "epoch": 0.9579263133605107, - "learning_rate": 1.3084277660117716e-05, - "loss": 1.3618, - "step": 
12454 - }, - { - "epoch": 0.9580032305207292, - "learning_rate": 1.303655060620873e-05, - "loss": 1.1838, - "step": 12455 - }, - { - "epoch": 0.9580801476809476, - "learning_rate": 1.298891037764549e-05, - "loss": 1.0831, - "step": 12456 - }, - { - "epoch": 0.958157064841166, - "learning_rate": 1.2941356977209273e-05, - "loss": 1.1509, - "step": 12457 - }, - { - "epoch": 0.9582339820013845, - "learning_rate": 1.2893890407677023e-05, - "loss": 0.9196, - "step": 12458 - }, - { - "epoch": 0.958310899161603, - "learning_rate": 1.2846510671820356e-05, - "loss": 1.3795, - "step": 12459 - }, - { - "epoch": 0.9583878163218214, - "learning_rate": 1.2799217772405891e-05, - "loss": 1.2916, - "step": 12460 - }, - { - "epoch": 0.9584647334820399, - "learning_rate": 1.2752011712194922e-05, - "loss": 1.0964, - "step": 12461 - }, - { - "epoch": 0.9585416506422583, - "learning_rate": 1.2704892493944076e-05, - "loss": 1.1146, - "step": 12462 - }, - { - "epoch": 0.9586185678024768, - "learning_rate": 1.2657860120404485e-05, - "loss": 0.8127, - "step": 12463 - }, - { - "epoch": 0.9586954849626952, - "learning_rate": 1.2610914594322453e-05, - "loss": 1.0171, - "step": 12464 - }, - { - "epoch": 0.9587724021229136, - "learning_rate": 1.2564055918439287e-05, - "loss": 1.2993, - "step": 12465 - }, - { - "epoch": 0.9588493192831321, - "learning_rate": 1.2517284095491133e-05, - "loss": 0.9041, - "step": 12466 - }, - { - "epoch": 0.9589262364433505, - "learning_rate": 1.247059912820897e-05, - "loss": 1.2157, - "step": 12467 - }, - { - "epoch": 0.9590031536035689, - "learning_rate": 1.2424001019318786e-05, - "loss": 1.3155, - "step": 12468 - }, - { - "epoch": 0.9590800707637874, - "learning_rate": 1.2377489771541406e-05, - "loss": 0.9835, - "step": 12469 - }, - { - "epoch": 0.9591569879240058, - "learning_rate": 1.2331065387592821e-05, - "loss": 0.9406, - "step": 12470 - }, - { - "epoch": 0.9592339050842242, - "learning_rate": 1.2284727870183865e-05, - "loss": 0.9119, - "step": 12471 - }, - { - "epoch": 0.9593108222444428, - "learning_rate": 1.2238477222019872e-05, - "loss": 1.0957, - "step": 12472 - }, - { - "epoch": 0.9593877394046612, - "learning_rate": 1.219231344580185e-05, - "loss": 1.2068, - "step": 12473 - }, - { - "epoch": 0.9594646565648797, - "learning_rate": 1.2146236544224975e-05, - "loss": 1.003, - "step": 12474 - }, - { - "epoch": 0.9595415737250981, - "learning_rate": 1.2100246519980262e-05, - "loss": 1.226, - "step": 12475 - }, - { - "epoch": 0.9596184908853165, - "learning_rate": 1.205434337575273e-05, - "loss": 1.0051, - "step": 12476 - }, - { - "epoch": 0.959695408045535, - "learning_rate": 1.2008527114222734e-05, - "loss": 1.2554, - "step": 12477 - }, - { - "epoch": 0.9597723252057534, - "learning_rate": 1.1962797738065633e-05, - "loss": 0.9832, - "step": 12478 - }, - { - "epoch": 0.9598492423659718, - "learning_rate": 1.1917155249951461e-05, - "loss": 1.0128, - "step": 12479 - }, - { - "epoch": 0.9599261595261903, - "learning_rate": 1.1871599652545584e-05, - "loss": 1.3611, - "step": 12480 - }, - { - "epoch": 0.9600030766864087, - "learning_rate": 1.1826130948507707e-05, - "loss": 1.4286, - "step": 12481 - }, - { - "epoch": 0.9600799938466272, - "learning_rate": 1.1780749140493041e-05, - "loss": 1.0974, - "step": 12482 - }, - { - "epoch": 0.9601569110068456, - "learning_rate": 1.1735454231151465e-05, - "loss": 1.0853, - "step": 12483 - }, - { - "epoch": 0.960233828167064, - "learning_rate": 1.1690246223127865e-05, - "loss": 1.3005, - "step": 12484 - }, - { - "epoch": 0.9603107453272826, - 
"learning_rate": 1.1645125119061795e-05, - "loss": 0.9851, - "step": 12485 - }, - { - "epoch": 0.960387662487501, - "learning_rate": 1.1600090921587813e-05, - "loss": 1.1901, - "step": 12486 - }, - { - "epoch": 0.9604645796477194, - "learning_rate": 1.1555143633335986e-05, - "loss": 1.0553, - "step": 12487 - }, - { - "epoch": 0.9605414968079379, - "learning_rate": 1.1510283256930376e-05, - "loss": 0.9551, - "step": 12488 - }, - { - "epoch": 0.9606184139681563, - "learning_rate": 1.1465509794990559e-05, - "loss": 1.1452, - "step": 12489 - }, - { - "epoch": 0.9606953311283747, - "learning_rate": 1.1420823250131107e-05, - "loss": 1.1404, - "step": 12490 - }, - { - "epoch": 0.9607722482885932, - "learning_rate": 1.1376223624960935e-05, - "loss": 1.4506, - "step": 12491 - }, - { - "epoch": 0.9608491654488116, - "learning_rate": 1.133171092208446e-05, - "loss": 1.1455, - "step": 12492 - }, - { - "epoch": 0.9609260826090301, - "learning_rate": 1.1287285144100933e-05, - "loss": 1.0521, - "step": 12493 - }, - { - "epoch": 0.9610029997692485, - "learning_rate": 1.124294629360445e-05, - "loss": 1.0328, - "step": 12494 - }, - { - "epoch": 0.9610799169294669, - "learning_rate": 1.119869437318377e-05, - "loss": 1.0154, - "step": 12495 - }, - { - "epoch": 0.9611568340896854, - "learning_rate": 1.115452938542283e-05, - "loss": 0.9358, - "step": 12496 - }, - { - "epoch": 0.9612337512499038, - "learning_rate": 1.1110451332900729e-05, - "loss": 1.2245, - "step": 12497 - }, - { - "epoch": 0.9613106684101222, - "learning_rate": 1.106646021819091e-05, - "loss": 1.2063, - "step": 12498 - }, - { - "epoch": 0.9613875855703408, - "learning_rate": 1.102255604386232e-05, - "loss": 1.1398, - "step": 12499 - }, - { - "epoch": 0.9614645027305592, - "learning_rate": 1.0978738812478405e-05, - "loss": 1.2032, - "step": 12500 - }, - { - "epoch": 0.9615414198907777, - "learning_rate": 1.0935008526597623e-05, - "loss": 1.3763, - "step": 12501 - }, - { - "epoch": 0.9616183370509961, - "learning_rate": 1.089136518877376e-05, - "loss": 1.1248, - "step": 12502 - }, - { - "epoch": 0.9616952542112145, - "learning_rate": 1.0847808801554949e-05, - "loss": 1.2183, - "step": 12503 - }, - { - "epoch": 0.961772171371433, - "learning_rate": 1.0804339367484484e-05, - "loss": 1.0694, - "step": 12504 - }, - { - "epoch": 0.9618490885316514, - "learning_rate": 1.076095688910067e-05, - "loss": 1.0625, - "step": 12505 - }, - { - "epoch": 0.9619260056918698, - "learning_rate": 1.071766136893665e-05, - "loss": 1.0587, - "step": 12506 - }, - { - "epoch": 0.9620029228520883, - "learning_rate": 1.0674452809520397e-05, - "loss": 0.867, - "step": 12507 - }, - { - "epoch": 0.9620798400123067, - "learning_rate": 1.0631331213375061e-05, - "loss": 1.0942, - "step": 12508 - }, - { - "epoch": 0.9621567571725252, - "learning_rate": 1.0588296583018464e-05, - "loss": 1.0028, - "step": 12509 - }, - { - "epoch": 0.9622336743327436, - "learning_rate": 1.0545348920963426e-05, - "loss": 1.3383, - "step": 12510 - }, - { - "epoch": 0.962310591492962, - "learning_rate": 1.0502488229717777e-05, - "loss": 1.1583, - "step": 12511 - }, - { - "epoch": 0.9623875086531806, - "learning_rate": 1.045971451178418e-05, - "loss": 0.9778, - "step": 12512 - }, - { - "epoch": 0.962464425813399, - "learning_rate": 1.0417027769660137e-05, - "loss": 1.3648, - "step": 12513 - }, - { - "epoch": 0.9625413429736174, - "learning_rate": 1.037442800583832e-05, - "loss": 1.3772, - "step": 12514 - }, - { - "epoch": 0.9626182601338359, - "learning_rate": 1.0331915222806077e-05, - "loss": 1.37, - 
"step": 12515 - }, - { - "epoch": 0.9626951772940543, - "learning_rate": 1.028948942304575e-05, - "loss": 1.0969, - "step": 12516 - }, - { - "epoch": 0.9627720944542727, - "learning_rate": 1.024715060903486e-05, - "loss": 1.1798, - "step": 12517 - }, - { - "epoch": 0.9628490116144912, - "learning_rate": 1.0204898783245097e-05, - "loss": 0.8726, - "step": 12518 - }, - { - "epoch": 0.9629259287747096, - "learning_rate": 1.0162733948144321e-05, - "loss": 1.2034, - "step": 12519 - }, - { - "epoch": 0.9630028459349281, - "learning_rate": 1.0120656106193894e-05, - "loss": 1.0849, - "step": 12520 - }, - { - "epoch": 0.9630797630951465, - "learning_rate": 1.0078665259851183e-05, - "loss": 0.9967, - "step": 12521 - }, - { - "epoch": 0.9631566802553649, - "learning_rate": 1.0036761411567897e-05, - "loss": 0.8526, - "step": 12522 - }, - { - "epoch": 0.9632335974155835, - "learning_rate": 9.994944563790908e-06, - "loss": 1.4954, - "step": 12523 - }, - { - "epoch": 0.9633105145758019, - "learning_rate": 9.9532147189621e-06, - "loss": 1.2077, - "step": 12524 - }, - { - "epoch": 0.9633874317360203, - "learning_rate": 9.911571879517856e-06, - "loss": 1.0329, - "step": 12525 - }, - { - "epoch": 0.9634643488962388, - "learning_rate": 9.87001604788973e-06, - "loss": 1.0367, - "step": 12526 - }, - { - "epoch": 0.9635412660564572, - "learning_rate": 9.828547226504614e-06, - "loss": 0.7542, - "step": 12527 - }, - { - "epoch": 0.9636181832166757, - "learning_rate": 9.787165417783405e-06, - "loss": 1.1541, - "step": 12528 - }, - { - "epoch": 0.9636951003768941, - "learning_rate": 9.745870624142838e-06, - "loss": 1.4798, - "step": 12529 - }, - { - "epoch": 0.9637720175371125, - "learning_rate": 9.704662847993984e-06, - "loss": 1.0329, - "step": 12530 - }, - { - "epoch": 0.963848934697331, - "learning_rate": 9.663542091742916e-06, - "loss": 1.0714, - "step": 12531 - }, - { - "epoch": 0.9639258518575494, - "learning_rate": 9.622508357790883e-06, - "loss": 1.1419, - "step": 12532 - }, - { - "epoch": 0.9640027690177678, - "learning_rate": 9.581561648533798e-06, - "loss": 1.1956, - "step": 12533 - }, - { - "epoch": 0.9640796861779863, - "learning_rate": 9.540701966362586e-06, - "loss": 1.0352, - "step": 12534 - }, - { - "epoch": 0.9641566033382047, - "learning_rate": 9.49992931366317e-06, - "loss": 1.0903, - "step": 12535 - }, - { - "epoch": 0.9642335204984231, - "learning_rate": 9.45924369281631e-06, - "loss": 1.1682, - "step": 12536 - }, - { - "epoch": 0.9643104376586417, - "learning_rate": 9.418645106197443e-06, - "loss": 1.2397, - "step": 12537 - }, - { - "epoch": 0.9643873548188601, - "learning_rate": 9.378133556177504e-06, - "loss": 1.0185, - "step": 12538 - }, - { - "epoch": 0.9644642719790786, - "learning_rate": 9.337709045121601e-06, - "loss": 0.9796, - "step": 12539 - }, - { - "epoch": 0.964541189139297, - "learning_rate": 9.297371575390512e-06, - "loss": 1.3726, - "step": 12540 - }, - { - "epoch": 0.9646181062995154, - "learning_rate": 9.25712114933952e-06, - "loss": 0.9965, - "step": 12541 - }, - { - "epoch": 0.9646950234597339, - "learning_rate": 9.216957769318912e-06, - "loss": 1.0806, - "step": 12542 - }, - { - "epoch": 0.9647719406199523, - "learning_rate": 9.176881437673645e-06, - "loss": 1.4456, - "step": 12543 - }, - { - "epoch": 0.9648488577801707, - "learning_rate": 9.136892156744015e-06, - "loss": 1.9841, - "step": 12544 - }, - { - "epoch": 0.9649257749403892, - "learning_rate": 9.096989928864984e-06, - "loss": 1.0315, - "step": 12545 - }, - { - "epoch": 0.9650026921006076, - "learning_rate": 
9.057174756366526e-06, - "loss": 1.4377, - "step": 12546 - }, - { - "epoch": 0.9650796092608261, - "learning_rate": 9.017446641573446e-06, - "loss": 1.2672, - "step": 12547 - }, - { - "epoch": 0.9651565264210445, - "learning_rate": 8.977805586805555e-06, - "loss": 1.2858, - "step": 12548 - }, - { - "epoch": 0.965233443581263, - "learning_rate": 8.938251594377334e-06, - "loss": 1.4411, - "step": 12549 - }, - { - "epoch": 0.9653103607414815, - "learning_rate": 8.89878466659877e-06, - "loss": 1.3483, - "step": 12550 - }, - { - "epoch": 0.9653872779016999, - "learning_rate": 8.859404805774018e-06, - "loss": 1.1452, - "step": 12551 - }, - { - "epoch": 0.9654641950619183, - "learning_rate": 8.820112014202742e-06, - "loss": 1.4143, - "step": 12552 - }, - { - "epoch": 0.9655411122221368, - "learning_rate": 8.780906294179104e-06, - "loss": 1.2448, - "step": 12553 - }, - { - "epoch": 0.9656180293823552, - "learning_rate": 8.741787647992439e-06, - "loss": 1.277, - "step": 12554 - }, - { - "epoch": 0.9656949465425736, - "learning_rate": 8.702756077927087e-06, - "loss": 1.1117, - "step": 12555 - }, - { - "epoch": 0.9657718637027921, - "learning_rate": 8.663811586261894e-06, - "loss": 1.2591, - "step": 12556 - }, - { - "epoch": 0.9658487808630105, - "learning_rate": 8.624954175271038e-06, - "loss": 1.0719, - "step": 12557 - }, - { - "epoch": 0.965925698023229, - "learning_rate": 8.586183847223371e-06, - "loss": 1.2018, - "step": 12558 - }, - { - "epoch": 0.9660026151834474, - "learning_rate": 8.547500604382752e-06, - "loss": 1.4612, - "step": 12559 - }, - { - "epoch": 0.9660795323436658, - "learning_rate": 8.508904449007871e-06, - "loss": 1.2367, - "step": 12560 - }, - { - "epoch": 0.9661564495038844, - "learning_rate": 8.470395383352425e-06, - "loss": 1.4056, - "step": 12561 - }, - { - "epoch": 0.9662333666641028, - "learning_rate": 8.431973409665116e-06, - "loss": 1.1051, - "step": 12562 - }, - { - "epoch": 0.9663102838243212, - "learning_rate": 8.393638530189319e-06, - "loss": 1.113, - "step": 12563 - }, - { - "epoch": 0.9663872009845397, - "learning_rate": 8.355390747163405e-06, - "loss": 1.101, - "step": 12564 - }, - { - "epoch": 0.9664641181447581, - "learning_rate": 8.317230062820925e-06, - "loss": 1.1721, - "step": 12565 - }, - { - "epoch": 0.9665410353049766, - "learning_rate": 8.279156479389926e-06, - "loss": 1.3101, - "step": 12566 - }, - { - "epoch": 0.966617952465195, - "learning_rate": 8.241169999093468e-06, - "loss": 1.8327, - "step": 12567 - }, - { - "epoch": 0.9666948696254134, - "learning_rate": 8.203270624150106e-06, - "loss": 1.2764, - "step": 12568 - }, - { - "epoch": 0.9667717867856319, - "learning_rate": 8.165458356772238e-06, - "loss": 0.9915, - "step": 12569 - }, - { - "epoch": 0.9668487039458503, - "learning_rate": 8.127733199167931e-06, - "loss": 1.4001, - "step": 12570 - }, - { - "epoch": 0.9669256211060687, - "learning_rate": 8.090095153540255e-06, - "loss": 1.1301, - "step": 12571 - }, - { - "epoch": 0.9670025382662872, - "learning_rate": 8.052544222086621e-06, - "loss": 1.5621, - "step": 12572 - }, - { - "epoch": 0.9670794554265056, - "learning_rate": 8.015080406999942e-06, - "loss": 1.0522, - "step": 12573 - }, - { - "epoch": 0.967156372586724, - "learning_rate": 7.977703710467632e-06, - "loss": 1.1197, - "step": 12574 - }, - { - "epoch": 0.9672332897469426, - "learning_rate": 7.940414134671948e-06, - "loss": 0.9458, - "step": 12575 - }, - { - "epoch": 0.967310206907161, - "learning_rate": 7.903211681790645e-06, - "loss": 1.316, - "step": 12576 - }, - { - "epoch": 
0.9673871240673795, - "learning_rate": 7.866096353995823e-06, - "loss": 1.4618, - "step": 12577 - }, - { - "epoch": 0.9674640412275979, - "learning_rate": 7.829068153454577e-06, - "loss": 1.0523, - "step": 12578 - }, - { - "epoch": 0.9675409583878163, - "learning_rate": 7.792127082329348e-06, - "loss": 1.1657, - "step": 12579 - }, - { - "epoch": 0.9676178755480348, - "learning_rate": 7.755273142776742e-06, - "loss": 1.1556, - "step": 12580 - }, - { - "epoch": 0.9676947927082532, - "learning_rate": 7.718506336949038e-06, - "loss": 1.3058, - "step": 12581 - }, - { - "epoch": 0.9677717098684716, - "learning_rate": 7.681826666993019e-06, - "loss": 1.0612, - "step": 12582 - }, - { - "epoch": 0.9678486270286901, - "learning_rate": 7.645234135050305e-06, - "loss": 1.1075, - "step": 12583 - }, - { - "epoch": 0.9679255441889085, - "learning_rate": 7.608728743257687e-06, - "loss": 1.2045, - "step": 12584 - }, - { - "epoch": 0.968002461349127, - "learning_rate": 7.572310493746793e-06, - "loss": 1.2167, - "step": 12585 - }, - { - "epoch": 0.9680793785093454, - "learning_rate": 7.535979388643921e-06, - "loss": 1.2357, - "step": 12586 - }, - { - "epoch": 0.9681562956695638, - "learning_rate": 7.499735430070709e-06, - "loss": 1.1877, - "step": 12587 - }, - { - "epoch": 0.9682332128297824, - "learning_rate": 7.463578620143297e-06, - "loss": 0.8865, - "step": 12588 - }, - { - "epoch": 0.9683101299900008, - "learning_rate": 7.427508960973162e-06, - "loss": 0.8846, - "step": 12589 - }, - { - "epoch": 0.9683870471502192, - "learning_rate": 7.391526454666286e-06, - "loss": 1.0404, - "step": 12590 - }, - { - "epoch": 0.9684639643104377, - "learning_rate": 7.355631103323657e-06, - "loss": 0.8971, - "step": 12591 - }, - { - "epoch": 0.9685408814706561, - "learning_rate": 7.319822909041429e-06, - "loss": 1.1847, - "step": 12592 - }, - { - "epoch": 0.9686177986308745, - "learning_rate": 7.2841018739102646e-06, - "loss": 0.8368, - "step": 12593 - }, - { - "epoch": 0.968694715791093, - "learning_rate": 7.2484680000161615e-06, - "loss": 1.0476, - "step": 12594 - }, - { - "epoch": 0.9687716329513114, - "learning_rate": 7.2129212894396224e-06, - "loss": 1.1231, - "step": 12595 - }, - { - "epoch": 0.9688485501115299, - "learning_rate": 7.177461744256319e-06, - "loss": 1.1153, - "step": 12596 - }, - { - "epoch": 0.9689254672717483, - "learning_rate": 7.14208936653693e-06, - "loss": 0.981, - "step": 12597 - }, - { - "epoch": 0.9690023844319667, - "learning_rate": 7.106804158346636e-06, - "loss": 1.3899, - "step": 12598 - }, - { - "epoch": 0.9690793015921852, - "learning_rate": 7.071606121745955e-06, - "loss": 0.9187, - "step": 12599 - }, - { - "epoch": 0.9691562187524037, - "learning_rate": 7.0364952587902435e-06, - "loss": 1.1633, - "step": 12600 - }, - { - "epoch": 0.9692331359126221, - "learning_rate": 7.001471571529361e-06, - "loss": 1.2645, - "step": 12601 - }, - { - "epoch": 0.9693100530728406, - "learning_rate": 6.9665350620085055e-06, - "loss": 1.2485, - "step": 12602 - }, - { - "epoch": 0.969386970233059, - "learning_rate": 6.931685732267712e-06, - "loss": 1.1676, - "step": 12603 - }, - { - "epoch": 0.9694638873932775, - "learning_rate": 6.8969235843416855e-06, - "loss": 1.0614, - "step": 12604 - }, - { - "epoch": 0.9695408045534959, - "learning_rate": 6.862248620260636e-06, - "loss": 1.1671, - "step": 12605 - }, - { - "epoch": 0.9696177217137143, - "learning_rate": 6.827660842048777e-06, - "loss": 1.6335, - "step": 12606 - }, - { - "epoch": 0.9696946388739328, - "learning_rate": 6.793160251725994e-06, - "loss": 
1.5102, - "step": 12607 - }, - { - "epoch": 0.9697715560341512, - "learning_rate": 6.75874685130684e-06, - "loss": 1.2357, - "step": 12608 - }, - { - "epoch": 0.9698484731943696, - "learning_rate": 6.72442064280071e-06, - "loss": 0.9505, - "step": 12609 - }, - { - "epoch": 0.9699253903545881, - "learning_rate": 6.6901816282116665e-06, - "loss": 0.8884, - "step": 12610 - }, - { - "epoch": 0.9700023075148065, - "learning_rate": 6.6560298095394434e-06, - "loss": 1.1603, - "step": 12611 - }, - { - "epoch": 0.9700792246750249, - "learning_rate": 6.621965188777945e-06, - "loss": 0.9623, - "step": 12612 - }, - { - "epoch": 0.9701561418352435, - "learning_rate": 6.587987767916248e-06, - "loss": 1.0489, - "step": 12613 - }, - { - "epoch": 0.9702330589954619, - "learning_rate": 6.554097548938265e-06, - "loss": 1.1253, - "step": 12614 - }, - { - "epoch": 0.9703099761556804, - "learning_rate": 6.5202945338229124e-06, - "loss": 1.2029, - "step": 12615 - }, - { - "epoch": 0.9703868933158988, - "learning_rate": 6.486578724544279e-06, - "loss": 1.2329, - "step": 12616 - }, - { - "epoch": 0.9704638104761172, - "learning_rate": 6.4529501230706225e-06, - "loss": 1.3183, - "step": 12617 - }, - { - "epoch": 0.9705407276363357, - "learning_rate": 6.419408731365706e-06, - "loss": 0.7932, - "step": 12618 - }, - { - "epoch": 0.9706176447965541, - "learning_rate": 6.385954551388129e-06, - "loss": 1.4808, - "step": 12619 - }, - { - "epoch": 0.9706945619567725, - "learning_rate": 6.35258758509133e-06, - "loss": 1.3753, - "step": 12620 - }, - { - "epoch": 0.970771479116991, - "learning_rate": 6.319307834423582e-06, - "loss": 1.1444, - "step": 12621 - }, - { - "epoch": 0.9708483962772094, - "learning_rate": 6.286115301328166e-06, - "loss": 1.3147, - "step": 12622 - }, - { - "epoch": 0.9709253134374279, - "learning_rate": 6.25300998774303e-06, - "loss": 1.4053, - "step": 12623 - }, - { - "epoch": 0.9710022305976463, - "learning_rate": 6.2199918956014625e-06, - "loss": 1.0932, - "step": 12624 - }, - { - "epoch": 0.9710791477578647, - "learning_rate": 6.18706102683142e-06, - "loss": 1.3351, - "step": 12625 - }, - { - "epoch": 0.9711560649180833, - "learning_rate": 6.154217383355698e-06, - "loss": 1.2115, - "step": 12626 - }, - { - "epoch": 0.9712329820783017, - "learning_rate": 6.1214609670919295e-06, - "loss": 1.1545, - "step": 12627 - }, - { - "epoch": 0.9713098992385201, - "learning_rate": 6.088791779953085e-06, - "loss": 1.0827, - "step": 12628 - }, - { - "epoch": 0.9713868163987386, - "learning_rate": 6.056209823846637e-06, - "loss": 1.1002, - "step": 12629 - }, - { - "epoch": 0.971463733558957, - "learning_rate": 6.023715100674898e-06, - "loss": 0.9341, - "step": 12630 - }, - { - "epoch": 0.9715406507191755, - "learning_rate": 5.991307612335517e-06, - "loss": 1.0185, - "step": 12631 - }, - { - "epoch": 0.9716175678793939, - "learning_rate": 5.958987360720647e-06, - "loss": 1.1625, - "step": 12632 - }, - { - "epoch": 0.9716944850396123, - "learning_rate": 5.926754347717611e-06, - "loss": 1.3745, - "step": 12633 - }, - { - "epoch": 0.9717714021998308, - "learning_rate": 5.8946085752084046e-06, - "loss": 1.1692, - "step": 12634 - }, - { - "epoch": 0.9718483193600492, - "learning_rate": 5.8625500450701914e-06, - "loss": 1.182, - "step": 12635 - }, - { - "epoch": 0.9719252365202676, - "learning_rate": 5.830578759174809e-06, - "loss": 1.4195, - "step": 12636 - }, - { - "epoch": 0.9720021536804861, - "learning_rate": 5.7986947193889305e-06, - "loss": 1.0184, - "step": 12637 - }, - { - "epoch": 0.9720790708407046, - 
"learning_rate": 5.766897927574733e-06, - "loss": 1.1037, - "step": 12638 - }, - { - "epoch": 0.972155988000923, - "learning_rate": 5.735188385588564e-06, - "loss": 1.1467, - "step": 12639 - }, - { - "epoch": 0.9722329051611415, - "learning_rate": 5.703566095282109e-06, - "loss": 1.176, - "step": 12640 - }, - { - "epoch": 0.9723098223213599, - "learning_rate": 5.6720310585015586e-06, - "loss": 1.0016, - "step": 12641 - }, - { - "epoch": 0.9723867394815784, - "learning_rate": 5.6405832770886065e-06, - "loss": 1.064, - "step": 12642 - }, - { - "epoch": 0.9724636566417968, - "learning_rate": 5.609222752879284e-06, - "loss": 1.0039, - "step": 12643 - }, - { - "epoch": 0.9725405738020152, - "learning_rate": 5.5779494877049585e-06, - "loss": 1.4048, - "step": 12644 - }, - { - "epoch": 0.9726174909622337, - "learning_rate": 5.5467634833915035e-06, - "loss": 1.0396, - "step": 12645 - }, - { - "epoch": 0.9726944081224521, - "learning_rate": 5.515664741760129e-06, - "loss": 1.2299, - "step": 12646 - }, - { - "epoch": 0.9727713252826705, - "learning_rate": 5.4846532646267155e-06, - "loss": 1.2927, - "step": 12647 - }, - { - "epoch": 0.972848242442889, - "learning_rate": 5.453729053801648e-06, - "loss": 1.4449, - "step": 12648 - }, - { - "epoch": 0.9729251596031074, - "learning_rate": 5.422892111091149e-06, - "loss": 1.2064, - "step": 12649 - }, - { - "epoch": 0.973002076763326, - "learning_rate": 5.392142438295445e-06, - "loss": 1.3436, - "step": 12650 - }, - { - "epoch": 0.9730789939235444, - "learning_rate": 5.361480037210265e-06, - "loss": 1.117, - "step": 12651 - }, - { - "epoch": 0.9731559110837628, - "learning_rate": 5.330904909625845e-06, - "loss": 1.1208, - "step": 12652 - }, - { - "epoch": 0.9732328282439813, - "learning_rate": 5.300417057327589e-06, - "loss": 1.3434, - "step": 12653 - }, - { - "epoch": 0.9733097454041997, - "learning_rate": 5.270016482095908e-06, - "loss": 1.2208, - "step": 12654 - }, - { - "epoch": 0.9733866625644181, - "learning_rate": 5.239703185705546e-06, - "loss": 1.2378, - "step": 12655 - }, - { - "epoch": 0.9734635797246366, - "learning_rate": 5.209477169926758e-06, - "loss": 1.2216, - "step": 12656 - }, - { - "epoch": 0.973540496884855, - "learning_rate": 5.1793384365246274e-06, - "loss": 1.2038, - "step": 12657 - }, - { - "epoch": 0.9736174140450734, - "learning_rate": 5.149286987258583e-06, - "loss": 1.2282, - "step": 12658 - }, - { - "epoch": 0.9736943312052919, - "learning_rate": 5.119322823883554e-06, - "loss": 1.1535, - "step": 12659 - }, - { - "epoch": 0.9737712483655103, - "learning_rate": 5.0894459481493075e-06, - "loss": 1.2682, - "step": 12660 - }, - { - "epoch": 0.9738481655257288, - "learning_rate": 5.059656361800114e-06, - "loss": 1.9436, - "step": 12661 - }, - { - "epoch": 0.9739250826859472, - "learning_rate": 5.029954066575748e-06, - "loss": 1.5157, - "step": 12662 - }, - { - "epoch": 0.9740019998461656, - "learning_rate": 5.000339064210324e-06, - "loss": 1.2199, - "step": 12663 - }, - { - "epoch": 0.9740789170063842, - "learning_rate": 4.970811356433124e-06, - "loss": 0.8043, - "step": 12664 - }, - { - "epoch": 0.9741558341666026, - "learning_rate": 4.941370944968271e-06, - "loss": 0.9901, - "step": 12665 - }, - { - "epoch": 0.974232751326821, - "learning_rate": 4.9120178315350535e-06, - "loss": 0.8299, - "step": 12666 - }, - { - "epoch": 0.9743096684870395, - "learning_rate": 4.882752017847103e-06, - "loss": 1.0728, - "step": 12667 - }, - { - "epoch": 0.9743865856472579, - "learning_rate": 4.853573505613551e-06, - "loss": 0.8153, - "step": 12668 
- }, - { - "epoch": 0.9744635028074764, - "learning_rate": 4.824482296537869e-06, - "loss": 0.9062, - "step": 12669 - }, - { - "epoch": 0.9745404199676948, - "learning_rate": 4.79547839231903e-06, - "loss": 1.367, - "step": 12670 - }, - { - "epoch": 0.9746173371279132, - "learning_rate": 4.766561794650515e-06, - "loss": 0.9308, - "step": 12671 - }, - { - "epoch": 0.9746942542881317, - "learning_rate": 4.737732505220638e-06, - "loss": 0.8003, - "step": 12672 - }, - { - "epoch": 0.9747711714483501, - "learning_rate": 4.708990525713053e-06, - "loss": 1.1944, - "step": 12673 - }, - { - "epoch": 0.9748480886085685, - "learning_rate": 4.680335857805751e-06, - "loss": 1.4942, - "step": 12674 - }, - { - "epoch": 0.974925005768787, - "learning_rate": 4.65176850317206e-06, - "loss": 1.3383, - "step": 12675 - }, - { - "epoch": 0.9750019229290054, - "learning_rate": 4.62328846347998e-06, - "loss": 1.0785, - "step": 12676 - }, - { - "epoch": 0.9750788400892239, - "learning_rate": 4.5948957403925126e-06, - "loss": 1.28, - "step": 12677 - }, - { - "epoch": 0.9751557572494424, - "learning_rate": 4.566590335567666e-06, - "loss": 1.4324, - "step": 12678 - }, - { - "epoch": 0.9752326744096608, - "learning_rate": 4.538372250657952e-06, - "loss": 1.1537, - "step": 12679 - }, - { - "epoch": 0.9753095915698793, - "learning_rate": 4.5102414873112176e-06, - "loss": 1.1609, - "step": 12680 - }, - { - "epoch": 0.9753865087300977, - "learning_rate": 4.48219804717015e-06, - "loss": 1.3438, - "step": 12681 - }, - { - "epoch": 0.9754634258903161, - "learning_rate": 4.454241931872105e-06, - "loss": 1.1787, - "step": 12682 - }, - { - "epoch": 0.9755403430505346, - "learning_rate": 4.426373143049611e-06, - "loss": 1.0784, - "step": 12683 - }, - { - "epoch": 0.975617260210753, - "learning_rate": 4.3985916823297e-06, - "loss": 0.9945, - "step": 12684 - }, - { - "epoch": 0.9756941773709714, - "learning_rate": 4.370897551334741e-06, - "loss": 1.4363, - "step": 12685 - }, - { - "epoch": 0.9757710945311899, - "learning_rate": 4.343290751681772e-06, - "loss": 1.2473, - "step": 12686 - }, - { - "epoch": 0.9758480116914083, - "learning_rate": 4.315771284982672e-06, - "loss": 0.9725, - "step": 12687 - }, - { - "epoch": 0.9759249288516268, - "learning_rate": 4.288339152844656e-06, - "loss": 1.2346, - "step": 12688 - }, - { - "epoch": 0.9760018460118453, - "learning_rate": 4.260994356869108e-06, - "loss": 1.38, - "step": 12689 - }, - { - "epoch": 0.9760787631720637, - "learning_rate": 4.233736898653084e-06, - "loss": 1.3028, - "step": 12690 - }, - { - "epoch": 0.9761556803322822, - "learning_rate": 4.206566779787979e-06, - "loss": 1.2133, - "step": 12691 - }, - { - "epoch": 0.9762325974925006, - "learning_rate": 4.179484001860356e-06, - "loss": 1.0899, - "step": 12692 - }, - { - "epoch": 0.976309514652719, - "learning_rate": 4.1524885664514515e-06, - "loss": 0.9205, - "step": 12693 - }, - { - "epoch": 0.9763864318129375, - "learning_rate": 4.1255804751376695e-06, - "loss": 1.0226, - "step": 12694 - }, - { - "epoch": 0.9764633489731559, - "learning_rate": 4.098759729490254e-06, - "loss": 1.0614, - "step": 12695 - }, - { - "epoch": 0.9765402661333743, - "learning_rate": 4.072026331075284e-06, - "loss": 1.2945, - "step": 12696 - }, - { - "epoch": 0.9766171832935928, - "learning_rate": 4.045380281453681e-06, - "loss": 1.066, - "step": 12697 - }, - { - "epoch": 0.9766941004538112, - "learning_rate": 4.018821582181364e-06, - "loss": 1.1933, - "step": 12698 - }, - { - "epoch": 0.9767710176140297, - "learning_rate": 3.992350234809261e-06, - 
"loss": 0.955, - "step": 12699 - }, - { - "epoch": 0.9768479347742481, - "learning_rate": 3.965966240882802e-06, - "loss": 0.9804, - "step": 12700 - }, - { - "epoch": 0.9769248519344665, - "learning_rate": 3.939669601942753e-06, - "loss": 1.519, - "step": 12701 - }, - { - "epoch": 0.9770017690946851, - "learning_rate": 3.913460319524387e-06, - "loss": 0.7899, - "step": 12702 - }, - { - "epoch": 0.9770786862549035, - "learning_rate": 3.8873383951584795e-06, - "loss": 1.2498, - "step": 12703 - }, - { - "epoch": 0.9771556034151219, - "learning_rate": 3.8613038303699775e-06, - "loss": 1.149, - "step": 12704 - }, - { - "epoch": 0.9772325205753404, - "learning_rate": 3.835356626679165e-06, - "loss": 1.2062, - "step": 12705 - }, - { - "epoch": 0.9773094377355588, - "learning_rate": 3.8094967856013297e-06, - "loss": 1.079, - "step": 12706 - }, - { - "epoch": 0.9773863548957773, - "learning_rate": 3.7837243086459303e-06, - "loss": 1.4197, - "step": 12707 - }, - { - "epoch": 0.9774632720559957, - "learning_rate": 3.7580391973184302e-06, - "loss": 1.7759, - "step": 12708 - }, - { - "epoch": 0.9775401892162141, - "learning_rate": 3.7324414531184626e-06, - "loss": 1.1377, - "step": 12709 - }, - { - "epoch": 0.9776171063764326, - "learning_rate": 3.706931077540332e-06, - "loss": 1.1648, - "step": 12710 - }, - { - "epoch": 0.977694023536651, - "learning_rate": 3.6815080720740136e-06, - "loss": 1.1485, - "step": 12711 - }, - { - "epoch": 0.9777709406968694, - "learning_rate": 3.6561724382039863e-06, - "loss": 1.1104, - "step": 12712 - }, - { - "epoch": 0.9778478578570879, - "learning_rate": 3.6309241774094004e-06, - "loss": 1.0424, - "step": 12713 - }, - { - "epoch": 0.9779247750173063, - "learning_rate": 3.6057632911645766e-06, - "loss": 1.3053, - "step": 12714 - }, - { - "epoch": 0.9780016921775248, - "learning_rate": 3.5806897809388395e-06, - "loss": 1.2633, - "step": 12715 - }, - { - "epoch": 0.9780786093377433, - "learning_rate": 3.555703648196018e-06, - "loss": 1.0491, - "step": 12716 - }, - { - "epoch": 0.9781555264979617, - "learning_rate": 3.5308048943954453e-06, - "loss": 1.366, - "step": 12717 - }, - { - "epoch": 0.9782324436581802, - "learning_rate": 3.5059935209906247e-06, - "loss": 1.0456, - "step": 12718 - }, - { - "epoch": 0.9783093608183986, - "learning_rate": 3.4812695294303977e-06, - "loss": 1.1009, - "step": 12719 - }, - { - "epoch": 0.978386277978617, - "learning_rate": 3.456632921158609e-06, - "loss": 1.1536, - "step": 12720 - }, - { - "epoch": 0.9784631951388355, - "learning_rate": 3.4320836976134418e-06, - "loss": 1.8793, - "step": 12721 - }, - { - "epoch": 0.9785401122990539, - "learning_rate": 3.4076218602289156e-06, - "loss": 1.0634, - "step": 12722 - }, - { - "epoch": 0.9786170294592723, - "learning_rate": 3.383247410432888e-06, - "loss": 1.1016, - "step": 12723 - }, - { - "epoch": 0.9786939466194908, - "learning_rate": 3.358960349648721e-06, - "loss": 1.2406, - "step": 12724 - }, - { - "epoch": 0.9787708637797092, - "learning_rate": 3.3347606792947793e-06, - "loss": 1.2966, - "step": 12725 - }, - { - "epoch": 0.9788477809399277, - "learning_rate": 3.3106484007837667e-06, - "loss": 0.7612, - "step": 12726 - }, - { - "epoch": 0.9789246981001462, - "learning_rate": 3.2866235155238897e-06, - "loss": 1.1185, - "step": 12727 - }, - { - "epoch": 0.9790016152603646, - "learning_rate": 3.262686024918027e-06, - "loss": 1.002, - "step": 12728 - }, - { - "epoch": 0.9790785324205831, - "learning_rate": 3.2388359303635596e-06, - "loss": 1.3115, - "step": 12729 - }, - { - "epoch": 
0.9791554495808015, - "learning_rate": 3.2150732332535404e-06, - "loss": 1.2793, - "step": 12730 - }, - { - "epoch": 0.9792323667410199, - "learning_rate": 3.1913979349751932e-06, - "loss": 1.0122, - "step": 12731 - }, - { - "epoch": 0.9793092839012384, - "learning_rate": 3.1678100369110783e-06, - "loss": 1.0464, - "step": 12732 - }, - { - "epoch": 0.9793862010614568, - "learning_rate": 3.1443095404385945e-06, - "loss": 0.8579, - "step": 12733 - }, - { - "epoch": 0.9794631182216753, - "learning_rate": 3.120896446929644e-06, - "loss": 1.3535, - "step": 12734 - }, - { - "epoch": 0.9795400353818937, - "learning_rate": 3.0975707577517997e-06, - "loss": 0.8382, - "step": 12735 - }, - { - "epoch": 0.9796169525421121, - "learning_rate": 3.0743324742668054e-06, - "loss": 1.0523, - "step": 12736 - }, - { - "epoch": 0.9796938697023306, - "learning_rate": 3.0511815978314096e-06, - "loss": 1.3411, - "step": 12737 - }, - { - "epoch": 0.979770786862549, - "learning_rate": 3.0281181297976965e-06, - "loss": 1.0219, - "step": 12738 - }, - { - "epoch": 0.9798477040227674, - "learning_rate": 3.005142071512257e-06, - "loss": 0.8128, - "step": 12739 - }, - { - "epoch": 0.979924621182986, - "learning_rate": 2.98225342431685e-06, - "loss": 1.1951, - "step": 12740 - }, - { - "epoch": 0.9800015383432044, - "learning_rate": 2.95945218954774e-06, - "loss": 0.8736, - "step": 12741 - }, - { - "epoch": 0.9800784555034228, - "learning_rate": 2.9367383685363625e-06, - "loss": 1.0182, - "step": 12742 - }, - { - "epoch": 0.9801553726636413, - "learning_rate": 2.9141119626089898e-06, - "loss": 1.1931, - "step": 12743 - }, - { - "epoch": 0.9802322898238597, - "learning_rate": 2.8915729730868976e-06, - "loss": 1.1738, - "step": 12744 - }, - { - "epoch": 0.9803092069840782, - "learning_rate": 2.8691214012860343e-06, - "loss": 1.3247, - "step": 12745 - }, - { - "epoch": 0.9803861241442966, - "learning_rate": 2.8467572485175176e-06, - "loss": 1.1912, - "step": 12746 - }, - { - "epoch": 0.980463041304515, - "learning_rate": 2.824480516087136e-06, - "loss": 1.1418, - "step": 12747 - }, - { - "epoch": 0.9805399584647335, - "learning_rate": 2.802291205295682e-06, - "loss": 1.3108, - "step": 12748 - }, - { - "epoch": 0.9806168756249519, - "learning_rate": 2.780189317438786e-06, - "loss": 1.0052, - "step": 12749 - }, - { - "epoch": 0.9806937927851703, - "learning_rate": 2.758174853806916e-06, - "loss": 1.3417, - "step": 12750 - }, - { - "epoch": 0.9807707099453888, - "learning_rate": 2.7362478156857105e-06, - "loss": 1.1238, - "step": 12751 - }, - { - "epoch": 0.9808476271056072, - "learning_rate": 2.714408204355312e-06, - "loss": 1.1047, - "step": 12752 - }, - { - "epoch": 0.9809245442658258, - "learning_rate": 2.6926560210911997e-06, - "loss": 1.1214, - "step": 12753 - }, - { - "epoch": 0.9810014614260442, - "learning_rate": 2.670991267163192e-06, - "loss": 1.2733, - "step": 12754 - }, - { - "epoch": 0.9810783785862626, - "learning_rate": 2.6494139438366092e-06, - "loss": 1.2716, - "step": 12755 - }, - { - "epoch": 0.9811552957464811, - "learning_rate": 2.6279240523711113e-06, - "loss": 1.1705, - "step": 12756 - }, - { - "epoch": 0.9812322129066995, - "learning_rate": 2.6065215940216935e-06, - "loss": 0.8963, - "step": 12757 - }, - { - "epoch": 0.9813091300669179, - "learning_rate": 2.58520657003819e-06, - "loss": 0.8876, - "step": 12758 - }, - { - "epoch": 0.9813860472271364, - "learning_rate": 2.563978981664772e-06, - "loss": 1.2798, - "step": 12759 - }, - { - "epoch": 0.9814629643873548, - "learning_rate": 2.5428388301414474e-06, 
- "loss": 0.9096, - "step": 12760 - }, - { - "epoch": 0.9815398815475732, - "learning_rate": 2.5217861167022295e-06, - "loss": 0.9567, - "step": 12761 - }, - { - "epoch": 0.9816167987077917, - "learning_rate": 2.5008208425766345e-06, - "loss": 0.9947, - "step": 12762 - }, - { - "epoch": 0.9816937158680101, - "learning_rate": 2.479943008988683e-06, - "loss": 1.5009, - "step": 12763 - }, - { - "epoch": 0.9817706330282286, - "learning_rate": 2.4591526171575673e-06, - "loss": 1.259, - "step": 12764 - }, - { - "epoch": 0.981847550188447, - "learning_rate": 2.4384496682973158e-06, - "loss": 1.125, - "step": 12765 - }, - { - "epoch": 0.9819244673486655, - "learning_rate": 2.417834163616628e-06, - "loss": 1.0267, - "step": 12766 - }, - { - "epoch": 0.982001384508884, - "learning_rate": 2.3973061043193746e-06, - "loss": 1.3648, - "step": 12767 - }, - { - "epoch": 0.9820783016691024, - "learning_rate": 2.376865491604263e-06, - "loss": 1.1793, - "step": 12768 - }, - { - "epoch": 0.9821552188293208, - "learning_rate": 2.3565123266646725e-06, - "loss": 1.2886, - "step": 12769 - }, - { - "epoch": 0.9822321359895393, - "learning_rate": 2.3362466106891524e-06, - "loss": 1.2724, - "step": 12770 - }, - { - "epoch": 0.9823090531497577, - "learning_rate": 2.3160683448609223e-06, - "loss": 1.0261, - "step": 12771 - }, - { - "epoch": 0.9823859703099762, - "learning_rate": 2.295977530358373e-06, - "loss": 0.873, - "step": 12772 - }, - { - "epoch": 0.9824628874701946, - "learning_rate": 2.2759741683545665e-06, - "loss": 1.3855, - "step": 12773 - }, - { - "epoch": 0.982539804630413, - "learning_rate": 2.2560582600175684e-06, - "loss": 1.3186, - "step": 12774 - }, - { - "epoch": 0.9826167217906315, - "learning_rate": 2.2362298065101148e-06, - "loss": 1.333, - "step": 12775 - }, - { - "epoch": 0.9826936389508499, - "learning_rate": 2.21648880899028e-06, - "loss": 1.3524, - "step": 12776 - }, - { - "epoch": 0.9827705561110683, - "learning_rate": 2.196835268610642e-06, - "loss": 1.0582, - "step": 12777 - }, - { - "epoch": 0.9828474732712869, - "learning_rate": 2.1772691865186155e-06, - "loss": 1.4895, - "step": 12778 - }, - { - "epoch": 0.9829243904315053, - "learning_rate": 2.157790563856954e-06, - "loss": 1.1967, - "step": 12779 - }, - { - "epoch": 0.9830013075917237, - "learning_rate": 2.1383994017629137e-06, - "loss": 1.2466, - "step": 12780 - }, - { - "epoch": 0.9830782247519422, - "learning_rate": 2.1190957013685895e-06, - "loss": 1.1307, - "step": 12781 - }, - { - "epoch": 0.9831551419121606, - "learning_rate": 2.0998794638014128e-06, - "loss": 0.887, - "step": 12782 - }, - { - "epoch": 0.9832320590723791, - "learning_rate": 2.080750690183486e-06, - "loss": 1.3147, - "step": 12783 - }, - { - "epoch": 0.9833089762325975, - "learning_rate": 2.061709381631582e-06, - "loss": 0.7593, - "step": 12784 - }, - { - "epoch": 0.9833858933928159, - "learning_rate": 2.0427555392574793e-06, - "loss": 1.0692, - "step": 12785 - }, - { - "epoch": 0.9834628105530344, - "learning_rate": 2.023889164167958e-06, - "loss": 0.9097, - "step": 12786 - }, - { - "epoch": 0.9835397277132528, - "learning_rate": 2.0051102574648037e-06, - "loss": 1.2492, - "step": 12787 - }, - { - "epoch": 0.9836166448734712, - "learning_rate": 1.9864188202444734e-06, - "loss": 1.042, - "step": 12788 - }, - { - "epoch": 0.9836935620336897, - "learning_rate": 1.96781485359826e-06, - "loss": 0.9379, - "step": 12789 - }, - { - "epoch": 0.9837704791939081, - "learning_rate": 1.949298358612628e-06, - "loss": 1.2095, - "step": 12790 - }, - { - "epoch": 
0.9838473963541267, - "learning_rate": 1.930869336368546e-06, - "loss": 1.1779, - "step": 12791 - }, - { - "epoch": 0.9839243135143451, - "learning_rate": 1.9125277879424864e-06, - "loss": 0.7955, - "step": 12792 - }, - { - "epoch": 0.9840012306745635, - "learning_rate": 1.8942737144050926e-06, - "loss": 1.16, - "step": 12793 - }, - { - "epoch": 0.984078147834782, - "learning_rate": 1.8761071168223454e-06, - "loss": 0.9418, - "step": 12794 - }, - { - "epoch": 0.9841550649950004, - "learning_rate": 1.8580279962548963e-06, - "loss": 1.0033, - "step": 12795 - }, - { - "epoch": 0.9842319821552188, - "learning_rate": 1.8400363537585674e-06, - "loss": 1.109, - "step": 12796 - }, - { - "epoch": 0.9843088993154373, - "learning_rate": 1.8221321903840182e-06, - "loss": 1.2688, - "step": 12797 - }, - { - "epoch": 0.9843858164756557, - "learning_rate": 1.8043155071764129e-06, - "loss": 1.2503, - "step": 12798 - }, - { - "epoch": 0.9844627336358741, - "learning_rate": 1.786586305176252e-06, - "loss": 0.8924, - "step": 12799 - }, - { - "epoch": 0.9845396507960926, - "learning_rate": 1.7689445854185416e-06, - "loss": 1.4539, - "step": 12800 - }, - { - "epoch": 0.984616567956311, - "learning_rate": 1.75139034893379e-06, - "loss": 1.037, - "step": 12801 - }, - { - "epoch": 0.9846934851165295, - "learning_rate": 1.7339235967466783e-06, - "loss": 1.0452, - "step": 12802 - }, - { - "epoch": 0.984770402276748, - "learning_rate": 1.7165443298772233e-06, - "loss": 0.9585, - "step": 12803 - }, - { - "epoch": 0.9848473194369664, - "learning_rate": 1.69925254934028e-06, - "loss": 1.0599, - "step": 12804 - }, - { - "epoch": 0.9849242365971849, - "learning_rate": 1.6820482561455409e-06, - "loss": 0.885, - "step": 12805 - }, - { - "epoch": 0.9850011537574033, - "learning_rate": 1.664931451297369e-06, - "loss": 1.106, - "step": 12806 - }, - { - "epoch": 0.9850780709176217, - "learning_rate": 1.6479021357954649e-06, - "loss": 1.3251, - "step": 12807 - }, - { - "epoch": 0.9851549880778402, - "learning_rate": 1.6309603106340331e-06, - "loss": 1.2526, - "step": 12808 - }, - { - "epoch": 0.9852319052380586, - "learning_rate": 1.6141059768024492e-06, - "loss": 1.0131, - "step": 12809 - }, - { - "epoch": 0.9853088223982771, - "learning_rate": 1.5973391352847588e-06, - "loss": 1.4628, - "step": 12810 - }, - { - "epoch": 0.9853857395584955, - "learning_rate": 1.5806597870600126e-06, - "loss": 1.3573, - "step": 12811 - }, - { - "epoch": 0.9854626567187139, - "learning_rate": 1.5640679331022644e-06, - "loss": 1.0887, - "step": 12812 - }, - { - "epoch": 0.9855395738789324, - "learning_rate": 1.547563574380073e-06, - "loss": 1.1108, - "step": 12813 - }, - { - "epoch": 0.9856164910391508, - "learning_rate": 1.5311467118575006e-06, - "loss": 1.5457, - "step": 12814 - }, - { - "epoch": 0.9856934081993692, - "learning_rate": 1.5148173464927806e-06, - "loss": 1.2486, - "step": 12815 - }, - { - "epoch": 0.9857703253595878, - "learning_rate": 1.4985754792394833e-06, - "loss": 1.2806, - "step": 12816 - }, - { - "epoch": 0.9858472425198062, - "learning_rate": 1.4824211110461839e-06, - "loss": 1.2897, - "step": 12817 - }, - { - "epoch": 0.9859241596800246, - "learning_rate": 1.4663542428561272e-06, - "loss": 1.2803, - "step": 12818 - }, - { - "epoch": 0.9860010768402431, - "learning_rate": 1.45037487560723e-06, - "loss": 1.1977, - "step": 12819 - }, - { - "epoch": 0.9860779940004615, - "learning_rate": 1.4344830102327456e-06, - "loss": 1.0733, - "step": 12820 - }, - { - "epoch": 0.98615491116068, - "learning_rate": 1.4186786476604318e-06, - 
"loss": 0.967, - "step": 12821 - }, - { - "epoch": 0.9862318283208984, - "learning_rate": 1.402961788813384e-06, - "loss": 0.895, - "step": 12822 - }, - { - "epoch": 0.9863087454811168, - "learning_rate": 1.3873324346093673e-06, - "loss": 1.3207, - "step": 12823 - }, - { - "epoch": 0.9863856626413353, - "learning_rate": 1.3717905859604862e-06, - "loss": 1.4154, - "step": 12824 - }, - { - "epoch": 0.9864625798015537, - "learning_rate": 1.3563362437748472e-06, - "loss": 1.1526, - "step": 12825 - }, - { - "epoch": 0.9865394969617721, - "learning_rate": 1.3409694089543956e-06, - "loss": 1.3104, - "step": 12826 - }, - { - "epoch": 0.9866164141219906, - "learning_rate": 1.3256900823967465e-06, - "loss": 0.8908, - "step": 12827 - }, - { - "epoch": 0.986693331282209, - "learning_rate": 1.3104982649938534e-06, - "loss": 1.2367, - "step": 12828 - }, - { - "epoch": 0.9867702484424276, - "learning_rate": 1.2953939576330065e-06, - "loss": 1.1903, - "step": 12829 - }, - { - "epoch": 0.986847165602646, - "learning_rate": 1.2803771611958337e-06, - "loss": 1.1108, - "step": 12830 - }, - { - "epoch": 0.9869240827628644, - "learning_rate": 1.2654478765594668e-06, - "loss": 1.2312, - "step": 12831 - }, - { - "epoch": 0.9870009999230829, - "learning_rate": 1.250606104595542e-06, - "loss": 1.1009, - "step": 12832 - }, - { - "epoch": 0.9870779170833013, - "learning_rate": 1.235851846170699e-06, - "loss": 0.9126, - "step": 12833 - }, - { - "epoch": 0.9871548342435197, - "learning_rate": 1.221185102146416e-06, - "loss": 1.1545, - "step": 12834 - }, - { - "epoch": 0.9872317514037382, - "learning_rate": 1.2066058733790075e-06, - "loss": 1.1847, - "step": 12835 - }, - { - "epoch": 0.9873086685639566, - "learning_rate": 1.1921141607201257e-06, - "loss": 1.2383, - "step": 12836 - }, - { - "epoch": 0.9873855857241751, - "learning_rate": 1.1777099650154277e-06, - "loss": 1.4643, - "step": 12837 - }, - { - "epoch": 0.9874625028843935, - "learning_rate": 1.1633932871065733e-06, - "loss": 1.1681, - "step": 12838 - }, - { - "epoch": 0.9875394200446119, - "learning_rate": 1.149164127829061e-06, - "loss": 1.3279, - "step": 12839 - }, - { - "epoch": 0.9876163372048304, - "learning_rate": 1.1350224880138926e-06, - "loss": 1.1594, - "step": 12840 - }, - { - "epoch": 0.9876932543650488, - "learning_rate": 1.1209683684869075e-06, - "loss": 1.2254, - "step": 12841 - }, - { - "epoch": 0.9877701715252672, - "learning_rate": 1.107001770068783e-06, - "loss": 1.0006, - "step": 12842 - }, - { - "epoch": 0.9878470886854858, - "learning_rate": 1.0931226935746996e-06, - "loss": 1.1089, - "step": 12843 - }, - { - "epoch": 0.9879240058457042, - "learning_rate": 1.0793311398155093e-06, - "loss": 1.3209, - "step": 12844 - }, - { - "epoch": 0.9880009230059226, - "learning_rate": 1.0656271095962345e-06, - "loss": 0.9682, - "step": 12845 - }, - { - "epoch": 0.9880778401661411, - "learning_rate": 1.0520106037170685e-06, - "loss": 1.1248, - "step": 12846 - }, - { - "epoch": 0.9881547573263595, - "learning_rate": 1.0384816229732086e-06, - "loss": 1.2041, - "step": 12847 - }, - { - "epoch": 0.988231674486578, - "learning_rate": 1.0250401681546896e-06, - "loss": 1.0256, - "step": 12848 - }, - { - "epoch": 0.9883085916467964, - "learning_rate": 1.0116862400462167e-06, - "loss": 0.9725, - "step": 12849 - }, - { - "epoch": 0.9883855088070148, - "learning_rate": 9.984198394275002e-07, - "loss": 0.7537, - "step": 12850 - }, - { - "epoch": 0.9884624259672333, - "learning_rate": 9.852409670734197e-07, - "loss": 1.165, - "step": 12851 - }, - { - "epoch": 
0.9885393431274517, - "learning_rate": 9.721496237531934e-07, - "loss": 1.3722, - "step": 12852 - }, - { - "epoch": 0.9886162602876701, - "learning_rate": 9.59145810231543e-07, - "loss": 0.8697, - "step": 12853 - }, - { - "epoch": 0.9886931774478886, - "learning_rate": 9.46229527267528e-07, - "loss": 1.3825, - "step": 12854 - }, - { - "epoch": 0.988770094608107, - "learning_rate": 9.33400775615545e-07, - "loss": 1.2608, - "step": 12855 - }, - { - "epoch": 0.9888470117683256, - "learning_rate": 9.206595560244946e-07, - "loss": 0.8974, - "step": 12856 - }, - { - "epoch": 0.988923928928544, - "learning_rate": 9.080058692384485e-07, - "loss": 0.872, - "step": 12857 - }, - { - "epoch": 0.9890008460887624, - "learning_rate": 8.954397159963157e-07, - "loss": 1.4197, - "step": 12858 - }, - { - "epoch": 0.9890777632489809, - "learning_rate": 8.829610970318425e-07, - "loss": 1.2919, - "step": 12859 - }, - { - "epoch": 0.9891546804091993, - "learning_rate": 8.705700130734462e-07, - "loss": 1.3476, - "step": 12860 - }, - { - "epoch": 0.9892315975694177, - "learning_rate": 8.582664648448813e-07, - "loss": 1.3725, - "step": 12861 - }, - { - "epoch": 0.9893085147296362, - "learning_rate": 8.460504530645729e-07, - "loss": 0.9024, - "step": 12862 - }, - { - "epoch": 0.9893854318898546, - "learning_rate": 8.339219784457841e-07, - "loss": 0.93, - "step": 12863 - }, - { - "epoch": 0.989462349050073, - "learning_rate": 8.218810416966149e-07, - "loss": 1.0988, - "step": 12864 - }, - { - "epoch": 0.9895392662102915, - "learning_rate": 8.099276435201697e-07, - "loss": 1.0911, - "step": 12865 - }, - { - "epoch": 0.9896161833705099, - "learning_rate": 7.980617846145566e-07, - "loss": 0.839, - "step": 12866 - }, - { - "epoch": 0.9896931005307285, - "learning_rate": 7.862834656723882e-07, - "loss": 1.2614, - "step": 12867 - }, - { - "epoch": 0.9897700176909469, - "learning_rate": 7.74592687381781e-07, - "loss": 1.2021, - "step": 12868 - }, - { - "epoch": 0.9898469348511653, - "learning_rate": 7.629894504250223e-07, - "loss": 0.8893, - "step": 12869 - }, - { - "epoch": 0.9899238520113838, - "learning_rate": 7.514737554797368e-07, - "loss": 1.1556, - "step": 12870 - }, - { - "epoch": 0.9900007691716022, - "learning_rate": 7.400456032185532e-07, - "loss": 0.9451, - "step": 12871 - }, - { - "epoch": 0.9900776863318206, - "learning_rate": 7.287049943084379e-07, - "loss": 1.2126, - "step": 12872 - }, - { - "epoch": 0.9901546034920391, - "learning_rate": 7.174519294118609e-07, - "loss": 0.9153, - "step": 12873 - }, - { - "epoch": 0.9902315206522575, - "learning_rate": 7.062864091857967e-07, - "loss": 1.0964, - "step": 12874 - }, - { - "epoch": 0.990308437812476, - "learning_rate": 6.952084342820574e-07, - "loss": 1.3543, - "step": 12875 - }, - { - "epoch": 0.9903853549726944, - "learning_rate": 6.84218005347792e-07, - "loss": 1.212, - "step": 12876 - }, - { - "epoch": 0.9904622721329128, - "learning_rate": 6.733151230244872e-07, - "loss": 0.9412, - "step": 12877 - }, - { - "epoch": 0.9905391892931313, - "learning_rate": 6.624997879488004e-07, - "loss": 1.377, - "step": 12878 - }, - { - "epoch": 0.9906161064533497, - "learning_rate": 6.51772000752393e-07, - "loss": 0.9764, - "step": 12879 - }, - { - "epoch": 0.9906930236135681, - "learning_rate": 6.411317620615975e-07, - "loss": 1.0797, - "step": 12880 - }, - { - "epoch": 0.9907699407737867, - "learning_rate": 6.305790724977501e-07, - "loss": 0.9857, - "step": 12881 - }, - { - "epoch": 0.9908468579340051, - "learning_rate": 6.201139326770245e-07, - "loss": 0.8654, - "step": 
12882 - }, - { - "epoch": 0.9909237750942235, - "learning_rate": 6.09736343210432e-07, - "loss": 1.3121, - "step": 12883 - }, - { - "epoch": 0.991000692254442, - "learning_rate": 5.994463047039878e-07, - "loss": 1.084, - "step": 12884 - }, - { - "epoch": 0.9910776094146604, - "learning_rate": 5.892438177583781e-07, - "loss": 1.2455, - "step": 12885 - }, - { - "epoch": 0.9911545265748789, - "learning_rate": 5.791288829696262e-07, - "loss": 1.2246, - "step": 12886 - }, - { - "epoch": 0.9912314437350973, - "learning_rate": 5.691015009280931e-07, - "loss": 1.2855, - "step": 12887 - }, - { - "epoch": 0.9913083608953157, - "learning_rate": 5.59161672219477e-07, - "loss": 1.3215, - "step": 12888 - }, - { - "epoch": 0.9913852780555342, - "learning_rate": 5.493093974239804e-07, - "loss": 1.4003, - "step": 12889 - }, - { - "epoch": 0.9914621952157526, - "learning_rate": 5.39544677117143e-07, - "loss": 1.0938, - "step": 12890 - }, - { - "epoch": 0.991539112375971, - "learning_rate": 5.298675118688423e-07, - "loss": 1.211, - "step": 12891 - }, - { - "epoch": 0.9916160295361895, - "learning_rate": 5.202779022444592e-07, - "loss": 1.3325, - "step": 12892 - }, - { - "epoch": 0.991692946696408, - "learning_rate": 5.107758488035464e-07, - "loss": 1.0643, - "step": 12893 - }, - { - "epoch": 0.9917698638566265, - "learning_rate": 5.013613521013261e-07, - "loss": 1.3499, - "step": 12894 - }, - { - "epoch": 0.9918467810168449, - "learning_rate": 4.920344126873588e-07, - "loss": 1.4176, - "step": 12895 - }, - { - "epoch": 0.9919236981770633, - "learning_rate": 4.827950311062091e-07, - "loss": 0.9883, - "step": 12896 - }, - { - "epoch": 0.9920006153372818, - "learning_rate": 4.736432078974451e-07, - "loss": 1.3637, - "step": 12897 - }, - { - "epoch": 0.9920775324975002, - "learning_rate": 4.6457894359530627e-07, - "loss": 1.0593, - "step": 12898 - }, - { - "epoch": 0.9921544496577186, - "learning_rate": 4.55602238729369e-07, - "loss": 1.0567, - "step": 12899 - }, - { - "epoch": 0.9922313668179371, - "learning_rate": 4.46713093823381e-07, - "loss": 1.1161, - "step": 12900 - }, - { - "epoch": 0.9923082839781555, - "learning_rate": 4.3791150939676003e-07, - "loss": 1.0579, - "step": 12901 - }, - { - "epoch": 0.9923852011383739, - "learning_rate": 4.291974859632619e-07, - "loss": 1.2888, - "step": 12902 - }, - { - "epoch": 0.9924621182985924, - "learning_rate": 4.205710240318128e-07, - "loss": 1.2811, - "step": 12903 - }, - { - "epoch": 0.9925390354588108, - "learning_rate": 4.1203212410600987e-07, - "loss": 1.1893, - "step": 12904 - }, - { - "epoch": 0.9926159526190294, - "learning_rate": 4.035807866844543e-07, - "loss": 1.189, - "step": 12905 - }, - { - "epoch": 0.9926928697792478, - "learning_rate": 3.952170122605847e-07, - "loss": 1.0412, - "step": 12906 - }, - { - "epoch": 0.9927697869394662, - "learning_rate": 3.8694080132301023e-07, - "loss": 0.9806, - "step": 12907 - }, - { - "epoch": 0.9928467040996847, - "learning_rate": 3.787521543546779e-07, - "loss": 0.9748, - "step": 12908 - }, - { - "epoch": 0.9929236212599031, - "learning_rate": 3.706510718338718e-07, - "loss": 1.4697, - "step": 12909 - }, - { - "epoch": 0.9930005384201215, - "learning_rate": 3.626375542337135e-07, - "loss": 1.1323, - "step": 12910 - }, - { - "epoch": 0.99307745558034, - "learning_rate": 3.5471160202199535e-07, - "loss": 1.2568, - "step": 12911 - }, - { - "epoch": 0.9931543727405584, - "learning_rate": 3.46873215661514e-07, - "loss": 1.2582, - "step": 12912 - }, - { - "epoch": 0.9932312899007769, - "learning_rate": 
3.3912239561006975e-07, - "loss": 1.1581, - "step": 12913 - }, - { - "epoch": 0.9933082070609953, - "learning_rate": 3.314591423201341e-07, - "loss": 1.2994, - "step": 12914 - }, - { - "epoch": 0.9933851242212137, - "learning_rate": 3.238834562393489e-07, - "loss": 1.1417, - "step": 12915 - }, - { - "epoch": 0.9934620413814322, - "learning_rate": 3.163953378098605e-07, - "loss": 1.2175, - "step": 12916 - }, - { - "epoch": 0.9935389585416506, - "learning_rate": 3.0899478746881925e-07, - "loss": 1.0339, - "step": 12917 - }, - { - "epoch": 0.993615875701869, - "learning_rate": 3.016818056487125e-07, - "loss": 1.2117, - "step": 12918 - }, - { - "epoch": 0.9936927928620876, - "learning_rate": 2.944563927763655e-07, - "loss": 1.1594, - "step": 12919 - }, - { - "epoch": 0.993769710022306, - "learning_rate": 2.873185492734409e-07, - "loss": 1.1495, - "step": 12920 - }, - { - "epoch": 0.9938466271825244, - "learning_rate": 2.802682755571051e-07, - "loss": 1.4044, - "step": 12921 - }, - { - "epoch": 0.9939235443427429, - "learning_rate": 2.7330557203886217e-07, - "loss": 1.4963, - "step": 12922 - }, - { - "epoch": 0.9940004615029613, - "learning_rate": 2.664304391253869e-07, - "loss": 0.805, - "step": 12923 - }, - { - "epoch": 0.9940773786631798, - "learning_rate": 2.5964287721785827e-07, - "loss": 1.0027, - "step": 12924 - }, - { - "epoch": 0.9941542958233982, - "learning_rate": 2.5294288671279255e-07, - "loss": 0.9452, - "step": 12925 - }, - { - "epoch": 0.9942312129836166, - "learning_rate": 2.4633046800154325e-07, - "loss": 0.9951, - "step": 12926 - }, - { - "epoch": 0.9943081301438351, - "learning_rate": 2.398056214699684e-07, - "loss": 1.0305, - "step": 12927 - }, - { - "epoch": 0.9943850473040535, - "learning_rate": 2.3336834749909663e-07, - "loss": 1.3752, - "step": 12928 - }, - { - "epoch": 0.9944619644642719, - "learning_rate": 2.270186464649604e-07, - "loss": 1.1421, - "step": 12929 - }, - { - "epoch": 0.9945388816244904, - "learning_rate": 2.2075651873809667e-07, - "loss": 1.1417, - "step": 12930 - }, - { - "epoch": 0.9946157987847088, - "learning_rate": 2.1458196468437942e-07, - "loss": 1.2761, - "step": 12931 - }, - { - "epoch": 0.9946927159449274, - "learning_rate": 2.0849498466435357e-07, - "loss": 1.4439, - "step": 12932 - }, - { - "epoch": 0.9947696331051458, - "learning_rate": 2.02495579033235e-07, - "loss": 1.2078, - "step": 12933 - }, - { - "epoch": 0.9948465502653642, - "learning_rate": 1.965837481414101e-07, - "loss": 1.1953, - "step": 12934 - }, - { - "epoch": 0.9949234674255827, - "learning_rate": 1.9075949233426925e-07, - "loss": 1.4175, - "step": 12935 - }, - { - "epoch": 0.9950003845858011, - "learning_rate": 1.850228119515407e-07, - "loss": 1.3363, - "step": 12936 - }, - { - "epoch": 0.9950773017460195, - "learning_rate": 1.793737073286228e-07, - "loss": 0.9397, - "step": 12937 - }, - { - "epoch": 0.995154218906238, - "learning_rate": 1.7381217879508527e-07, - "loss": 1.2953, - "step": 12938 - }, - { - "epoch": 0.9952311360664564, - "learning_rate": 1.6833822667566834e-07, - "loss": 1.5886, - "step": 12939 - }, - { - "epoch": 0.9953080532266748, - "learning_rate": 1.6295185129011625e-07, - "loss": 1.1365, - "step": 12940 - }, - { - "epoch": 0.9953849703868933, - "learning_rate": 1.5765305295284416e-07, - "loss": 1.2621, - "step": 12941 - }, - { - "epoch": 0.9954618875471117, - "learning_rate": 1.5244183197327122e-07, - "loss": 1.1917, - "step": 12942 - }, - { - "epoch": 0.9955388047073302, - "learning_rate": 1.473181886558206e-07, - "loss": 1.0094, - "step": 12943 - }, - 
{ - "epoch": 0.9956157218675487, - "learning_rate": 1.4228212329958634e-07, - "loss": 1.0105, - "step": 12944 - }, - { - "epoch": 0.9956926390277671, - "learning_rate": 1.3733363619850003e-07, - "loss": 1.1703, - "step": 12945 - }, - { - "epoch": 0.9957695561879856, - "learning_rate": 1.3247272764166373e-07, - "loss": 0.9972, - "step": 12946 - }, - { - "epoch": 0.995846473348204, - "learning_rate": 1.27699397913017e-07, - "loss": 1.2288, - "step": 12947 - }, - { - "epoch": 0.9959233905084224, - "learning_rate": 1.2301364729100374e-07, - "loss": 1.133, - "step": 12948 - }, - { - "epoch": 0.9960003076686409, - "learning_rate": 1.1841547604940494e-07, - "loss": 1.0169, - "step": 12949 - }, - { - "epoch": 0.9960772248288593, - "learning_rate": 1.1390488445650604e-07, - "loss": 1.1699, - "step": 12950 - }, - { - "epoch": 0.9961541419890778, - "learning_rate": 1.09481872776096e-07, - "loss": 1.3666, - "step": 12951 - }, - { - "epoch": 0.9962310591492962, - "learning_rate": 1.0514644126596862e-07, - "loss": 1.343, - "step": 12952 - }, - { - "epoch": 0.9963079763095146, - "learning_rate": 1.008985901795878e-07, - "loss": 1.0168, - "step": 12953 - }, - { - "epoch": 0.9963848934697331, - "learning_rate": 9.673831976492187e-08, - "loss": 0.9452, - "step": 12954 - }, - { - "epoch": 0.9964618106299515, - "learning_rate": 9.26656302647766e-08, - "loss": 1.0083, - "step": 12955 - }, - { - "epoch": 0.9965387277901699, - "learning_rate": 8.868052191712828e-08, - "loss": 1.3389, - "step": 12956 - }, - { - "epoch": 0.9966156449503885, - "learning_rate": 8.478299495462416e-08, - "loss": 1.0279, - "step": 12957 - }, - { - "epoch": 0.9966925621106069, - "learning_rate": 8.097304960474894e-08, - "loss": 0.9121, - "step": 12958 - }, - { - "epoch": 0.9967694792708254, - "learning_rate": 7.72506860899913e-08, - "loss": 1.2067, - "step": 12959 - }, - { - "epoch": 0.9968463964310438, - "learning_rate": 7.361590462784395e-08, - "loss": 1.0315, - "step": 12960 - }, - { - "epoch": 0.9969233135912622, - "learning_rate": 7.00687054304705e-08, - "loss": 1.1543, - "step": 12961 - }, - { - "epoch": 0.9970002307514807, - "learning_rate": 6.660908870487203e-08, - "loss": 1.5518, - "step": 12962 - }, - { - "epoch": 0.9970771479116991, - "learning_rate": 6.323705465322016e-08, - "loss": 1.4173, - "step": 12963 - }, - { - "epoch": 0.9971540650719175, - "learning_rate": 5.995260347235742e-08, - "loss": 1.4398, - "step": 12964 - }, - { - "epoch": 0.997230982232136, - "learning_rate": 5.675573535413037e-08, - "loss": 1.5524, - "step": 12965 - }, - { - "epoch": 0.9973078993923544, - "learning_rate": 5.3646450485056455e-08, - "loss": 1.7572, - "step": 12966 - }, - { - "epoch": 0.9973848165525728, - "learning_rate": 5.062474904665715e-08, - "loss": 1.2099, - "step": 12967 - }, - { - "epoch": 0.9974617337127913, - "learning_rate": 4.7690631215624444e-08, - "loss": 1.2903, - "step": 12968 - }, - { - "epoch": 0.9975386508730097, - "learning_rate": 4.484409716315474e-08, - "loss": 0.8052, - "step": 12969 - }, - { - "epoch": 0.9976155680332283, - "learning_rate": 4.208514705544841e-08, - "loss": 1.1351, - "step": 12970 - }, - { - "epoch": 0.9976924851934467, - "learning_rate": 3.941378105354332e-08, - "loss": 1.1921, - "step": 12971 - }, - { - "epoch": 0.9977694023536651, - "learning_rate": 3.682999931364783e-08, - "loss": 1.4235, - "step": 12972 - }, - { - "epoch": 0.9978463195138836, - "learning_rate": 3.43338019863082e-08, - "loss": 1.1581, - "step": 12973 - }, - { - "epoch": 0.997923236674102, - "learning_rate": 3.192518921740772e-08, - 
"loss": 0.9665, - "step": 12974 - }, - { - "epoch": 0.9980001538343204, - "learning_rate": 2.960416114783371e-08, - "loss": 1.4974, - "step": 12975 - }, - { - "epoch": 0.9980770709945389, - "learning_rate": 2.7370717912811316e-08, - "loss": 1.2694, - "step": 12976 - }, - { - "epoch": 0.9981539881547573, - "learning_rate": 2.522485964273624e-08, - "loss": 1.3921, - "step": 12977 - }, - { - "epoch": 0.9982309053149758, - "learning_rate": 2.316658646317471e-08, - "loss": 1.1122, - "step": 12978 - }, - { - "epoch": 0.9983078224751942, - "learning_rate": 2.119589849403081e-08, - "loss": 1.2248, - "step": 12979 - }, - { - "epoch": 0.9983847396354126, - "learning_rate": 1.9312795850545682e-08, - "loss": 1.2476, - "step": 12980 - }, - { - "epoch": 0.9984616567956311, - "learning_rate": 1.7517278642631416e-08, - "loss": 0.8254, - "step": 12981 - }, - { - "epoch": 0.9985385739558496, - "learning_rate": 1.5809346975204087e-08, - "loss": 0.9032, - "step": 12982 - }, - { - "epoch": 0.998615491116068, - "learning_rate": 1.418900094768416e-08, - "loss": 1.2986, - "step": 12983 - }, - { - "epoch": 0.9986924082762865, - "learning_rate": 1.2656240655162243e-08, - "loss": 1.372, - "step": 12984 - }, - { - "epoch": 0.9987693254365049, - "learning_rate": 1.1211066186733732e-08, - "loss": 0.941, - "step": 12985 - }, - { - "epoch": 0.9988462425967233, - "learning_rate": 9.85347762699762e-09, - "loss": 1.1172, - "step": 12986 - }, - { - "epoch": 0.9989231597569418, - "learning_rate": 8.583475055223832e-09, - "loss": 1.1312, - "step": 12987 - }, - { - "epoch": 0.9990000769171602, - "learning_rate": 7.4010585455197566e-09, - "loss": 1.2059, - "step": 12988 - }, - { - "epoch": 0.9990769940773787, - "learning_rate": 6.306228166830241e-09, - "loss": 1.466, - "step": 12989 - }, - { - "epoch": 0.9991539112375971, - "learning_rate": 5.2989839832706665e-09, - "loss": 1.1152, - "step": 12990 - }, - { - "epoch": 0.9992308283978155, - "learning_rate": 4.379326053460808e-09, - "loss": 1.209, - "step": 12991 - }, - { - "epoch": 0.999307745558034, - "learning_rate": 3.547254431357505e-09, - "loss": 1.1204, - "step": 12992 - }, - { - "epoch": 0.9993846627182524, - "learning_rate": 2.8027691652554587e-09, - "loss": 1.1816, - "step": 12993 - }, - { - "epoch": 0.9994615798784708, - "learning_rate": 2.145870298786434e-09, - "loss": 1.1467, - "step": 12994 - }, - { - "epoch": 0.9995384970386894, - "learning_rate": 1.5765578702531258e-09, - "loss": 1.1449, - "step": 12995 - }, - { - "epoch": 0.9996154141989078, - "learning_rate": 1.0948319129622242e-09, - "loss": 1.2414, - "step": 12996 - }, - { - "epoch": 0.9996923313591263, - "learning_rate": 7.006924548913496e-10, - "loss": 0.8868, - "step": 12997 - }, - { - "epoch": 0.9997692485193447, - "learning_rate": 3.9413951935518555e-10, - "loss": 1.1303, - "step": 12998 - }, - { - "epoch": 0.9998461656795631, - "learning_rate": 1.7517312400627817e-10, - "loss": 1.3999, - "step": 12999 - }, - { - "epoch": 0.9999230828397816, - "learning_rate": 4.3793281667703356e-11, - "loss": 1.2557, - "step": 13000 - } - ], - "logging_steps": 1, - "max_steps": 13001, - "num_input_tokens_seen": 0, - "num_train_epochs": 1, - "save_steps": 100, - "stateful_callbacks": { - "TrainerControl": { - "args": { - "should_epoch_stop": false, - "should_evaluate": false, - "should_log": false, - "should_save": true, - "should_training_stop": false - }, - "attributes": {} - } - }, - "total_flos": 3.419903693543424e+16, - "train_batch_size": 4, - "trial_name": null, - "trial_params": null -}