| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 3.0, |
| "eval_steps": 500, |
| "global_step": 705, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.00851063829787234, |
| "grad_norm": 1.2625373601913452, |
| "learning_rate": 8.333333333333333e-07, |
| "loss": 4.0039777755737305, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.01702127659574468, |
| "grad_norm": 0.8850640654563904, |
| "learning_rate": 2.4999999999999998e-06, |
| "loss": 2.092726945877075, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.02553191489361702, |
| "grad_norm": 0.42879173159599304, |
| "learning_rate": 4.166666666666667e-06, |
| "loss": 1.9236807823181152, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.03404255319148936, |
| "grad_norm": 0.8568687438964844, |
| "learning_rate": 5.833333333333334e-06, |
| "loss": 1.7322841882705688, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.0425531914893617, |
| "grad_norm": 0.3813435435295105, |
| "learning_rate": 7.5e-06, |
| "loss": 1.7557320594787598, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.05106382978723404, |
| "grad_norm": 3.289834499359131, |
| "learning_rate": 9.166666666666668e-06, |
| "loss": 2.0681214332580566, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.059574468085106386, |
| "grad_norm": 0.8551011085510254, |
| "learning_rate": 1.0833333333333334e-05, |
| "loss": 1.6496400833129883, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.06808510638297872, |
| "grad_norm": 0.6510457396507263, |
| "learning_rate": 1.25e-05, |
| "loss": 1.78928804397583, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.07659574468085106, |
| "grad_norm": 0.9583740830421448, |
| "learning_rate": 1.4166666666666666e-05, |
| "loss": 1.4100672006607056, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.0851063829787234, |
| "grad_norm": 0.25010502338409424, |
| "learning_rate": 1.5833333333333333e-05, |
| "loss": 1.4046533107757568, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.09361702127659574, |
| "grad_norm": 0.2605890929698944, |
| "learning_rate": 1.7500000000000002e-05, |
| "loss": 1.4094287157058716, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.10212765957446808, |
| "grad_norm": 1.3786605596542358, |
| "learning_rate": 1.9166666666666667e-05, |
| "loss": 1.0651377439498901, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.11063829787234042, |
| "grad_norm": 0.5269911289215088, |
| "learning_rate": 2.0833333333333333e-05, |
| "loss": 1.187571406364441, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.11914893617021277, |
| "grad_norm": 0.24545100331306458, |
| "learning_rate": 2.25e-05, |
| "loss": 0.8422402739524841, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.1276595744680851, |
| "grad_norm": 0.3340352475643158, |
| "learning_rate": 2.4166666666666667e-05, |
| "loss": 1.3952103853225708, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.13617021276595745, |
| "grad_norm": 1.0275627374649048, |
| "learning_rate": 2.5833333333333336e-05, |
| "loss": 0.6625706553459167, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.14468085106382977, |
| "grad_norm": 0.27028778195381165, |
| "learning_rate": 2.75e-05, |
| "loss": 1.233126163482666, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.15319148936170213, |
| "grad_norm": 0.3007853627204895, |
| "learning_rate": 2.9166666666666666e-05, |
| "loss": 1.522036075592041, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.16170212765957448, |
| "grad_norm": 0.25563573837280273, |
| "learning_rate": 2.9998734788806287e-05, |
| "loss": 1.4075392484664917, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.1702127659574468, |
| "grad_norm": 0.3948342502117157, |
| "learning_rate": 2.9988614605803806e-05, |
| "loss": 1.0875834226608276, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.17872340425531916, |
| "grad_norm": 0.189285010099411, |
| "learning_rate": 2.99683822733885e-05, |
| "loss": 1.2082504034042358, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.18723404255319148, |
| "grad_norm": 0.6968986988067627, |
| "learning_rate": 2.9938053852362484e-05, |
| "loss": 1.2259453535079956, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.19574468085106383, |
| "grad_norm": 0.2155912071466446, |
| "learning_rate": 2.989765341799095e-05, |
| "loss": 1.3428634405136108, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.20425531914893616, |
| "grad_norm": 1.266939401626587, |
| "learning_rate": 2.9847213040890793e-05, |
| "loss": 0.7827966213226318, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.2127659574468085, |
| "grad_norm": 0.738777756690979, |
| "learning_rate": 2.9786772761572335e-05, |
| "loss": 0.9074859023094177, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.22127659574468084, |
| "grad_norm": 0.2421567142009735, |
| "learning_rate": 2.9716380558654445e-05, |
| "loss": 1.321467399597168, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.2297872340425532, |
| "grad_norm": 0.16079238057136536, |
| "learning_rate": 2.9636092310778195e-05, |
| "loss": 1.1002169847488403, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.23829787234042554, |
| "grad_norm": 1.2075369358062744, |
| "learning_rate": 2.954597175224938e-05, |
| "loss": 0.8815129399299622, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.24680851063829787, |
| "grad_norm": 0.17142559587955475, |
| "learning_rate": 2.9446090422445016e-05, |
| "loss": 1.0324469804763794, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.2553191489361702, |
| "grad_norm": 0.1944112330675125, |
| "learning_rate": 2.9336527609024072e-05, |
| "loss": 1.3160098791122437, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.26382978723404255, |
| "grad_norm": 0.3625730574131012, |
| "learning_rate": 2.9217370284987434e-05, |
| "loss": 1.2711020708084106, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.2723404255319149, |
| "grad_norm": 0.1674957275390625, |
| "learning_rate": 2.9088713039637117e-05, |
| "loss": 1.2588579654693604, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.28085106382978725, |
| "grad_norm": 0.2627532184123993, |
| "learning_rate": 2.8950658003489534e-05, |
| "loss": 1.371834635734558, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.28936170212765955, |
| "grad_norm": 0.1840338557958603, |
| "learning_rate": 2.880331476720238e-05, |
| "loss": 1.227813720703125, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.2978723404255319, |
| "grad_norm": 0.5450706481933594, |
| "learning_rate": 2.8646800294579517e-05, |
| "loss": 1.4304167032241821, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.30638297872340425, |
| "grad_norm": 0.197848379611969, |
| "learning_rate": 2.848123882972295e-05, |
| "loss": 1.328366756439209, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.3148936170212766, |
| "grad_norm": 0.2317323237657547, |
| "learning_rate": 2.8306761798405526e-05, |
| "loss": 0.8880937695503235, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.32340425531914896, |
| "grad_norm": 2.356330156326294, |
| "learning_rate": 2.812350770374273e-05, |
| "loss": 1.3181480169296265, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.33191489361702126, |
| "grad_norm": 0.24451768398284912, |
| "learning_rate": 2.793162201624631e-05, |
| "loss": 1.2563281059265137, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.3404255319148936, |
| "grad_norm": 0.38318371772766113, |
| "learning_rate": 2.77312570583471e-05, |
| "loss": 0.903084933757782, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.34893617021276596, |
| "grad_norm": 0.3278236389160156, |
| "learning_rate": 2.752257188347862e-05, |
| "loss": 1.2202683687210083, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.3574468085106383, |
| "grad_norm": 0.12423935532569885, |
| "learning_rate": 2.730573214981751e-05, |
| "loss": 1.021187424659729, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.3659574468085106, |
| "grad_norm": 0.15058887004852295, |
| "learning_rate": 2.7080909988780982e-05, |
| "loss": 0.955544650554657, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.37446808510638296, |
| "grad_norm": 0.2629765272140503, |
| "learning_rate": 2.684828386838569e-05, |
| "loss": 1.1776865720748901, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.3829787234042553, |
| "grad_norm": 0.5266962647438049, |
| "learning_rate": 2.6608038451576528e-05, |
| "loss": 0.8726862668991089, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.39148936170212767, |
| "grad_norm": 0.20310066640377045, |
| "learning_rate": 2.636036444963769e-05, |
| "loss": 1.2601391077041626, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 0.21515996754169464, |
| "learning_rate": 2.6105458470802563e-05, |
| "loss": 1.246147871017456, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.4085106382978723, |
| "grad_norm": 0.28013795614242554, |
| "learning_rate": 2.5843522864182394e-05, |
| "loss": 1.1205939054489136, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.41702127659574467, |
| "grad_norm": 0.27731189131736755, |
| "learning_rate": 2.557476555913785e-05, |
| "loss": 1.029671549797058, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.425531914893617, |
| "grad_norm": 0.5619083046913147, |
| "learning_rate": 2.5299399900220803e-05, |
| "loss": 1.0204272270202637, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.4340425531914894, |
| "grad_norm": 0.18958072364330292, |
| "learning_rate": 2.5017644477817424e-05, |
| "loss": 1.2801953554153442, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.4425531914893617, |
| "grad_norm": 0.14192377030849457, |
| "learning_rate": 2.47297229546271e-05, |
| "loss": 0.9845929741859436, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.451063829787234, |
| "grad_norm": 1.0689587593078613, |
| "learning_rate": 2.4435863888114814e-05, |
| "loss": 0.8531012535095215, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.4595744680851064, |
| "grad_norm": 0.22902530431747437, |
| "learning_rate": 2.4136300549077976e-05, |
| "loss": 1.256201148033142, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.46808510638297873, |
| "grad_norm": 0.15747293829917908, |
| "learning_rate": 2.3831270736471703e-05, |
| "loss": 1.2275390625, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.4765957446808511, |
| "grad_norm": 0.2964925467967987, |
| "learning_rate": 2.352101658863959e-05, |
| "loss": 1.5093128681182861, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.4851063829787234, |
| "grad_norm": 0.23254844546318054, |
| "learning_rate": 2.32057843910998e-05, |
| "loss": 1.0485801696777344, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.49361702127659574, |
| "grad_norm": 0.2164650410413742, |
| "learning_rate": 2.288582438103903e-05, |
| "loss": 1.0241464376449585, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.502127659574468, |
| "grad_norm": 0.2963908612728119, |
| "learning_rate": 2.256139054866955e-05, |
| "loss": 1.259883999824524, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.5106382978723404, |
| "grad_norm": 0.3361697494983673, |
| "learning_rate": 2.2232740435607067e-05, |
| "loss": 0.8736612796783447, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.5191489361702127, |
| "grad_norm": 0.2727033495903015, |
| "learning_rate": 2.19001349304294e-05, |
| "loss": 1.2946780920028687, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.5276595744680851, |
| "grad_norm": 0.2193780094385147, |
| "learning_rate": 2.156383806157826e-05, |
| "loss": 1.2013747692108154, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.5361702127659574, |
| "grad_norm": 0.18866108357906342, |
| "learning_rate": 2.1224116787768552e-05, |
| "loss": 1.2230784893035889, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.5446808510638298, |
| "grad_norm": 0.4698742926120758, |
| "learning_rate": 2.0881240786071588e-05, |
| "loss": 1.0054576396942139, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.5531914893617021, |
| "grad_norm": 0.2406734824180603, |
| "learning_rate": 2.05354822378404e-05, |
| "loss": 1.0494941473007202, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.5617021276595745, |
| "grad_norm": 0.20684203505516052, |
| "learning_rate": 2.018711561264714e-05, |
| "loss": 1.2933894395828247, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.5702127659574469, |
| "grad_norm": 0.18240053951740265, |
| "learning_rate": 1.9836417450403978e-05, |
| "loss": 1.172606110572815, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.5787234042553191, |
| "grad_norm": 0.27221980690956116, |
| "learning_rate": 1.9483666141840615e-05, |
| "loss": 0.9525983929634094, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.5872340425531914, |
| "grad_norm": 0.45265427231788635, |
| "learning_rate": 1.9129141707512508e-05, |
| "loss": 0.6214677095413208, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.5957446808510638, |
| "grad_norm": 0.27331340312957764, |
| "learning_rate": 1.8773125575515364e-05, |
| "loss": 1.0034734010696411, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.6042553191489362, |
| "grad_norm": 0.5351057648658752, |
| "learning_rate": 1.8415900358082268e-05, |
| "loss": 1.2574905157089233, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.6127659574468085, |
| "grad_norm": 2.15321946144104, |
| "learning_rate": 1.805774962724083e-05, |
| "loss": 1.039708137512207, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.6212765957446809, |
| "grad_norm": 0.19406694173812866, |
| "learning_rate": 1.7698957689708426e-05, |
| "loss": 1.1023411750793457, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.6297872340425532, |
| "grad_norm": 0.28949815034866333, |
| "learning_rate": 1.7339809361204252e-05, |
| "loss": 1.2206919193267822, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.6382978723404256, |
| "grad_norm": 0.17495916783809662, |
| "learning_rate": 1.6980589740357294e-05, |
| "loss": 1.327172875404358, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.6468085106382979, |
| "grad_norm": 0.3632895052433014, |
| "learning_rate": 1.6621583982389707e-05, |
| "loss": 1.3268358707427979, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.6553191489361702, |
| "grad_norm": 0.3797944486141205, |
| "learning_rate": 1.6263077072755326e-05, |
| "loss": 0.7203034162521362, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.6638297872340425, |
| "grad_norm": 0.3124215602874756, |
| "learning_rate": 1.5905353600912898e-05, |
| "loss": 0.9463112354278564, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.6723404255319149, |
| "grad_norm": 0.20629045367240906, |
| "learning_rate": 1.5548697534413646e-05, |
| "loss": 0.9798778891563416, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.6808510638297872, |
| "grad_norm": 0.17236708104610443, |
| "learning_rate": 1.5193391993482582e-05, |
| "loss": 1.1495308876037598, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.6893617021276596, |
| "grad_norm": 0.17647439241409302, |
| "learning_rate": 1.4839719026272377e-05, |
| "loss": 1.0284472703933716, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.6978723404255319, |
| "grad_norm": 0.21520790457725525, |
| "learning_rate": 1.4487959384968272e-05, |
| "loss": 0.8173097968101501, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.7063829787234043, |
| "grad_norm": 0.22225292026996613, |
| "learning_rate": 1.4138392302921813e-05, |
| "loss": 0.994484007358551, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.7148936170212766, |
| "grad_norm": 0.8280470967292786, |
| "learning_rate": 1.3791295272990175e-05, |
| "loss": 1.1443301439285278, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.723404255319149, |
| "grad_norm": 0.3155645430088043, |
| "learning_rate": 1.344694382725718e-05, |
| "loss": 1.0067280530929565, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.7319148936170212, |
| "grad_norm": 0.269070029258728, |
| "learning_rate": 1.3105611318310818e-05, |
| "loss": 0.9691381454467773, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.7404255319148936, |
| "grad_norm": 0.14885057508945465, |
| "learning_rate": 1.2767568702250844e-05, |
| "loss": 1.0925947427749634, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.7489361702127659, |
| "grad_norm": 1.3091343641281128, |
| "learning_rate": 1.2433084323598791e-05, |
| "loss": 0.6399118304252625, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.7574468085106383, |
| "grad_norm": 0.5745394825935364, |
| "learning_rate": 1.2102423702281116e-05, |
| "loss": 1.0284110307693481, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.7659574468085106, |
| "grad_norm": 0.4978311061859131, |
| "learning_rate": 1.1775849322854516e-05, |
| "loss": 1.0611108541488647, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.774468085106383, |
| "grad_norm": 0.3034535050392151, |
| "learning_rate": 1.1453620426140795e-05, |
| "loss": 0.7678611278533936, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.7829787234042553, |
| "grad_norm": 0.1695900857448578, |
| "learning_rate": 1.1135992803436695e-05, |
| "loss": 0.954198956489563, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.7914893617021277, |
| "grad_norm": 0.1768021285533905, |
| "learning_rate": 1.0823218593461992e-05, |
| "loss": 1.0221017599105835, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.8, |
| "grad_norm": 0.22744205594062805, |
| "learning_rate": 1.0515546082207097e-05, |
| "loss": 0.7189929485321045, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.8085106382978723, |
| "grad_norm": 0.3399372100830078, |
| "learning_rate": 1.0213219505838983e-05, |
| "loss": 1.0886459350585938, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.8170212765957446, |
| "grad_norm": 0.17291869223117828, |
| "learning_rate": 9.91647885682201e-06, |
| "loss": 1.2072498798370361, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.825531914893617, |
| "grad_norm": 0.19541242718696594, |
| "learning_rate": 9.625559693407413e-06, |
| "loss": 1.1986221075057983, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.8340425531914893, |
| "grad_norm": 0.14983496069908142, |
| "learning_rate": 9.340692952642789e-06, |
| "loss": 0.720963180065155, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.8425531914893617, |
| "grad_norm": 0.22478389739990234, |
| "learning_rate": 9.062104767049956e-06, |
| "loss": 1.1722772121429443, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.851063829787234, |
| "grad_norm": 0.22518520057201385, |
| "learning_rate": 8.790016285116763e-06, |
| "loss": 1.0002360343933105, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.8595744680851064, |
| "grad_norm": 0.21948722004890442, |
| "learning_rate": 8.524643495745306e-06, |
| "loss": 0.9713231325149536, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.8680851063829788, |
| "grad_norm": 0.15189974009990692, |
| "learning_rate": 8.26619705679589e-06, |
| "loss": 1.3201754093170166, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.8765957446808511, |
| "grad_norm": 0.24632404744625092, |
| "learning_rate": 8.014882127862923e-06, |
| "loss": 1.282568335533142, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.8851063829787233, |
| "grad_norm": 0.13049790263175964, |
| "learning_rate": 7.770898207415416e-06, |
| "loss": 0.9211198091506958, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.8936170212765957, |
| "grad_norm": 0.19982703030109406, |
| "learning_rate": 7.534438974431351e-06, |
| "loss": 1.167230248451233, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.902127659574468, |
| "grad_norm": 0.18156394362449646, |
| "learning_rate": 7.305692134651742e-06, |
| "loss": 0.7129737138748169, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.9106382978723404, |
| "grad_norm": 0.26700806617736816, |
| "learning_rate": 7.084839271576291e-06, |
| "loss": 0.8392055034637451, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.9191489361702128, |
| "grad_norm": 0.1967747062444687, |
| "learning_rate": 6.872055702319054e-06, |
| "loss": 0.5692006349563599, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.9276595744680851, |
| "grad_norm": 0.1744726002216339, |
| "learning_rate": 6.667510338438419e-06, |
| "loss": 1.1953270435333252, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.9361702127659575, |
| "grad_norm": 0.38672754168510437, |
| "learning_rate": 6.471365551852012e-06, |
| "loss": 0.3942694067955017, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.9446808510638298, |
| "grad_norm": 0.19522197544574738, |
| "learning_rate": 6.2837770459428e-06, |
| "loss": 1.3081778287887573, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.9531914893617022, |
| "grad_norm": 0.22507460415363312, |
| "learning_rate": 6.1048937319588676e-06, |
| "loss": 1.3682390451431274, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.9617021276595744, |
| "grad_norm": 0.303733766078949, |
| "learning_rate": 5.9348576108049065e-06, |
| "loss": 0.894014835357666, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.9702127659574468, |
| "grad_norm": 0.205647811293602, |
| "learning_rate": 5.773803660319234e-06, |
| "loss": 1.0522630214691162, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.9787234042553191, |
| "grad_norm": 0.21294599771499634, |
| "learning_rate": 5.621859728125884e-06, |
| "loss": 1.2654610872268677, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.9872340425531915, |
| "grad_norm": 0.3174782395362854, |
| "learning_rate": 5.479146430146781e-06, |
| "loss": 1.0047165155410767, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.9957446808510638, |
| "grad_norm": 1.1156642436981201, |
| "learning_rate": 5.345777054854579e-06, |
| "loss": 0.9647661447525024, |
| "step": 234 |
| }, |
| { |
| "epoch": 1.004255319148936, |
| "grad_norm": 0.3331040143966675, |
| "learning_rate": 5.221857473342149e-06, |
| "loss": 0.9735086560249329, |
| "step": 236 |
| }, |
| { |
| "epoch": 1.0127659574468084, |
| "grad_norm": 0.18369407951831818, |
| "learning_rate": 5.10748605528015e-06, |
| "loss": 0.8922760486602783, |
| "step": 238 |
| }, |
| { |
| "epoch": 1.0212765957446808, |
| "grad_norm": 0.08912570774555206, |
| "learning_rate": 5.002753590829349e-06, |
| "loss": 0.6222575306892395, |
| "step": 240 |
| }, |
| { |
| "epoch": 1.0297872340425531, |
| "grad_norm": 0.15733036398887634, |
| "learning_rate": 4.9077432185697e-06, |
| "loss": 0.8671458959579468, |
| "step": 242 |
| }, |
| { |
| "epoch": 1.0382978723404255, |
| "grad_norm": 0.22573904693126678, |
| "learning_rate": 4.822530359503393e-06, |
| "loss": 0.9555956721305847, |
| "step": 244 |
| }, |
| { |
| "epoch": 1.0468085106382978, |
| "grad_norm": 0.13159972429275513, |
| "learning_rate": 4.747182657184251e-06, |
| "loss": 0.6556552052497864, |
| "step": 246 |
| }, |
| { |
| "epoch": 1.0553191489361702, |
| "grad_norm": 0.19305525720119476, |
| "learning_rate": 4.681759924021033e-06, |
| "loss": 0.7056201100349426, |
| "step": 248 |
| }, |
| { |
| "epoch": 1.0638297872340425, |
| "grad_norm": 0.15075285732746124, |
| "learning_rate": 4.626314093797213e-06, |
| "loss": 0.6993778347969055, |
| "step": 250 |
| }, |
| { |
| "epoch": 1.0723404255319149, |
| "grad_norm": 0.2154897153377533, |
| "learning_rate": 4.580889180444988e-06, |
| "loss": 0.5365909337997437, |
| "step": 252 |
| }, |
| { |
| "epoch": 1.0808510638297872, |
| "grad_norm": 0.17837639153003693, |
| "learning_rate": 4.545521243106197e-06, |
| "loss": 0.8809426426887512, |
| "step": 254 |
| }, |
| { |
| "epoch": 1.0893617021276596, |
| "grad_norm": 0.13702534139156342, |
| "learning_rate": 4.520238357507899e-06, |
| "loss": 0.8317233324050903, |
| "step": 256 |
| }, |
| { |
| "epoch": 1.097872340425532, |
| "grad_norm": 0.23964199423789978, |
| "learning_rate": 4.505060593675342e-06, |
| "loss": 0.9430326819419861, |
| "step": 258 |
| }, |
| { |
| "epoch": 1.1063829787234043, |
| "grad_norm": 0.2120589166879654, |
| "learning_rate": 4.5e-06, |
| "loss": 0.7370553612709045, |
| "step": 260 |
| }, |
| { |
| "epoch": 1.1148936170212767, |
| "grad_norm": 0.21252022683620453, |
| "learning_rate": 4.505060593675342e-06, |
| "loss": 0.7691786289215088, |
| "step": 262 |
| }, |
| { |
| "epoch": 1.123404255319149, |
| "grad_norm": 0.17547725141048431, |
| "learning_rate": 4.520238357507899e-06, |
| "loss": 0.7104212045669556, |
| "step": 264 |
| }, |
| { |
| "epoch": 1.1319148936170214, |
| "grad_norm": 0.1989092081785202, |
| "learning_rate": 4.545521243106197e-06, |
| "loss": 0.8642681837081909, |
| "step": 266 |
| }, |
| { |
| "epoch": 1.1404255319148937, |
| "grad_norm": 0.17653805017471313, |
| "learning_rate": 4.580889180444988e-06, |
| "loss": 0.770044207572937, |
| "step": 268 |
| }, |
| { |
| "epoch": 1.148936170212766, |
| "grad_norm": 0.18561692535877228, |
| "learning_rate": 4.6263140937972124e-06, |
| "loss": 0.9661275148391724, |
| "step": 270 |
| }, |
| { |
| "epoch": 1.1574468085106382, |
| "grad_norm": 0.2783913314342499, |
| "learning_rate": 4.6817599240210315e-06, |
| "loss": 0.6838027238845825, |
| "step": 272 |
| }, |
| { |
| "epoch": 1.1659574468085105, |
| "grad_norm": 0.17354853451251984, |
| "learning_rate": 4.747182657184251e-06, |
| "loss": 0.8435700535774231, |
| "step": 274 |
| }, |
| { |
| "epoch": 1.174468085106383, |
| "grad_norm": 0.19762302935123444, |
| "learning_rate": 4.822530359503391e-06, |
| "loss": 0.6485944986343384, |
| "step": 276 |
| }, |
| { |
| "epoch": 1.1829787234042553, |
| "grad_norm": 0.3056742250919342, |
| "learning_rate": 4.9077432185697e-06, |
| "loss": 0.5239860415458679, |
| "step": 278 |
| }, |
| { |
| "epoch": 1.1914893617021276, |
| "grad_norm": 0.14598001539707184, |
| "learning_rate": 5.002753590829348e-06, |
| "loss": 0.524198055267334, |
| "step": 280 |
| }, |
| { |
| "epoch": 1.2, |
| "grad_norm": 0.16965726017951965, |
| "learning_rate": 5.1074860552801466e-06, |
| "loss": 0.8808120489120483, |
| "step": 282 |
| }, |
| { |
| "epoch": 1.2085106382978723, |
| "grad_norm": 0.24560458958148956, |
| "learning_rate": 5.2218574733421455e-06, |
| "loss": 0.6656435132026672, |
| "step": 284 |
| }, |
| { |
| "epoch": 1.2170212765957447, |
| "grad_norm": 0.23104996979236603, |
| "learning_rate": 5.345777054854578e-06, |
| "loss": 0.9856959581375122, |
| "step": 286 |
| }, |
| { |
| "epoch": 1.225531914893617, |
| "grad_norm": 0.16903841495513916, |
| "learning_rate": 5.479146430146783e-06, |
| "loss": 0.8170779943466187, |
| "step": 288 |
| }, |
| { |
| "epoch": 1.2340425531914894, |
| "grad_norm": 0.17953285574913025, |
| "learning_rate": 5.6218597281258834e-06, |
| "loss": 0.44949835538864136, |
| "step": 290 |
| }, |
| { |
| "epoch": 1.2425531914893617, |
| "grad_norm": 0.4803825914859772, |
| "learning_rate": 5.773803660319232e-06, |
| "loss": 0.9233723282814026, |
| "step": 292 |
| }, |
| { |
| "epoch": 1.251063829787234, |
| "grad_norm": 0.15944814682006836, |
| "learning_rate": 5.934857610804904e-06, |
| "loss": 0.846575140953064, |
| "step": 294 |
| }, |
| { |
| "epoch": 1.2595744680851064, |
| "grad_norm": 0.17765574157238007, |
| "learning_rate": 6.1048937319588676e-06, |
| "loss": 0.6937251687049866, |
| "step": 296 |
| }, |
| { |
| "epoch": 1.2680851063829788, |
| "grad_norm": 0.19115029275417328, |
| "learning_rate": 6.283777045942798e-06, |
| "loss": 0.8138055205345154, |
| "step": 298 |
| }, |
| { |
| "epoch": 1.2765957446808511, |
| "grad_norm": 0.2789772152900696, |
| "learning_rate": 6.471365551852011e-06, |
| "loss": 0.9583751559257507, |
| "step": 300 |
| }, |
| { |
| "epoch": 1.2851063829787235, |
| "grad_norm": 0.15571321547031403, |
| "learning_rate": 6.66751033843842e-06, |
| "loss": 0.38521289825439453, |
| "step": 302 |
| }, |
| { |
| "epoch": 1.2936170212765958, |
| "grad_norm": 0.17477093636989594, |
| "learning_rate": 6.872055702319048e-06, |
| "loss": 0.9158145189285278, |
| "step": 304 |
| }, |
| { |
| "epoch": 1.302127659574468, |
| "grad_norm": 0.18948765099048615, |
| "learning_rate": 7.084839271576289e-06, |
| "loss": 0.9535613059997559, |
| "step": 306 |
| }, |
| { |
| "epoch": 1.3106382978723405, |
| "grad_norm": 0.4815928637981415, |
| "learning_rate": 7.30569213465174e-06, |
| "loss": 0.5246633887290955, |
| "step": 308 |
| }, |
| { |
| "epoch": 1.3191489361702127, |
| "grad_norm": 0.4748033881187439, |
| "learning_rate": 7.534438974431351e-06, |
| "loss": 0.5024292469024658, |
| "step": 310 |
| }, |
| { |
| "epoch": 1.327659574468085, |
| "grad_norm": 0.2643539607524872, |
| "learning_rate": 7.770898207415414e-06, |
| "loss": 0.5716067552566528, |
| "step": 312 |
| }, |
| { |
| "epoch": 1.3361702127659574, |
| "grad_norm": 0.2274530827999115, |
| "learning_rate": 8.014882127862923e-06, |
| "loss": 0.5070632100105286, |
| "step": 314 |
| }, |
| { |
| "epoch": 1.3446808510638297, |
| "grad_norm": 0.2692515254020691, |
| "learning_rate": 8.266197056795886e-06, |
| "loss": 0.6399731040000916, |
| "step": 316 |
| }, |
| { |
| "epoch": 1.353191489361702, |
| "grad_norm": 0.17816181480884552, |
| "learning_rate": 8.5246434957453e-06, |
| "loss": 0.5720309019088745, |
| "step": 318 |
| }, |
| { |
| "epoch": 1.3617021276595744, |
| "grad_norm": 0.427095890045166, |
| "learning_rate": 8.790016285116766e-06, |
| "loss": 0.49866631627082825, |
| "step": 320 |
| }, |
| { |
| "epoch": 1.3702127659574468, |
| "grad_norm": 0.2689511179924011, |
| "learning_rate": 9.062104767049957e-06, |
| "loss": 0.8831391334533691, |
| "step": 322 |
| }, |
| { |
| "epoch": 1.3787234042553191, |
| "grad_norm": 0.2776535451412201, |
| "learning_rate": 9.340692952642787e-06, |
| "loss": 0.6558242440223694, |
| "step": 324 |
| }, |
| { |
| "epoch": 1.3872340425531915, |
| "grad_norm": 0.18160879611968994, |
| "learning_rate": 9.625559693407413e-06, |
| "loss": 0.8320285677909851, |
| "step": 326 |
| }, |
| { |
| "epoch": 1.3957446808510638, |
| "grad_norm": 0.2418782263994217, |
| "learning_rate": 9.916478856822006e-06, |
| "loss": 0.6791567802429199, |
| "step": 328 |
| }, |
| { |
| "epoch": 1.4042553191489362, |
| "grad_norm": 0.30612000823020935, |
| "learning_rate": 1.0213219505838985e-05, |
| "loss": 0.7776432633399963, |
| "step": 330 |
| }, |
| { |
| "epoch": 1.4127659574468086, |
| "grad_norm": 0.2482786476612091, |
| "learning_rate": 1.0515546082207094e-05, |
| "loss": 0.8018679022789001, |
| "step": 332 |
| }, |
| { |
| "epoch": 1.421276595744681, |
| "grad_norm": 0.2703081965446472, |
| "learning_rate": 1.082321859346199e-05, |
| "loss": 0.6729298830032349, |
| "step": 334 |
| }, |
| { |
| "epoch": 1.4297872340425533, |
| "grad_norm": 0.21901671588420868, |
| "learning_rate": 1.1135992803436696e-05, |
| "loss": 0.6063901782035828, |
| "step": 336 |
| }, |
| { |
| "epoch": 1.4382978723404256, |
| "grad_norm": 0.20921635627746582, |
| "learning_rate": 1.1453620426140791e-05, |
| "loss": 0.6617997288703918, |
| "step": 338 |
| }, |
| { |
| "epoch": 1.4468085106382977, |
| "grad_norm": 0.19303618371486664, |
| "learning_rate": 1.1775849322854508e-05, |
| "loss": 0.5969347953796387, |
| "step": 340 |
| }, |
| { |
| "epoch": 1.4553191489361703, |
| "grad_norm": 0.3104172646999359, |
| "learning_rate": 1.210242370228112e-05, |
| "loss": 0.6071085333824158, |
| "step": 342 |
| }, |
| { |
| "epoch": 1.4638297872340424, |
| "grad_norm": 0.22875197231769562, |
| "learning_rate": 1.2433084323598791e-05, |
| "loss": 0.9178237915039062, |
| "step": 344 |
| }, |
| { |
| "epoch": 1.472340425531915, |
| "grad_norm": 0.19736064970493317, |
| "learning_rate": 1.2767568702250838e-05, |
| "loss": 0.8496959209442139, |
| "step": 346 |
| }, |
| { |
| "epoch": 1.4808510638297872, |
| "grad_norm": 0.24981622397899628, |
| "learning_rate": 1.3105611318310811e-05, |
| "loss": 0.7014995813369751, |
| "step": 348 |
| }, |
| { |
| "epoch": 1.4893617021276595, |
| "grad_norm": 0.2884751558303833, |
| "learning_rate": 1.3446943827257184e-05, |
| "loss": 0.7825354337692261, |
| "step": 350 |
| }, |
| { |
| "epoch": 1.4978723404255319, |
| "grad_norm": 0.42386531829833984, |
| "learning_rate": 1.3791295272990172e-05, |
| "loss": 0.33443060517311096, |
| "step": 352 |
| }, |
| { |
| "epoch": 1.5063829787234042, |
| "grad_norm": 0.16379332542419434, |
| "learning_rate": 1.413839230292182e-05, |
| "loss": 0.6189748048782349, |
| "step": 354 |
| }, |
| { |
| "epoch": 1.5148936170212766, |
| "grad_norm": 0.2462461143732071, |
| "learning_rate": 1.4487959384968276e-05, |
| "loss": 0.46220940351486206, |
| "step": 356 |
| }, |
| { |
| "epoch": 1.523404255319149, |
| "grad_norm": 0.23771870136260986, |
| "learning_rate": 1.4839719026272373e-05, |
| "loss": 0.4545080363750458, |
| "step": 358 |
| }, |
| { |
| "epoch": 1.5319148936170213, |
| "grad_norm": 0.18759848177433014, |
| "learning_rate": 1.5193391993482579e-05, |
| "loss": 0.9656761288642883, |
| "step": 360 |
| }, |
| { |
| "epoch": 1.5404255319148936, |
| "grad_norm": 0.18730829656124115, |
| "learning_rate": 1.5548697534413636e-05, |
| "loss": 0.44390690326690674, |
| "step": 362 |
| }, |
| { |
| "epoch": 1.548936170212766, |
| "grad_norm": 0.3202720582485199, |
| "learning_rate": 1.5905353600912894e-05, |
| "loss": 0.9684057235717773, |
| "step": 364 |
| }, |
| { |
| "epoch": 1.5574468085106383, |
| "grad_norm": 0.13575418293476105, |
| "learning_rate": 1.6263077072755323e-05, |
| "loss": 0.8047466278076172, |
| "step": 366 |
| }, |
| { |
| "epoch": 1.5659574468085107, |
| "grad_norm": 0.36194950342178345, |
| "learning_rate": 1.6621583982389707e-05, |
| "loss": 0.6687026619911194, |
| "step": 368 |
| }, |
| { |
| "epoch": 1.574468085106383, |
| "grad_norm": 0.12254664301872253, |
| "learning_rate": 1.6980589740357294e-05, |
| "loss": 0.5913136601448059, |
| "step": 370 |
| }, |
| { |
| "epoch": 1.5829787234042554, |
| "grad_norm": 0.13009262084960938, |
| "learning_rate": 1.733980936120425e-05, |
| "loss": 0.7752522230148315, |
| "step": 372 |
| }, |
| { |
| "epoch": 1.5914893617021275, |
| "grad_norm": 0.5959559679031372, |
| "learning_rate": 1.7698957689708416e-05, |
| "loss": 0.9753284454345703, |
| "step": 374 |
| }, |
| { |
| "epoch": 1.6, |
| "grad_norm": 0.5740659832954407, |
| "learning_rate": 1.805774962724083e-05, |
| "loss": 0.7318065762519836, |
| "step": 376 |
| }, |
| { |
| "epoch": 1.6085106382978722, |
| "grad_norm": 0.5994616746902466, |
| "learning_rate": 1.8415900358082265e-05, |
| "loss": 0.5247442126274109, |
| "step": 378 |
| }, |
| { |
| "epoch": 1.6170212765957448, |
| "grad_norm": 0.27797627449035645, |
| "learning_rate": 1.8773125575515357e-05, |
| "loss": 0.5938118696212769, |
| "step": 380 |
| }, |
| { |
| "epoch": 1.625531914893617, |
| "grad_norm": 0.16901274025440216, |
| "learning_rate": 1.912914170751251e-05, |
| "loss": 0.761189341545105, |
| "step": 382 |
| }, |
| { |
| "epoch": 1.6340425531914895, |
| "grad_norm": 0.2777624726295471, |
| "learning_rate": 1.9483666141840612e-05, |
| "loss": 0.9914259910583496, |
| "step": 384 |
| }, |
| { |
| "epoch": 1.6425531914893616, |
| "grad_norm": 0.3253246247768402, |
| "learning_rate": 1.9836417450403974e-05, |
| "loss": 0.8401809930801392, |
| "step": 386 |
| }, |
| { |
| "epoch": 1.6510638297872342, |
| "grad_norm": 0.16334128379821777, |
| "learning_rate": 2.0187115612647133e-05, |
| "loss": 0.694261372089386, |
| "step": 388 |
| }, |
| { |
| "epoch": 1.6595744680851063, |
| "grad_norm": 0.16701728105545044, |
| "learning_rate": 2.05354822378404e-05, |
| "loss": 0.8252889513969421, |
| "step": 390 |
| }, |
| { |
| "epoch": 1.6680851063829787, |
| "grad_norm": 0.10444586724042892, |
| "learning_rate": 2.0881240786071595e-05, |
| "loss": 0.8890981078147888, |
| "step": 392 |
| }, |
| { |
| "epoch": 1.676595744680851, |
| "grad_norm": 0.4691241681575775, |
| "learning_rate": 2.122411678776854e-05, |
| "loss": 0.6046314835548401, |
| "step": 394 |
| }, |
| { |
| "epoch": 1.6851063829787234, |
| "grad_norm": 0.11681725829839706, |
| "learning_rate": 2.1563838061578258e-05, |
| "loss": 0.8832032084465027, |
| "step": 396 |
| }, |
| { |
| "epoch": 1.6936170212765957, |
| "grad_norm": 0.14530058205127716, |
| "learning_rate": 2.1900134930429396e-05, |
| "loss": 1.0986517667770386, |
| "step": 398 |
| }, |
| { |
| "epoch": 1.702127659574468, |
| "grad_norm": 0.37800365686416626, |
| "learning_rate": 2.223274043560706e-05, |
| "loss": 0.9784438610076904, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.7106382978723405, |
| "grad_norm": 0.37573498487472534, |
| "learning_rate": 2.256139054866954e-05, |
| "loss": 0.47277578711509705, |
| "step": 402 |
| }, |
| { |
| "epoch": 1.7191489361702128, |
| "grad_norm": 0.5493515729904175, |
| "learning_rate": 2.2885824381039028e-05, |
| "loss": 0.6571931838989258, |
| "step": 404 |
| }, |
| { |
| "epoch": 1.7276595744680852, |
| "grad_norm": 0.24701279401779175, |
| "learning_rate": 2.3205784391099808e-05, |
| "loss": 0.5199674367904663, |
| "step": 406 |
| }, |
| { |
| "epoch": 1.7361702127659573, |
| "grad_norm": 0.2061431109905243, |
| "learning_rate": 2.352101658863958e-05, |
| "loss": 0.7460405230522156, |
| "step": 408 |
| }, |
| { |
| "epoch": 1.7446808510638299, |
| "grad_norm": 0.16800709068775177, |
| "learning_rate": 2.3831270736471703e-05, |
| "loss": 0.9135586023330688, |
| "step": 410 |
| }, |
| { |
| "epoch": 1.753191489361702, |
| "grad_norm": 0.5365346074104309, |
| "learning_rate": 2.4136300549077973e-05, |
| "loss": 0.906153678894043, |
| "step": 412 |
| }, |
| { |
| "epoch": 1.7617021276595746, |
| "grad_norm": 0.14748896658420563, |
| "learning_rate": 2.443586388811481e-05, |
| "loss": 0.7806675434112549, |
| "step": 414 |
| }, |
| { |
| "epoch": 1.7702127659574467, |
| "grad_norm": 1.3085377216339111, |
| "learning_rate": 2.47297229546271e-05, |
| "loss": 0.6440402865409851, |
| "step": 416 |
| }, |
| { |
| "epoch": 1.7787234042553193, |
| "grad_norm": 0.2483910322189331, |
| "learning_rate": 2.5017644477817418e-05, |
| "loss": 0.7717803120613098, |
| "step": 418 |
| }, |
| { |
| "epoch": 1.7872340425531914, |
| "grad_norm": 0.4905950427055359, |
| "learning_rate": 2.5299399900220807e-05, |
| "loss": 0.6290927529335022, |
| "step": 420 |
| }, |
| { |
| "epoch": 1.795744680851064, |
| "grad_norm": 0.11053664982318878, |
| "learning_rate": 2.5574765559137848e-05, |
| "loss": 0.7530769109725952, |
| "step": 422 |
| }, |
| { |
| "epoch": 1.804255319148936, |
| "grad_norm": 0.11755101382732391, |
| "learning_rate": 2.584352286418239e-05, |
| "loss": 0.8429850935935974, |
| "step": 424 |
| }, |
| { |
| "epoch": 1.8127659574468085, |
| "grad_norm": 0.14117616415023804, |
| "learning_rate": 2.6105458470802563e-05, |
| "loss": 0.8456153869628906, |
| "step": 426 |
| }, |
| { |
| "epoch": 1.8212765957446808, |
| "grad_norm": 0.22968512773513794, |
| "learning_rate": 2.6360364449637683e-05, |
| "loss": 0.8607503771781921, |
| "step": 428 |
| }, |
| { |
| "epoch": 1.8297872340425532, |
| "grad_norm": 0.3352075219154358, |
| "learning_rate": 2.6608038451576528e-05, |
| "loss": 0.9603878259658813, |
| "step": 430 |
| }, |
| { |
| "epoch": 1.8382978723404255, |
| "grad_norm": 0.22618667781352997, |
| "learning_rate": 2.684828386838569e-05, |
| "loss": 0.7366024255752563, |
| "step": 432 |
| }, |
| { |
| "epoch": 1.8468085106382979, |
| "grad_norm": 0.3134029805660248, |
| "learning_rate": 2.7080909988780982e-05, |
| "loss": 0.9086182117462158, |
| "step": 434 |
| }, |
| { |
| "epoch": 1.8553191489361702, |
| "grad_norm": 0.12940463423728943, |
| "learning_rate": 2.7305732149817502e-05, |
| "loss": 0.642800509929657, |
| "step": 436 |
| }, |
| { |
| "epoch": 1.8638297872340426, |
| "grad_norm": 0.3360910415649414, |
| "learning_rate": 2.7522571883478617e-05, |
| "loss": 1.0462186336517334, |
| "step": 438 |
| }, |
| { |
| "epoch": 1.872340425531915, |
| "grad_norm": 0.18832530081272125, |
| "learning_rate": 2.77312570583471e-05, |
| "loss": 0.9516323804855347, |
| "step": 440 |
| }, |
| { |
| "epoch": 1.8808510638297873, |
| "grad_norm": 0.12196290493011475, |
| "learning_rate": 2.7931622016246304e-05, |
| "loss": 0.9153946042060852, |
| "step": 442 |
| }, |
| { |
| "epoch": 1.8893617021276596, |
| "grad_norm": 0.1658358871936798, |
| "learning_rate": 2.8123507703742727e-05, |
| "loss": 0.8443633913993835, |
| "step": 444 |
| }, |
| { |
| "epoch": 1.8978723404255318, |
| "grad_norm": 0.19309784471988678, |
| "learning_rate": 2.8306761798405522e-05, |
| "loss": 1.0077768564224243, |
| "step": 446 |
| }, |
| { |
| "epoch": 1.9063829787234043, |
| "grad_norm": 0.1583261489868164, |
| "learning_rate": 2.848123882972295e-05, |
| "loss": 0.9472059607505798, |
| "step": 448 |
| }, |
| { |
| "epoch": 1.9148936170212765, |
| "grad_norm": 0.5723341107368469, |
| "learning_rate": 2.8646800294579514e-05, |
| "loss": 0.6875810623168945, |
| "step": 450 |
| }, |
| { |
| "epoch": 1.923404255319149, |
| "grad_norm": 0.339374303817749, |
| "learning_rate": 2.8803314767202376e-05, |
| "loss": 0.5654004812240601, |
| "step": 452 |
| }, |
| { |
| "epoch": 1.9319148936170212, |
| "grad_norm": 0.5859424471855164, |
| "learning_rate": 2.8950658003489534e-05, |
| "loss": 0.8492231965065002, |
| "step": 454 |
| }, |
| { |
| "epoch": 1.9404255319148938, |
| "grad_norm": 0.422346830368042, |
| "learning_rate": 2.908871303963711e-05, |
| "loss": 0.9699692130088806, |
| "step": 456 |
| }, |
| { |
| "epoch": 1.9489361702127659, |
| "grad_norm": 0.1916320025920868, |
| "learning_rate": 2.9217370284987434e-05, |
| "loss": 0.7598567605018616, |
| "step": 458 |
| }, |
| { |
| "epoch": 1.9574468085106385, |
| "grad_norm": 0.2646377384662628, |
| "learning_rate": 2.9336527609024075e-05, |
| "loss": 1.0349591970443726, |
| "step": 460 |
| }, |
| { |
| "epoch": 1.9659574468085106, |
| "grad_norm": 0.19794800877571106, |
| "learning_rate": 2.9446090422445016e-05, |
| "loss": 0.7465431094169617, |
| "step": 462 |
| }, |
| { |
| "epoch": 1.974468085106383, |
| "grad_norm": 0.11537999659776688, |
| "learning_rate": 2.9545971752249376e-05, |
| "loss": 0.8450900316238403, |
| "step": 464 |
| }, |
| { |
| "epoch": 1.9829787234042553, |
| "grad_norm": 0.16437967121601105, |
| "learning_rate": 2.9636092310778195e-05, |
| "loss": 0.8825825452804565, |
| "step": 466 |
| }, |
| { |
| "epoch": 1.9914893617021276, |
| "grad_norm": 0.1353980153799057, |
| "learning_rate": 2.9716380558654445e-05, |
| "loss": 0.6045427918434143, |
| "step": 468 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 0.12124238908290863, |
| "learning_rate": 2.9786772761572338e-05, |
| "loss": 0.6623026728630066, |
| "step": 470 |
| }, |
| { |
| "epoch": 2.008510638297872, |
| "grad_norm": 0.24585317075252533, |
| "learning_rate": 2.9847213040890793e-05, |
| "loss": 0.36443179845809937, |
| "step": 472 |
| }, |
| { |
| "epoch": 2.0170212765957447, |
| "grad_norm": 0.13654032349586487, |
| "learning_rate": 2.989765341799095e-05, |
| "loss": 0.75254225730896, |
| "step": 474 |
| }, |
| { |
| "epoch": 2.025531914893617, |
| "grad_norm": 0.5691254138946533, |
| "learning_rate": 2.9938053852362484e-05, |
| "loss": 0.47092172503471375, |
| "step": 476 |
| }, |
| { |
| "epoch": 2.0340425531914894, |
| "grad_norm": 0.1691976934671402, |
| "learning_rate": 2.99683822733885e-05, |
| "loss": 0.6583607196807861, |
| "step": 478 |
| }, |
| { |
| "epoch": 2.0425531914893615, |
| "grad_norm": 0.16407643258571625, |
| "learning_rate": 2.9988614605803806e-05, |
| "loss": 0.5634033679962158, |
| "step": 480 |
| }, |
| { |
| "epoch": 2.051063829787234, |
| "grad_norm": 0.22440628707408905, |
| "learning_rate": 2.9998734788806287e-05, |
| "loss": 0.4582347571849823, |
| "step": 482 |
| }, |
| { |
| "epoch": 2.0595744680851062, |
| "grad_norm": 0.07732467353343964, |
| "learning_rate": 2.9998734788806287e-05, |
| "loss": 0.24273234605789185, |
| "step": 484 |
| }, |
| { |
| "epoch": 2.068085106382979, |
| "grad_norm": 0.46668562293052673, |
| "learning_rate": 2.9988614605803806e-05, |
| "loss": 0.6905072331428528, |
| "step": 486 |
| }, |
| { |
| "epoch": 2.076595744680851, |
| "grad_norm": 0.14304345846176147, |
| "learning_rate": 2.99683822733885e-05, |
| "loss": 0.4559966027736664, |
| "step": 488 |
| }, |
| { |
| "epoch": 2.0851063829787235, |
| "grad_norm": 0.370118111371994, |
| "learning_rate": 2.9938053852362484e-05, |
| "loss": 0.5135046243667603, |
| "step": 490 |
| }, |
| { |
| "epoch": 2.0936170212765957, |
| "grad_norm": 0.15926745533943176, |
| "learning_rate": 2.989765341799095e-05, |
| "loss": 0.7977150678634644, |
| "step": 492 |
| }, |
| { |
| "epoch": 2.1021276595744682, |
| "grad_norm": 0.13238710165023804, |
| "learning_rate": 2.9847213040890793e-05, |
| "loss": 0.6057368516921997, |
| "step": 494 |
| }, |
| { |
| "epoch": 2.1106382978723404, |
| "grad_norm": 0.18792684376239777, |
| "learning_rate": 2.9786772761572338e-05, |
| "loss": 0.7654038071632385, |
| "step": 496 |
| }, |
| { |
| "epoch": 2.119148936170213, |
| "grad_norm": 0.12919172644615173, |
| "learning_rate": 2.9716380558654445e-05, |
| "loss": 0.8083907961845398, |
| "step": 498 |
| }, |
| { |
| "epoch": 2.127659574468085, |
| "grad_norm": 0.21281176805496216, |
| "learning_rate": 2.9636092310778195e-05, |
| "loss": 0.5940195322036743, |
| "step": 500 |
| }, |
| { |
| "epoch": 2.1361702127659576, |
| "grad_norm": 0.1665232926607132, |
| "learning_rate": 2.9545971752249383e-05, |
| "loss": 0.5825145244598389, |
| "step": 502 |
| }, |
| { |
| "epoch": 2.1446808510638298, |
| "grad_norm": 0.11076656728982925, |
| "learning_rate": 2.9446090422445016e-05, |
| "loss": 0.5060111284255981, |
| "step": 504 |
| }, |
| { |
| "epoch": 2.153191489361702, |
| "grad_norm": 0.123874731361866, |
| "learning_rate": 2.933652760902408e-05, |
| "loss": 0.5895789265632629, |
| "step": 506 |
| }, |
| { |
| "epoch": 2.1617021276595745, |
| "grad_norm": 0.24894492328166962, |
| "learning_rate": 2.9217370284987434e-05, |
| "loss": 0.3788684010505676, |
| "step": 508 |
| }, |
| { |
| "epoch": 2.1702127659574466, |
| "grad_norm": 0.1470797210931778, |
| "learning_rate": 2.908871303963712e-05, |
| "loss": 0.3065789043903351, |
| "step": 510 |
| }, |
| { |
| "epoch": 2.178723404255319, |
| "grad_norm": 0.2271808236837387, |
| "learning_rate": 2.8950658003489534e-05, |
| "loss": 0.3498597741127014, |
| "step": 512 |
| }, |
| { |
| "epoch": 2.1872340425531913, |
| "grad_norm": 0.2549133896827698, |
| "learning_rate": 2.8803314767202387e-05, |
| "loss": 0.3588785231113434, |
| "step": 514 |
| }, |
| { |
| "epoch": 2.195744680851064, |
| "grad_norm": 0.27323317527770996, |
| "learning_rate": 2.864680029457952e-05, |
| "loss": 0.4839470088481903, |
| "step": 516 |
| }, |
| { |
| "epoch": 2.204255319148936, |
| "grad_norm": 0.7197582721710205, |
| "learning_rate": 2.8481238829722946e-05, |
| "loss": 0.6455433368682861, |
| "step": 518 |
| }, |
| { |
| "epoch": 2.2127659574468086, |
| "grad_norm": 0.16234466433525085, |
| "learning_rate": 2.8306761798405533e-05, |
| "loss": 0.5413359999656677, |
| "step": 520 |
| }, |
| { |
| "epoch": 2.2212765957446807, |
| "grad_norm": 0.21601392328739166, |
| "learning_rate": 2.812350770374273e-05, |
| "loss": 0.33399829268455505, |
| "step": 522 |
| }, |
| { |
| "epoch": 2.2297872340425533, |
| "grad_norm": 0.3286589980125427, |
| "learning_rate": 2.793162201624631e-05, |
| "loss": 0.4370644688606262, |
| "step": 524 |
| }, |
| { |
| "epoch": 2.2382978723404254, |
| "grad_norm": 0.28274786472320557, |
| "learning_rate": 2.7731257058347098e-05, |
| "loss": 0.6098963618278503, |
| "step": 526 |
| }, |
| { |
| "epoch": 2.246808510638298, |
| "grad_norm": 0.1321384608745575, |
| "learning_rate": 2.752257188347862e-05, |
| "loss": 0.6518822908401489, |
| "step": 528 |
| }, |
| { |
| "epoch": 2.25531914893617, |
| "grad_norm": 0.4378323256969452, |
| "learning_rate": 2.7305732149817512e-05, |
| "loss": 0.6883822679519653, |
| "step": 530 |
| }, |
| { |
| "epoch": 2.2638297872340427, |
| "grad_norm": 0.12180285900831223, |
| "learning_rate": 2.708090998878098e-05, |
| "loss": 0.5799202919006348, |
| "step": 532 |
| }, |
| { |
| "epoch": 2.272340425531915, |
| "grad_norm": 0.2532312572002411, |
| "learning_rate": 2.68482838683857e-05, |
| "loss": 0.30189117789268494, |
| "step": 534 |
| }, |
| { |
| "epoch": 2.2808510638297874, |
| "grad_norm": 0.1414760947227478, |
| "learning_rate": 2.6608038451576528e-05, |
| "loss": 0.5066512823104858, |
| "step": 536 |
| }, |
| { |
| "epoch": 2.2893617021276595, |
| "grad_norm": 0.15154339373111725, |
| "learning_rate": 2.6360364449637686e-05, |
| "loss": 0.7329890131950378, |
| "step": 538 |
| }, |
| { |
| "epoch": 2.297872340425532, |
| "grad_norm": 0.438917338848114, |
| "learning_rate": 2.6105458470802563e-05, |
| "loss": 0.23619456589221954, |
| "step": 540 |
| }, |
| { |
| "epoch": 2.3063829787234043, |
| "grad_norm": 0.10420854389667511, |
| "learning_rate": 2.5843522864182397e-05, |
| "loss": 0.2426261454820633, |
| "step": 542 |
| }, |
| { |
| "epoch": 2.3148936170212764, |
| "grad_norm": 0.3014877736568451, |
| "learning_rate": 2.557476555913786e-05, |
| "loss": 0.4293208420276642, |
| "step": 544 |
| }, |
| { |
| "epoch": 2.323404255319149, |
| "grad_norm": 0.16749997437000275, |
| "learning_rate": 2.52993999002208e-05, |
| "loss": 0.317562460899353, |
| "step": 546 |
| }, |
| { |
| "epoch": 2.331914893617021, |
| "grad_norm": 0.5269374847412109, |
| "learning_rate": 2.501764447781743e-05, |
| "loss": 0.7331305742263794, |
| "step": 548 |
| }, |
| { |
| "epoch": 2.3404255319148937, |
| "grad_norm": 0.5243010520935059, |
| "learning_rate": 2.4729722954627106e-05, |
| "loss": 0.4494149088859558, |
| "step": 550 |
| }, |
| { |
| "epoch": 2.348936170212766, |
| "grad_norm": 0.16172371804714203, |
| "learning_rate": 2.4435863888114814e-05, |
| "loss": 0.5373607873916626, |
| "step": 552 |
| }, |
| { |
| "epoch": 2.3574468085106384, |
| "grad_norm": 0.13132323324680328, |
| "learning_rate": 2.413630054907798e-05, |
| "loss": 0.7187270522117615, |
| "step": 554 |
| }, |
| { |
| "epoch": 2.3659574468085105, |
| "grad_norm": 0.16762499511241913, |
| "learning_rate": 2.383127073647171e-05, |
| "loss": 0.5384994745254517, |
| "step": 556 |
| }, |
| { |
| "epoch": 2.374468085106383, |
| "grad_norm": 0.19120921194553375, |
| "learning_rate": 2.3521016588639598e-05, |
| "loss": 0.5281667709350586, |
| "step": 558 |
| }, |
| { |
| "epoch": 2.382978723404255, |
| "grad_norm": 0.12356223165988922, |
| "learning_rate": 2.32057843910998e-05, |
| "loss": 0.47801631689071655, |
| "step": 560 |
| }, |
| { |
| "epoch": 2.391489361702128, |
| "grad_norm": 0.11622758209705353, |
| "learning_rate": 2.2885824381039024e-05, |
| "loss": 0.5657482147216797, |
| "step": 562 |
| }, |
| { |
| "epoch": 2.4, |
| "grad_norm": 0.18093258142471313, |
| "learning_rate": 2.2561390548669552e-05, |
| "loss": 0.43637847900390625, |
| "step": 564 |
| }, |
| { |
| "epoch": 2.4085106382978725, |
| "grad_norm": 0.1480812132358551, |
| "learning_rate": 2.2232740435607067e-05, |
| "loss": 0.4490112364292145, |
| "step": 566 |
| }, |
| { |
| "epoch": 2.4170212765957446, |
| "grad_norm": 0.3488391935825348, |
| "learning_rate": 2.1900134930429403e-05, |
| "loss": 0.5125418901443481, |
| "step": 568 |
| }, |
| { |
| "epoch": 2.425531914893617, |
| "grad_norm": 0.20989084243774414, |
| "learning_rate": 2.1563838061578264e-05, |
| "loss": 0.4947648346424103, |
| "step": 570 |
| }, |
| { |
| "epoch": 2.4340425531914893, |
| "grad_norm": 0.1344413161277771, |
| "learning_rate": 2.122411678776855e-05, |
| "loss": 0.40764039754867554, |
| "step": 572 |
| }, |
| { |
| "epoch": 2.4425531914893615, |
| "grad_norm": 0.5959489345550537, |
| "learning_rate": 2.0881240786071588e-05, |
| "loss": 0.402820885181427, |
| "step": 574 |
| }, |
| { |
| "epoch": 2.451063829787234, |
| "grad_norm": 0.17302733659744263, |
| "learning_rate": 2.0535482237840398e-05, |
| "loss": 0.5862278342247009, |
| "step": 576 |
| }, |
| { |
| "epoch": 2.4595744680851066, |
| "grad_norm": 0.18545182049274445, |
| "learning_rate": 2.018711561264715e-05, |
| "loss": 0.5663528442382812, |
| "step": 578 |
| }, |
| { |
| "epoch": 2.4680851063829787, |
| "grad_norm": 0.1751205325126648, |
| "learning_rate": 1.9836417450403978e-05, |
| "loss": 0.3827342689037323, |
| "step": 580 |
| }, |
| { |
| "epoch": 2.476595744680851, |
| "grad_norm": 0.25646570324897766, |
| "learning_rate": 1.948366614184062e-05, |
| "loss": 0.5597113966941833, |
| "step": 582 |
| }, |
| { |
| "epoch": 2.4851063829787234, |
| "grad_norm": 0.1912815123796463, |
| "learning_rate": 1.9129141707512514e-05, |
| "loss": 0.6160271763801575, |
| "step": 584 |
| }, |
| { |
| "epoch": 2.4936170212765956, |
| "grad_norm": 0.3505466878414154, |
| "learning_rate": 1.877312557551536e-05, |
| "loss": 0.5502729415893555, |
| "step": 586 |
| }, |
| { |
| "epoch": 2.502127659574468, |
| "grad_norm": 0.17846280336380005, |
| "learning_rate": 1.841590035808227e-05, |
| "loss": 0.7185637354850769, |
| "step": 588 |
| }, |
| { |
| "epoch": 2.5106382978723403, |
| "grad_norm": 0.15607386827468872, |
| "learning_rate": 1.8057749627240822e-05, |
| "loss": 0.6139054894447327, |
| "step": 590 |
| }, |
| { |
| "epoch": 2.519148936170213, |
| "grad_norm": 0.13966147601604462, |
| "learning_rate": 1.7698957689708436e-05, |
| "loss": 0.7389253973960876, |
| "step": 592 |
| }, |
| { |
| "epoch": 2.527659574468085, |
| "grad_norm": 0.14507059752941132, |
| "learning_rate": 1.7339809361204255e-05, |
| "loss": 0.35956406593322754, |
| "step": 594 |
| }, |
| { |
| "epoch": 2.5361702127659576, |
| "grad_norm": 0.15395186841487885, |
| "learning_rate": 1.6980589740357287e-05, |
| "loss": 0.5801402926445007, |
| "step": 596 |
| }, |
| { |
| "epoch": 2.5446808510638297, |
| "grad_norm": 0.13444769382476807, |
| "learning_rate": 1.6621583982389714e-05, |
| "loss": 0.4474773108959198, |
| "step": 598 |
| }, |
| { |
| "epoch": 2.5531914893617023, |
| "grad_norm": 0.21395254135131836, |
| "learning_rate": 1.626307707275533e-05, |
| "loss": 0.5679066777229309, |
| "step": 600 |
| }, |
| { |
| "epoch": 2.5617021276595744, |
| "grad_norm": 0.1135779544711113, |
| "learning_rate": 1.59053536009129e-05, |
| "loss": 0.5999655723571777, |
| "step": 602 |
| }, |
| { |
| "epoch": 2.570212765957447, |
| "grad_norm": 0.11653319001197815, |
| "learning_rate": 1.5548697534413653e-05, |
| "loss": 0.577793300151825, |
| "step": 604 |
| }, |
| { |
| "epoch": 2.578723404255319, |
| "grad_norm": 0.1222231388092041, |
| "learning_rate": 1.5193391993482573e-05, |
| "loss": 0.6178088188171387, |
| "step": 606 |
| }, |
| { |
| "epoch": 2.5872340425531917, |
| "grad_norm": 0.1294461488723755, |
| "learning_rate": 1.483971902627239e-05, |
| "loss": 0.2757226824760437, |
| "step": 608 |
| }, |
| { |
| "epoch": 2.595744680851064, |
| "grad_norm": 0.12525388598442078, |
| "learning_rate": 1.4487959384968272e-05, |
| "loss": 0.5488746762275696, |
| "step": 610 |
| }, |
| { |
| "epoch": 2.604255319148936, |
| "grad_norm": 0.3998476564884186, |
| "learning_rate": 1.4138392302921813e-05, |
| "loss": 0.6341690421104431, |
| "step": 612 |
| }, |
| { |
| "epoch": 2.6127659574468085, |
| "grad_norm": 0.1306917667388916, |
| "learning_rate": 1.3791295272990178e-05, |
| "loss": 0.6712806224822998, |
| "step": 614 |
| }, |
| { |
| "epoch": 2.621276595744681, |
| "grad_norm": 0.08778780698776245, |
| "learning_rate": 1.344694382725719e-05, |
| "loss": 0.45013314485549927, |
| "step": 616 |
| }, |
| { |
| "epoch": 2.629787234042553, |
| "grad_norm": 0.2586289048194885, |
| "learning_rate": 1.3105611318310828e-05, |
| "loss": 0.6572415828704834, |
| "step": 618 |
| }, |
| { |
| "epoch": 2.6382978723404253, |
| "grad_norm": 0.10759896785020828, |
| "learning_rate": 1.2767568702250834e-05, |
| "loss": 0.41325774788856506, |
| "step": 620 |
| }, |
| { |
| "epoch": 2.646808510638298, |
| "grad_norm": 0.21708400547504425, |
| "learning_rate": 1.2433084323598806e-05, |
| "loss": 0.4483058750629425, |
| "step": 622 |
| }, |
| { |
| "epoch": 2.65531914893617, |
| "grad_norm": 0.17001259326934814, |
| "learning_rate": 1.2102423702281114e-05, |
| "loss": 0.6922531127929688, |
| "step": 624 |
| }, |
| { |
| "epoch": 2.6638297872340426, |
| "grad_norm": 0.21257923543453217, |
| "learning_rate": 1.1775849322854515e-05, |
| "loss": 0.5280181169509888, |
| "step": 626 |
| }, |
| { |
| "epoch": 2.6723404255319148, |
| "grad_norm": 0.18666359782218933, |
| "learning_rate": 1.1453620426140796e-05, |
| "loss": 0.5663700699806213, |
| "step": 628 |
| }, |
| { |
| "epoch": 2.6808510638297873, |
| "grad_norm": 1.1582063436508179, |
| "learning_rate": 1.1135992803436701e-05, |
| "loss": 0.5217815637588501, |
| "step": 630 |
| }, |
| { |
| "epoch": 2.6893617021276595, |
| "grad_norm": 0.15095171332359314, |
| "learning_rate": 1.0823218593462006e-05, |
| "loss": 0.5002037882804871, |
| "step": 632 |
| }, |
| { |
| "epoch": 2.697872340425532, |
| "grad_norm": 0.258025199174881, |
| "learning_rate": 1.0515546082207089e-05, |
| "loss": 0.7550774812698364, |
| "step": 634 |
| }, |
| { |
| "epoch": 2.706382978723404, |
| "grad_norm": 0.2747920751571655, |
| "learning_rate": 1.0213219505839e-05, |
| "loss": 0.7289509773254395, |
| "step": 636 |
| }, |
| { |
| "epoch": 2.7148936170212767, |
| "grad_norm": 0.10981704294681549, |
| "learning_rate": 9.916478856822011e-06, |
| "loss": 0.4343630075454712, |
| "step": 638 |
| }, |
| { |
| "epoch": 2.723404255319149, |
| "grad_norm": 0.10845521837472916, |
| "learning_rate": 9.625559693407416e-06, |
| "loss": 0.8249025940895081, |
| "step": 640 |
| }, |
| { |
| "epoch": 2.731914893617021, |
| "grad_norm": 0.15325450897216797, |
| "learning_rate": 9.34069295264279e-06, |
| "loss": 0.7887688875198364, |
| "step": 642 |
| }, |
| { |
| "epoch": 2.7404255319148936, |
| "grad_norm": 0.12251801788806915, |
| "learning_rate": 9.062104767049962e-06, |
| "loss": 0.8136211037635803, |
| "step": 644 |
| }, |
| { |
| "epoch": 2.748936170212766, |
| "grad_norm": 0.12305980175733566, |
| "learning_rate": 8.790016285116773e-06, |
| "loss": 0.3323878347873688, |
| "step": 646 |
| }, |
| { |
| "epoch": 2.7574468085106383, |
| "grad_norm": 0.16256316006183624, |
| "learning_rate": 8.5246434957453e-06, |
| "loss": 0.31487664580345154, |
| "step": 648 |
| }, |
| { |
| "epoch": 2.7659574468085104, |
| "grad_norm": 0.29724588990211487, |
| "learning_rate": 8.266197056795886e-06, |
| "loss": 0.43204954266548157, |
| "step": 650 |
| }, |
| { |
| "epoch": 2.774468085106383, |
| "grad_norm": 0.21929951012134552, |
| "learning_rate": 8.014882127862926e-06, |
| "loss": 0.562320351600647, |
| "step": 652 |
| }, |
| { |
| "epoch": 2.7829787234042556, |
| "grad_norm": 0.11705265194177628, |
| "learning_rate": 7.77089820741542e-06, |
| "loss": 0.5608207583427429, |
| "step": 654 |
| }, |
| { |
| "epoch": 2.7914893617021277, |
| "grad_norm": 0.14389561116695404, |
| "learning_rate": 7.534438974431356e-06, |
| "loss": 0.44889208674430847, |
| "step": 656 |
| }, |
| { |
| "epoch": 2.8, |
| "grad_norm": 0.10500220209360123, |
| "learning_rate": 7.305692134651748e-06, |
| "loss": 0.5233709216117859, |
| "step": 658 |
| }, |
| { |
| "epoch": 2.8085106382978724, |
| "grad_norm": 0.11123310774564743, |
| "learning_rate": 7.0848392715763e-06, |
| "loss": 0.41444164514541626, |
| "step": 660 |
| }, |
| { |
| "epoch": 2.8170212765957445, |
| "grad_norm": 0.14375989139080048, |
| "learning_rate": 6.872055702319048e-06, |
| "loss": 0.2203376591205597, |
| "step": 662 |
| }, |
| { |
| "epoch": 2.825531914893617, |
| "grad_norm": 2.4339959621429443, |
| "learning_rate": 6.66751033843842e-06, |
| "loss": 0.4634183943271637, |
| "step": 664 |
| }, |
| { |
| "epoch": 2.8340425531914892, |
| "grad_norm": 0.16652415692806244, |
| "learning_rate": 6.471365551852014e-06, |
| "loss": 0.36938074231147766, |
| "step": 666 |
| }, |
| { |
| "epoch": 2.842553191489362, |
| "grad_norm": 0.13066215813159943, |
| "learning_rate": 6.283777045942801e-06, |
| "loss": 0.5424007177352905, |
| "step": 668 |
| }, |
| { |
| "epoch": 2.851063829787234, |
| "grad_norm": 0.1719757467508316, |
| "learning_rate": 6.104893731958872e-06, |
| "loss": 0.7605326175689697, |
| "step": 670 |
| }, |
| { |
| "epoch": 2.8595744680851065, |
| "grad_norm": 0.15954963862895966, |
| "learning_rate": 5.934857610804912e-06, |
| "loss": 0.4137082099914551, |
| "step": 672 |
| }, |
| { |
| "epoch": 2.8680851063829786, |
| "grad_norm": 0.182533860206604, |
| "learning_rate": 5.773803660319231e-06, |
| "loss": 0.5575495362281799, |
| "step": 674 |
| }, |
| { |
| "epoch": 2.876595744680851, |
| "grad_norm": 0.13435368239879608, |
| "learning_rate": 5.6218597281258834e-06, |
| "loss": 0.3841201961040497, |
| "step": 676 |
| }, |
| { |
| "epoch": 2.8851063829787233, |
| "grad_norm": 0.09731018543243408, |
| "learning_rate": 5.479146430146783e-06, |
| "loss": 0.3572618365287781, |
| "step": 678 |
| }, |
| { |
| "epoch": 2.8936170212765955, |
| "grad_norm": 0.12130768597126007, |
| "learning_rate": 5.3457770548545805e-06, |
| "loss": 0.4105460047721863, |
| "step": 680 |
| }, |
| { |
| "epoch": 2.902127659574468, |
| "grad_norm": 0.4408089518547058, |
| "learning_rate": 5.22185747334215e-06, |
| "loss": 0.5799260139465332, |
| "step": 682 |
| }, |
| { |
| "epoch": 2.9106382978723406, |
| "grad_norm": 0.12350846827030182, |
| "learning_rate": 5.107486055280145e-06, |
| "loss": 0.4056027829647064, |
| "step": 684 |
| }, |
| { |
| "epoch": 2.9191489361702128, |
| "grad_norm": 3.8421390056610107, |
| "learning_rate": 5.002753590829352e-06, |
| "loss": 0.23622764647006989, |
| "step": 686 |
| }, |
| { |
| "epoch": 2.927659574468085, |
| "grad_norm": 0.163557767868042, |
| "learning_rate": 4.9077432185697e-06, |
| "loss": 0.33212676644325256, |
| "step": 688 |
| }, |
| { |
| "epoch": 2.9361702127659575, |
| "grad_norm": 0.143082395195961, |
| "learning_rate": 4.822530359503391e-06, |
| "loss": 0.7329955697059631, |
| "step": 690 |
| }, |
| { |
| "epoch": 2.94468085106383, |
| "grad_norm": 0.2611314058303833, |
| "learning_rate": 4.747182657184251e-06, |
| "loss": 0.43970412015914917, |
| "step": 692 |
| }, |
| { |
| "epoch": 2.953191489361702, |
| "grad_norm": 0.12711837887763977, |
| "learning_rate": 4.681759924021033e-06, |
| "loss": 0.642443835735321, |
| "step": 694 |
| }, |
| { |
| "epoch": 2.9617021276595743, |
| "grad_norm": 0.30099472403526306, |
| "learning_rate": 4.626314093797213e-06, |
| "loss": 0.35701537132263184, |
| "step": 696 |
| }, |
| { |
| "epoch": 2.970212765957447, |
| "grad_norm": 0.13871702551841736, |
| "learning_rate": 4.580889180444988e-06, |
| "loss": 0.48086193203926086, |
| "step": 698 |
| }, |
| { |
| "epoch": 2.978723404255319, |
| "grad_norm": 0.1269788146018982, |
| "learning_rate": 4.5455212431061985e-06, |
| "loss": 0.4837539792060852, |
| "step": 700 |
| }, |
| { |
| "epoch": 2.9872340425531916, |
| "grad_norm": 0.31502285599708557, |
| "learning_rate": 4.520238357507899e-06, |
| "loss": 0.5840956568717957, |
| "step": 702 |
| }, |
| { |
| "epoch": 2.9957446808510637, |
| "grad_norm": 0.1223224624991417, |
| "learning_rate": 4.505060593675342e-06, |
| "loss": 0.34119483828544617, |
| "step": 704 |
| }, |
| { |
| "epoch": 3.0, |
| "step": 705, |
| "total_flos": 2.502111532985352e+18, |
| "train_loss": 0.8120331417161523, |
| "train_runtime": 7663.1544, |
| "train_samples_per_second": 5.888, |
| "train_steps_per_second": 0.092 |
| } |
| ], |
| "logging_steps": 2, |
| "max_steps": 705, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 99999, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 2.502111532985352e+18, |
| "train_batch_size": 2, |
| "trial_name": null, |
| "trial_params": null |
| } |