{ "best_metric": null, "best_model_checkpoint": null, "epoch": 1.0, "eval_steps": 500, "global_step": 1881, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.000531632110579479, "grad_norm": 23.579405331833645, "learning_rate": 5.291005291005291e-08, "loss": 1.3792, "step": 1 }, { "epoch": 0.002658160552897395, "grad_norm": 21.819838525914637, "learning_rate": 2.6455026455026455e-07, "loss": 1.3427, "step": 5 }, { "epoch": 0.00531632110579479, "grad_norm": 15.344590621513694, "learning_rate": 5.291005291005291e-07, "loss": 1.299, "step": 10 }, { "epoch": 0.007974481658692184, "grad_norm": 12.131441916410711, "learning_rate": 7.936507936507937e-07, "loss": 1.1542, "step": 15 }, { "epoch": 0.01063264221158958, "grad_norm": 8.98911329195494, "learning_rate": 1.0582010582010582e-06, "loss": 1.0463, "step": 20 }, { "epoch": 0.013290802764486975, "grad_norm": 3.501607778526649, "learning_rate": 1.3227513227513228e-06, "loss": 0.9424, "step": 25 }, { "epoch": 0.01594896331738437, "grad_norm": 3.423489710856073, "learning_rate": 1.5873015873015873e-06, "loss": 0.9018, "step": 30 }, { "epoch": 0.018607123870281767, "grad_norm": 3.001277367108441, "learning_rate": 1.8518518518518519e-06, "loss": 0.8748, "step": 35 }, { "epoch": 0.02126528442317916, "grad_norm": 2.9441592451950505, "learning_rate": 2.1164021164021164e-06, "loss": 0.8816, "step": 40 }, { "epoch": 0.023923444976076555, "grad_norm": 2.9271187812373496, "learning_rate": 2.380952380952381e-06, "loss": 0.8556, "step": 45 }, { "epoch": 0.02658160552897395, "grad_norm": 3.0958894415955793, "learning_rate": 2.6455026455026455e-06, "loss": 0.8353, "step": 50 }, { "epoch": 0.029239766081871343, "grad_norm": 2.922921509451761, "learning_rate": 2.9100529100529103e-06, "loss": 0.823, "step": 55 }, { "epoch": 0.03189792663476874, "grad_norm": 3.135548976301074, "learning_rate": 3.1746031746031746e-06, "loss": 0.8172, "step": 60 }, { "epoch": 0.03455608718766613, "grad_norm": 3.1362315132615355, "learning_rate": 3.4391534391534394e-06, "loss": 0.8077, "step": 65 }, { "epoch": 0.03721424774056353, "grad_norm": 3.025971829033113, "learning_rate": 3.7037037037037037e-06, "loss": 0.7988, "step": 70 }, { "epoch": 0.03987240829346093, "grad_norm": 3.2543761620825373, "learning_rate": 3.968253968253968e-06, "loss": 0.7917, "step": 75 }, { "epoch": 0.04253056884635832, "grad_norm": 2.979399910029388, "learning_rate": 4.232804232804233e-06, "loss": 0.7756, "step": 80 }, { "epoch": 0.045188729399255716, "grad_norm": 3.100212693141578, "learning_rate": 4.497354497354498e-06, "loss": 0.7715, "step": 85 }, { "epoch": 0.04784688995215311, "grad_norm": 3.0260363553205267, "learning_rate": 4.761904761904762e-06, "loss": 0.7827, "step": 90 }, { "epoch": 0.050505050505050504, "grad_norm": 3.5352951332042757, "learning_rate": 5.026455026455027e-06, "loss": 0.7658, "step": 95 }, { "epoch": 0.0531632110579479, "grad_norm": 2.9625298663916593, "learning_rate": 5.291005291005291e-06, "loss": 0.7384, "step": 100 }, { "epoch": 0.05582137161084529, "grad_norm": 2.989013375481517, "learning_rate": 5.555555555555557e-06, "loss": 0.7598, "step": 105 }, { "epoch": 0.05847953216374269, "grad_norm": 3.0615980905383045, "learning_rate": 5.820105820105821e-06, "loss": 0.7485, "step": 110 }, { "epoch": 0.06113769271664009, "grad_norm": 3.125309932477323, "learning_rate": 6.084656084656085e-06, "loss": 0.7404, "step": 115 }, { "epoch": 0.06379585326953748, "grad_norm": 3.2962079296846114, "learning_rate": 
6.349206349206349e-06, "loss": 0.7421, "step": 120 }, { "epoch": 0.06645401382243488, "grad_norm": 3.122192839193298, "learning_rate": 6.613756613756615e-06, "loss": 0.73, "step": 125 }, { "epoch": 0.06911217437533226, "grad_norm": 2.9654473645168062, "learning_rate": 6.878306878306879e-06, "loss": 0.7296, "step": 130 }, { "epoch": 0.07177033492822966, "grad_norm": 3.0144620572162224, "learning_rate": 7.1428571428571436e-06, "loss": 0.7389, "step": 135 }, { "epoch": 0.07442849548112707, "grad_norm": 2.906104811780708, "learning_rate": 7.4074074074074075e-06, "loss": 0.7202, "step": 140 }, { "epoch": 0.07708665603402445, "grad_norm": 3.282063025489378, "learning_rate": 7.671957671957672e-06, "loss": 0.7202, "step": 145 }, { "epoch": 0.07974481658692185, "grad_norm": 3.113187144382696, "learning_rate": 7.936507936507936e-06, "loss": 0.7193, "step": 150 }, { "epoch": 0.08240297713981924, "grad_norm": 2.8167569906973875, "learning_rate": 8.201058201058202e-06, "loss": 0.7105, "step": 155 }, { "epoch": 0.08506113769271664, "grad_norm": 2.935672758755336, "learning_rate": 8.465608465608466e-06, "loss": 0.7127, "step": 160 }, { "epoch": 0.08771929824561403, "grad_norm": 3.4898920959480804, "learning_rate": 8.730158730158731e-06, "loss": 0.7252, "step": 165 }, { "epoch": 0.09037745879851143, "grad_norm": 2.7854650304361575, "learning_rate": 8.994708994708995e-06, "loss": 0.7138, "step": 170 }, { "epoch": 0.09303561935140882, "grad_norm": 2.6523273221320127, "learning_rate": 9.25925925925926e-06, "loss": 0.705, "step": 175 }, { "epoch": 0.09569377990430622, "grad_norm": 2.7747422098156074, "learning_rate": 9.523809523809525e-06, "loss": 0.7107, "step": 180 }, { "epoch": 0.09835194045720362, "grad_norm": 3.018940503132491, "learning_rate": 9.788359788359789e-06, "loss": 0.7177, "step": 185 }, { "epoch": 0.10101010101010101, "grad_norm": 2.9774305781097543, "learning_rate": 9.999991381356603e-06, "loss": 0.7049, "step": 190 }, { "epoch": 0.10366826156299841, "grad_norm": 2.9546281553147917, "learning_rate": 9.99968973195745e-06, "loss": 0.6982, "step": 195 }, { "epoch": 0.1063264221158958, "grad_norm": 2.7361250859772057, "learning_rate": 9.998957180100319e-06, "loss": 0.7164, "step": 200 }, { "epoch": 0.1089845826687932, "grad_norm": 2.7782156509674443, "learning_rate": 9.99779378892081e-06, "loss": 0.71, "step": 205 }, { "epoch": 0.11164274322169059, "grad_norm": 2.752067715623176, "learning_rate": 9.996199658686769e-06, "loss": 0.7103, "step": 210 }, { "epoch": 0.11430090377458799, "grad_norm": 2.696798343791542, "learning_rate": 9.994174926789648e-06, "loss": 0.6972, "step": 215 }, { "epoch": 0.11695906432748537, "grad_norm": 2.999464864426748, "learning_rate": 9.991719767732666e-06, "loss": 0.696, "step": 220 }, { "epoch": 0.11961722488038277, "grad_norm": 2.8474761936642152, "learning_rate": 9.988834393115768e-06, "loss": 0.7112, "step": 225 }, { "epoch": 0.12227538543328018, "grad_norm": 2.6201992243557415, "learning_rate": 9.985519051617385e-06, "loss": 0.7014, "step": 230 }, { "epoch": 0.12493354598617756, "grad_norm": 2.635339392667087, "learning_rate": 9.981774028973013e-06, "loss": 0.7055, "step": 235 }, { "epoch": 0.12759170653907495, "grad_norm": 2.681963719272812, "learning_rate": 9.977599647950572e-06, "loss": 0.6981, "step": 240 }, { "epoch": 0.13024986709197237, "grad_norm": 2.70825842745144, "learning_rate": 9.972996268322594e-06, "loss": 0.6948, "step": 245 }, { "epoch": 0.13290802764486975, "grad_norm": 3.1203174547960884, "learning_rate": 9.967964286835219e-06, "loss": 0.6935, 
"step": 250 }, { "epoch": 0.13556618819776714, "grad_norm": 2.6339980738414175, "learning_rate": 9.962504137173997e-06, "loss": 0.6981, "step": 255 }, { "epoch": 0.13822434875066453, "grad_norm": 2.503608868118079, "learning_rate": 9.956616289926512e-06, "loss": 0.6964, "step": 260 }, { "epoch": 0.14088250930356194, "grad_norm": 2.6711044853158037, "learning_rate": 9.950301252541824e-06, "loss": 0.6948, "step": 265 }, { "epoch": 0.14354066985645933, "grad_norm": 2.7158097067728497, "learning_rate": 9.943559569286731e-06, "loss": 0.6724, "step": 270 }, { "epoch": 0.14619883040935672, "grad_norm": 2.7405188241533955, "learning_rate": 9.936391821198868e-06, "loss": 0.678, "step": 275 }, { "epoch": 0.14885699096225413, "grad_norm": 2.5459004197727686, "learning_rate": 9.92879862603662e-06, "loss": 0.7093, "step": 280 }, { "epoch": 0.15151515151515152, "grad_norm": 2.5280845959479747, "learning_rate": 9.92078063822589e-06, "loss": 0.6663, "step": 285 }, { "epoch": 0.1541733120680489, "grad_norm": 2.470733174856602, "learning_rate": 9.91233854880369e-06, "loss": 0.6557, "step": 290 }, { "epoch": 0.1568314726209463, "grad_norm": 2.591047305582084, "learning_rate": 9.903473085358589e-06, "loss": 0.6709, "step": 295 }, { "epoch": 0.1594896331738437, "grad_norm": 2.646497143472905, "learning_rate": 9.894185011967994e-06, "loss": 0.6764, "step": 300 }, { "epoch": 0.1621477937267411, "grad_norm": 2.793394708582568, "learning_rate": 9.884475129132312e-06, "loss": 0.6712, "step": 305 }, { "epoch": 0.16480595427963848, "grad_norm": 2.9489215604304992, "learning_rate": 9.874344273705949e-06, "loss": 0.6765, "step": 310 }, { "epoch": 0.1674641148325359, "grad_norm": 6.451313581277316, "learning_rate": 9.863793318825186e-06, "loss": 0.6644, "step": 315 }, { "epoch": 0.17012227538543329, "grad_norm": 2.5605827390740914, "learning_rate": 9.852823173832932e-06, "loss": 0.6873, "step": 320 }, { "epoch": 0.17278043593833067, "grad_norm": 2.6038006790137915, "learning_rate": 9.841434784200341e-06, "loss": 0.6701, "step": 325 }, { "epoch": 0.17543859649122806, "grad_norm": 2.5977149163301916, "learning_rate": 9.829629131445342e-06, "loss": 0.6592, "step": 330 }, { "epoch": 0.17809675704412548, "grad_norm": 2.498965621247738, "learning_rate": 9.817407233048028e-06, "loss": 0.6581, "step": 335 }, { "epoch": 0.18075491759702286, "grad_norm": 2.4810761737552274, "learning_rate": 9.804770142362977e-06, "loss": 0.6251, "step": 340 }, { "epoch": 0.18341307814992025, "grad_norm": 2.546278672778942, "learning_rate": 9.791718948528457e-06, "loss": 0.6552, "step": 345 }, { "epoch": 0.18607123870281764, "grad_norm": 2.6543352578309123, "learning_rate": 9.778254776372576e-06, "loss": 0.6609, "step": 350 }, { "epoch": 0.18872939925571505, "grad_norm": 2.3974140051030117, "learning_rate": 9.76437878631631e-06, "loss": 0.6497, "step": 355 }, { "epoch": 0.19138755980861244, "grad_norm": 2.813278650430626, "learning_rate": 9.75009217427352e-06, "loss": 0.6457, "step": 360 }, { "epoch": 0.19404572036150983, "grad_norm": 2.480141091317464, "learning_rate": 9.735396171547859e-06, "loss": 0.6694, "step": 365 }, { "epoch": 0.19670388091440724, "grad_norm": 2.6544163749167415, "learning_rate": 9.720292044726664e-06, "loss": 0.6403, "step": 370 }, { "epoch": 0.19936204146730463, "grad_norm": 2.419192069753362, "learning_rate": 9.704781095571788e-06, "loss": 0.6478, "step": 375 }, { "epoch": 0.20202020202020202, "grad_norm": 2.440380141428742, "learning_rate": 9.68886466090741e-06, "loss": 0.6453, "step": 380 }, { "epoch": 
0.2046783625730994, "grad_norm": 2.461462521030907, "learning_rate": 9.672544112504813e-06, "loss": 0.6459, "step": 385 }, { "epoch": 0.20733652312599682, "grad_norm": 2.6891487247733448, "learning_rate": 9.655820856964171e-06, "loss": 0.6566, "step": 390 }, { "epoch": 0.2099946836788942, "grad_norm": 2.476250180113438, "learning_rate": 9.638696335593304e-06, "loss": 0.6505, "step": 395 }, { "epoch": 0.2126528442317916, "grad_norm": 2.252029809034869, "learning_rate": 9.621172024283468e-06, "loss": 0.6506, "step": 400 }, { "epoch": 0.215311004784689, "grad_norm": 2.4009100256649027, "learning_rate": 9.603249433382145e-06, "loss": 0.6472, "step": 405 }, { "epoch": 0.2179691653375864, "grad_norm": 2.65974014911984, "learning_rate": 9.584930107562885e-06, "loss": 0.6491, "step": 410 }, { "epoch": 0.22062732589048378, "grad_norm": 2.5396822097120046, "learning_rate": 9.566215625692168e-06, "loss": 0.649, "step": 415 }, { "epoch": 0.22328548644338117, "grad_norm": 2.4691530853144146, "learning_rate": 9.547107600693328e-06, "loss": 0.6382, "step": 420 }, { "epoch": 0.22594364699627859, "grad_norm": 2.4814623573220156, "learning_rate": 9.527607679407545e-06, "loss": 0.6439, "step": 425 }, { "epoch": 0.22860180754917597, "grad_norm": 2.507720516805312, "learning_rate": 9.50771754245191e-06, "loss": 0.6185, "step": 430 }, { "epoch": 0.23125996810207336, "grad_norm": 2.381710754559422, "learning_rate": 9.487438904074581e-06, "loss": 0.6183, "step": 435 }, { "epoch": 0.23391812865497075, "grad_norm": 2.4338731752331504, "learning_rate": 9.466773512007032e-06, "loss": 0.6345, "step": 440 }, { "epoch": 0.23657628920786816, "grad_norm": 2.475985053897146, "learning_rate": 9.445723147313434e-06, "loss": 0.6159, "step": 445 }, { "epoch": 0.23923444976076555, "grad_norm": 2.571693018211461, "learning_rate": 9.424289624237143e-06, "loss": 0.6195, "step": 450 }, { "epoch": 0.24189261031366294, "grad_norm": 2.4353278379885754, "learning_rate": 9.402474790044348e-06, "loss": 0.6171, "step": 455 }, { "epoch": 0.24455077086656035, "grad_norm": 2.5936563595071855, "learning_rate": 9.380280524864846e-06, "loss": 0.6087, "step": 460 }, { "epoch": 0.24720893141945774, "grad_norm": 2.6003404377422163, "learning_rate": 9.357708741530025e-06, "loss": 0.607, "step": 465 }, { "epoch": 0.24986709197235513, "grad_norm": 2.4539600499134804, "learning_rate": 9.334761385407984e-06, "loss": 0.6165, "step": 470 }, { "epoch": 0.25252525252525254, "grad_norm": 2.396035179563128, "learning_rate": 9.311440434235879e-06, "loss": 0.6131, "step": 475 }, { "epoch": 0.2551834130781499, "grad_norm": 2.5173704413463214, "learning_rate": 9.287747897949471e-06, "loss": 0.6174, "step": 480 }, { "epoch": 0.2578415736310473, "grad_norm": 2.35543719713761, "learning_rate": 9.263685818509895e-06, "loss": 0.624, "step": 485 }, { "epoch": 0.26049973418394473, "grad_norm": 2.4747468604717633, "learning_rate": 9.239256269727675e-06, "loss": 0.5977, "step": 490 }, { "epoch": 0.2631578947368421, "grad_norm": 2.6700657630003457, "learning_rate": 9.214461357083986e-06, "loss": 0.6316, "step": 495 }, { "epoch": 0.2658160552897395, "grad_norm": 2.6019820288128614, "learning_rate": 9.189303217549195e-06, "loss": 0.612, "step": 500 }, { "epoch": 0.2684742158426369, "grad_norm": 2.4320657082358133, "learning_rate": 9.163784019398686e-06, "loss": 0.5974, "step": 505 }, { "epoch": 0.2711323763955343, "grad_norm": 2.3980618352258682, "learning_rate": 9.137905962025977e-06, "loss": 0.5974, "step": 510 }, { "epoch": 0.2737905369484317, "grad_norm": 
2.5423673726235982, "learning_rate": 9.111671275753175e-06, "loss": 0.6067, "step": 515 }, { "epoch": 0.27644869750132905, "grad_norm": 2.4523167139493167, "learning_rate": 9.08508222163875e-06, "loss": 0.5995, "step": 520 }, { "epoch": 0.27910685805422647, "grad_norm": 2.4117951211461173, "learning_rate": 9.058141091282656e-06, "loss": 0.6013, "step": 525 }, { "epoch": 0.2817650186071239, "grad_norm": 2.4016801112230177, "learning_rate": 9.030850206628836e-06, "loss": 0.6012, "step": 530 }, { "epoch": 0.28442317916002124, "grad_norm": 2.528921774636397, "learning_rate": 9.003211919765102e-06, "loss": 0.6028, "step": 535 }, { "epoch": 0.28708133971291866, "grad_norm": 2.4727747007177077, "learning_rate": 8.975228612720415e-06, "loss": 0.5861, "step": 540 }, { "epoch": 0.2897395002658161, "grad_norm": 2.4874651156635164, "learning_rate": 8.946902697259593e-06, "loss": 0.5919, "step": 545 }, { "epoch": 0.29239766081871343, "grad_norm": 2.4347654892463457, "learning_rate": 8.918236614675446e-06, "loss": 0.585, "step": 550 }, { "epoch": 0.29505582137161085, "grad_norm": 2.312313083900426, "learning_rate": 8.889232835578372e-06, "loss": 0.5921, "step": 555 }, { "epoch": 0.29771398192450826, "grad_norm": 2.4973530243506694, "learning_rate": 8.859893859683429e-06, "loss": 0.5899, "step": 560 }, { "epoch": 0.3003721424774056, "grad_norm": 2.3656255407077316, "learning_rate": 8.83022221559489e-06, "loss": 0.5963, "step": 565 }, { "epoch": 0.30303030303030304, "grad_norm": 2.328665705259469, "learning_rate": 8.800220460588321e-06, "loss": 0.5957, "step": 570 }, { "epoch": 0.3056884635832004, "grad_norm": 2.676801074518756, "learning_rate": 8.769891180390168e-06, "loss": 0.5647, "step": 575 }, { "epoch": 0.3083466241360978, "grad_norm": 2.5775837307061216, "learning_rate": 8.739236988954913e-06, "loss": 0.5874, "step": 580 }, { "epoch": 0.31100478468899523, "grad_norm": 2.4252458009696922, "learning_rate": 8.708260528239788e-06, "loss": 0.5733, "step": 585 }, { "epoch": 0.3136629452418926, "grad_norm": 2.4329948098909067, "learning_rate": 8.676964467977072e-06, "loss": 0.5843, "step": 590 }, { "epoch": 0.31632110579479, "grad_norm": 2.617177470446975, "learning_rate": 8.645351505443997e-06, "loss": 0.5779, "step": 595 }, { "epoch": 0.3189792663476874, "grad_norm": 2.3752490352145506, "learning_rate": 8.613424365230287e-06, "loss": 0.5777, "step": 600 }, { "epoch": 0.3216374269005848, "grad_norm": 2.5878429884840046, "learning_rate": 8.581185799003334e-06, "loss": 0.579, "step": 605 }, { "epoch": 0.3242955874534822, "grad_norm": 2.4302231285818214, "learning_rate": 8.548638585271034e-06, "loss": 0.597, "step": 610 }, { "epoch": 0.3269537480063796, "grad_norm": 2.493522528062739, "learning_rate": 8.515785529142339e-06, "loss": 0.5766, "step": 615 }, { "epoch": 0.32961190855927697, "grad_norm": 2.3498385912371074, "learning_rate": 8.482629462085479e-06, "loss": 0.5683, "step": 620 }, { "epoch": 0.3322700691121744, "grad_norm": 2.3651303006417024, "learning_rate": 8.449173241683934e-06, "loss": 0.5687, "step": 625 }, { "epoch": 0.3349282296650718, "grad_norm": 2.3964339925589635, "learning_rate": 8.415419751390155e-06, "loss": 0.5607, "step": 630 }, { "epoch": 0.33758639021796916, "grad_norm": 2.4646605695831636, "learning_rate": 8.381371900277045e-06, "loss": 0.5693, "step": 635 }, { "epoch": 0.34024455077086657, "grad_norm": 2.308318162132283, "learning_rate": 8.347032622787245e-06, "loss": 0.563, "step": 640 }, { "epoch": 0.34290271132376393, "grad_norm": 2.5479453691410545, "learning_rate": 
8.312404878480222e-06, "loss": 0.545, "step": 645 }, { "epoch": 0.34556087187666135, "grad_norm": 2.4905341758041817, "learning_rate": 8.277491651777196e-06, "loss": 0.5565, "step": 650 }, { "epoch": 0.34821903242955876, "grad_norm": 2.38426332119931, "learning_rate": 8.24229595170393e-06, "loss": 0.5593, "step": 655 }, { "epoch": 0.3508771929824561, "grad_norm": 2.257211174728351, "learning_rate": 8.206820811631387e-06, "loss": 0.5528, "step": 660 }, { "epoch": 0.35353535353535354, "grad_norm": 2.4047107162822305, "learning_rate": 8.171069289014307e-06, "loss": 0.5591, "step": 665 }, { "epoch": 0.35619351408825095, "grad_norm": 2.359712499357688, "learning_rate": 8.135044465127687e-06, "loss": 0.5567, "step": 670 }, { "epoch": 0.3588516746411483, "grad_norm": 2.617789847429391, "learning_rate": 8.098749444801226e-06, "loss": 0.5768, "step": 675 }, { "epoch": 0.3615098351940457, "grad_norm": 2.279043335158844, "learning_rate": 8.062187356151726e-06, "loss": 0.5584, "step": 680 }, { "epoch": 0.36416799574694314, "grad_norm": 2.5468624672218034, "learning_rate": 8.025361350313506e-06, "loss": 0.5596, "step": 685 }, { "epoch": 0.3668261562998405, "grad_norm": 2.5020390139987945, "learning_rate": 7.9882746011668e-06, "loss": 0.5514, "step": 690 }, { "epoch": 0.3694843168527379, "grad_norm": 2.4750957061628154, "learning_rate": 7.950930305064224e-06, "loss": 0.5509, "step": 695 }, { "epoch": 0.3721424774056353, "grad_norm": 2.3205069346022684, "learning_rate": 7.913331680555299e-06, "loss": 0.5475, "step": 700 }, { "epoch": 0.3748006379585327, "grad_norm": 2.2679048857253625, "learning_rate": 7.875481968109052e-06, "loss": 0.5603, "step": 705 }, { "epoch": 0.3774587985114301, "grad_norm": 2.281345445666937, "learning_rate": 7.837384429834736e-06, "loss": 0.5529, "step": 710 }, { "epoch": 0.38011695906432746, "grad_norm": 2.4761501881024373, "learning_rate": 7.799042349200672e-06, "loss": 0.5462, "step": 715 }, { "epoch": 0.3827751196172249, "grad_norm": 2.3525147379450133, "learning_rate": 7.760459030751285e-06, "loss": 0.5288, "step": 720 }, { "epoch": 0.3854332801701223, "grad_norm": 2.3082844177093396, "learning_rate": 7.721637799822269e-06, "loss": 0.5444, "step": 725 }, { "epoch": 0.38809144072301965, "grad_norm": 2.428710636700351, "learning_rate": 7.682582002254015e-06, "loss": 0.5526, "step": 730 }, { "epoch": 0.39074960127591707, "grad_norm": 2.4386787256645035, "learning_rate": 7.643295004103232e-06, "loss": 0.5322, "step": 735 }, { "epoch": 0.3934077618288145, "grad_norm": 2.5428861644636838, "learning_rate": 7.6037801913528474e-06, "loss": 0.5415, "step": 740 }, { "epoch": 0.39606592238171184, "grad_norm": 2.284207445173577, "learning_rate": 7.564040969620179e-06, "loss": 0.5206, "step": 745 }, { "epoch": 0.39872408293460926, "grad_norm": 2.4142085070722357, "learning_rate": 7.524080763863422e-06, "loss": 0.5447, "step": 750 }, { "epoch": 0.4013822434875066, "grad_norm": 2.524753101311306, "learning_rate": 7.483903018086466e-06, "loss": 0.5318, "step": 755 }, { "epoch": 0.40404040404040403, "grad_norm": 2.4624906545945455, "learning_rate": 7.443511195042068e-06, "loss": 0.536, "step": 760 }, { "epoch": 0.40669856459330145, "grad_norm": 2.584362652216828, "learning_rate": 7.402908775933419e-06, "loss": 0.5335, "step": 765 }, { "epoch": 0.4093567251461988, "grad_norm": 2.3792982549216823, "learning_rate": 7.362099260114104e-06, "loss": 0.5341, "step": 770 }, { "epoch": 0.4120148856990962, "grad_norm": 2.377464889890606, "learning_rate": 7.321086164786513e-06, "loss": 0.5369, "step": 
775 }, { "epoch": 0.41467304625199364, "grad_norm": 2.380837303007001, "learning_rate": 7.2798730246987056e-06, "loss": 0.5201, "step": 780 }, { "epoch": 0.417331206804891, "grad_norm": 2.330648347753563, "learning_rate": 7.23846339183977e-06, "loss": 0.5119, "step": 785 }, { "epoch": 0.4199893673577884, "grad_norm": 2.3284141931963362, "learning_rate": 7.196860835133686e-06, "loss": 0.5276, "step": 790 }, { "epoch": 0.4226475279106858, "grad_norm": 2.3728628327177774, "learning_rate": 7.155068940131741e-06, "loss": 0.5057, "step": 795 }, { "epoch": 0.4253056884635832, "grad_norm": 2.386729416032167, "learning_rate": 7.113091308703498e-06, "loss": 0.5246, "step": 800 }, { "epoch": 0.4279638490164806, "grad_norm": 2.419791153850835, "learning_rate": 7.070931558726373e-06, "loss": 0.5178, "step": 805 }, { "epoch": 0.430622009569378, "grad_norm": 2.5457575458937183, "learning_rate": 7.028593323773819e-06, "loss": 0.5222, "step": 810 }, { "epoch": 0.4332801701222754, "grad_norm": 2.327614444176051, "learning_rate": 6.9860802528021705e-06, "loss": 0.5034, "step": 815 }, { "epoch": 0.4359383306751728, "grad_norm": 2.3495489783212995, "learning_rate": 6.943396009836147e-06, "loss": 0.4935, "step": 820 }, { "epoch": 0.43859649122807015, "grad_norm": 2.6463956003818256, "learning_rate": 6.9005442736530745e-06, "loss": 0.5076, "step": 825 }, { "epoch": 0.44125465178096757, "grad_norm": 2.3848887187645564, "learning_rate": 6.8575287374658185e-06, "loss": 0.5217, "step": 830 }, { "epoch": 0.443912812333865, "grad_norm": 2.556811219304697, "learning_rate": 6.814353108604488e-06, "loss": 0.4953, "step": 835 }, { "epoch": 0.44657097288676234, "grad_norm": 2.378175171662646, "learning_rate": 6.771021108196912e-06, "loss": 0.5031, "step": 840 }, { "epoch": 0.44922913343965976, "grad_norm": 2.359632767008804, "learning_rate": 6.7275364708479316e-06, "loss": 0.4892, "step": 845 }, { "epoch": 0.45188729399255717, "grad_norm": 2.5062942550983163, "learning_rate": 6.683902944317535e-06, "loss": 0.5138, "step": 850 }, { "epoch": 0.45454545454545453, "grad_norm": 2.325718024430296, "learning_rate": 6.640124289197845e-06, "loss": 0.4969, "step": 855 }, { "epoch": 0.45720361509835195, "grad_norm": 2.4421884573669295, "learning_rate": 6.596204278589019e-06, "loss": 0.5063, "step": 860 }, { "epoch": 0.45986177565124936, "grad_norm": 2.3219816814492167, "learning_rate": 6.552146697774049e-06, "loss": 0.4947, "step": 865 }, { "epoch": 0.4625199362041467, "grad_norm": 2.2777725562246816, "learning_rate": 6.507955343892536e-06, "loss": 0.5039, "step": 870 }, { "epoch": 0.46517809675704413, "grad_norm": 2.3140852762780435, "learning_rate": 6.4636340256134224e-06, "loss": 0.4839, "step": 875 }, { "epoch": 0.4678362573099415, "grad_norm": 2.4627138587993214, "learning_rate": 6.419186562806742e-06, "loss": 0.4908, "step": 880 }, { "epoch": 0.4704944178628389, "grad_norm": 2.401799791274382, "learning_rate": 6.374616786214402e-06, "loss": 0.4888, "step": 885 }, { "epoch": 0.4731525784157363, "grad_norm": 2.3301260740564707, "learning_rate": 6.329928537120024e-06, "loss": 0.5022, "step": 890 }, { "epoch": 0.4758107389686337, "grad_norm": 2.406357131352656, "learning_rate": 6.285125667017886e-06, "loss": 0.4919, "step": 895 }, { "epoch": 0.4784688995215311, "grad_norm": 2.4285508393783557, "learning_rate": 6.240212037280967e-06, "loss": 0.492, "step": 900 }, { "epoch": 0.4811270600744285, "grad_norm": 2.266893075319626, "learning_rate": 6.195191518828163e-06, "loss": 0.4921, "step": 905 }, { "epoch": 0.4837852206273259, 
"grad_norm": 2.416578670315929, "learning_rate": 6.1500679917906615e-06, "loss": 0.486, "step": 910 }, { "epoch": 0.4864433811802233, "grad_norm": 2.473861188532911, "learning_rate": 6.1048453451775305e-06, "loss": 0.4798, "step": 915 }, { "epoch": 0.4891015417331207, "grad_norm": 2.3504741024043443, "learning_rate": 6.059527476540546e-06, "loss": 0.4781, "step": 920 }, { "epoch": 0.49175970228601806, "grad_norm": 2.382117365607024, "learning_rate": 6.014118291638272e-06, "loss": 0.4689, "step": 925 }, { "epoch": 0.4944178628389155, "grad_norm": 2.4835266299739307, "learning_rate": 5.96862170409944e-06, "loss": 0.4799, "step": 930 }, { "epoch": 0.49707602339181284, "grad_norm": 2.285188764967759, "learning_rate": 5.9230416350856505e-06, "loss": 0.4703, "step": 935 }, { "epoch": 0.49973418394471025, "grad_norm": 2.4072497294257373, "learning_rate": 5.877382012953429e-06, "loss": 0.4738, "step": 940 }, { "epoch": 0.5023923444976076, "grad_norm": 2.3497927288764755, "learning_rate": 5.831646772915651e-06, "loss": 0.465, "step": 945 }, { "epoch": 0.5050505050505051, "grad_norm": 2.2889113825693626, "learning_rate": 5.785839856702383e-06, "loss": 0.4676, "step": 950 }, { "epoch": 0.5077086656034024, "grad_norm": 2.444844273958781, "learning_rate": 5.739965212221168e-06, "loss": 0.4761, "step": 955 }, { "epoch": 0.5103668261562998, "grad_norm": 2.2070310255406236, "learning_rate": 5.69402679321676e-06, "loss": 0.474, "step": 960 }, { "epoch": 0.5130249867091973, "grad_norm": 2.484818119826392, "learning_rate": 5.64802855893038e-06, "loss": 0.4648, "step": 965 }, { "epoch": 0.5156831472620946, "grad_norm": 2.2539805183718804, "learning_rate": 5.601974473758472e-06, "loss": 0.4725, "step": 970 }, { "epoch": 0.518341307814992, "grad_norm": 2.2192494500574265, "learning_rate": 5.5558685069110444e-06, "loss": 0.4684, "step": 975 }, { "epoch": 0.5209994683678895, "grad_norm": 2.269435937295936, "learning_rate": 5.509714632069564e-06, "loss": 0.4653, "step": 980 }, { "epoch": 0.5236576289207868, "grad_norm": 2.5740150564664184, "learning_rate": 5.463516827044492e-06, "loss": 0.4716, "step": 985 }, { "epoch": 0.5263157894736842, "grad_norm": 2.310418189573713, "learning_rate": 5.41727907343245e-06, "loss": 0.4674, "step": 990 }, { "epoch": 0.5289739500265817, "grad_norm": 2.527471668820078, "learning_rate": 5.371005356273058e-06, "loss": 0.495, "step": 995 }, { "epoch": 0.531632110579479, "grad_norm": 2.139852400045247, "learning_rate": 5.32469966370549e-06, "loss": 0.4612, "step": 1000 }, { "epoch": 0.5342902711323764, "grad_norm": 2.3593017053732783, "learning_rate": 5.278365986624743e-06, "loss": 0.4763, "step": 1005 }, { "epoch": 0.5369484316852738, "grad_norm": 2.2465306122210844, "learning_rate": 5.232008318337682e-06, "loss": 0.4533, "step": 1010 }, { "epoch": 0.5396065922381712, "grad_norm": 2.326189324358019, "learning_rate": 5.1856306542188805e-06, "loss": 0.4509, "step": 1015 }, { "epoch": 0.5422647527910686, "grad_norm": 2.3918713302924526, "learning_rate": 5.1392369913662646e-06, "loss": 0.4667, "step": 1020 }, { "epoch": 0.5449229133439659, "grad_norm": 2.2595301249124624, "learning_rate": 5.0928313282566255e-06, "loss": 0.4478, "step": 1025 }, { "epoch": 0.5475810738968634, "grad_norm": 2.422833352188881, "learning_rate": 5.046417664401005e-06, "loss": 0.4468, "step": 1030 }, { "epoch": 0.5502392344497608, "grad_norm": 2.416580263699029, "learning_rate": 5e-06, "loss": 0.4666, "step": 1035 }, { "epoch": 0.5528973950026581, "grad_norm": 2.2774632375053088, "learning_rate": 
4.953582335598996e-06, "loss": 0.4548, "step": 1040 }, { "epoch": 0.5555555555555556, "grad_norm": 2.2466213084914872, "learning_rate": 4.907168671743377e-06, "loss": 0.4413, "step": 1045 }, { "epoch": 0.5582137161084529, "grad_norm": 2.1195454673730447, "learning_rate": 4.860763008633736e-06, "loss": 0.4441, "step": 1050 }, { "epoch": 0.5608718766613503, "grad_norm": 2.4552591249815836, "learning_rate": 4.814369345781121e-06, "loss": 0.4486, "step": 1055 }, { "epoch": 0.5635300372142478, "grad_norm": 2.2160694909825316, "learning_rate": 4.7679916816623185e-06, "loss": 0.4493, "step": 1060 }, { "epoch": 0.5661881977671451, "grad_norm": 2.34664383603718, "learning_rate": 4.7216340133752604e-06, "loss": 0.4572, "step": 1065 }, { "epoch": 0.5688463583200425, "grad_norm": 2.3926383813223455, "learning_rate": 4.675300336294511e-06, "loss": 0.4417, "step": 1070 }, { "epoch": 0.57150451887294, "grad_norm": 2.264865501037663, "learning_rate": 4.628994643726942e-06, "loss": 0.4479, "step": 1075 }, { "epoch": 0.5741626794258373, "grad_norm": 2.4178547328629416, "learning_rate": 4.582720926567552e-06, "loss": 0.4296, "step": 1080 }, { "epoch": 0.5768208399787347, "grad_norm": 2.2005790727461183, "learning_rate": 4.53648317295551e-06, "loss": 0.4439, "step": 1085 }, { "epoch": 0.5794790005316321, "grad_norm": 2.2954610706486838, "learning_rate": 4.490285367930438e-06, "loss": 0.4254, "step": 1090 }, { "epoch": 0.5821371610845295, "grad_norm": 2.26298044130641, "learning_rate": 4.444131493088956e-06, "loss": 0.4361, "step": 1095 }, { "epoch": 0.5847953216374269, "grad_norm": 2.4783364089595383, "learning_rate": 4.3980255262415295e-06, "loss": 0.4232, "step": 1100 }, { "epoch": 0.5874534821903243, "grad_norm": 2.2956707433327446, "learning_rate": 4.351971441069622e-06, "loss": 0.4336, "step": 1105 }, { "epoch": 0.5901116427432217, "grad_norm": 2.1801979127212445, "learning_rate": 4.305973206783241e-06, "loss": 0.4205, "step": 1110 }, { "epoch": 0.5927698032961191, "grad_norm": 2.3074118997829944, "learning_rate": 4.260034787778833e-06, "loss": 0.4403, "step": 1115 }, { "epoch": 0.5954279638490165, "grad_norm": 2.2135932778785117, "learning_rate": 4.214160143297618e-06, "loss": 0.4484, "step": 1120 }, { "epoch": 0.5980861244019139, "grad_norm": 2.3686411194735943, "learning_rate": 4.1683532270843505e-06, "loss": 0.4307, "step": 1125 }, { "epoch": 0.6007442849548112, "grad_norm": 2.2766648664475753, "learning_rate": 4.122617987046571e-06, "loss": 0.4281, "step": 1130 }, { "epoch": 0.6034024455077087, "grad_norm": 2.3707141910175165, "learning_rate": 4.076958364914352e-06, "loss": 0.4363, "step": 1135 }, { "epoch": 0.6060606060606061, "grad_norm": 2.3439272228715304, "learning_rate": 4.031378295900562e-06, "loss": 0.4332, "step": 1140 }, { "epoch": 0.6087187666135034, "grad_norm": 2.2202025277585737, "learning_rate": 3.985881708361729e-06, "loss": 0.421, "step": 1145 }, { "epoch": 0.6113769271664008, "grad_norm": 2.270428210567009, "learning_rate": 3.940472523459456e-06, "loss": 0.4182, "step": 1150 }, { "epoch": 0.6140350877192983, "grad_norm": 2.303418109483892, "learning_rate": 3.895154654822471e-06, "loss": 0.4139, "step": 1155 }, { "epoch": 0.6166932482721956, "grad_norm": 2.2123434660028582, "learning_rate": 3.84993200820934e-06, "loss": 0.417, "step": 1160 }, { "epoch": 0.619351408825093, "grad_norm": 2.42258236412943, "learning_rate": 3.8048084811718377e-06, "loss": 0.4414, "step": 1165 }, { "epoch": 0.6220095693779905, "grad_norm": 2.2752955087581768, "learning_rate": 3.7597879627190337e-06, 
"loss": 0.4182, "step": 1170 }, { "epoch": 0.6246677299308878, "grad_norm": 2.4664245003262546, "learning_rate": 3.7148743329821146e-06, "loss": 0.4317, "step": 1175 }, { "epoch": 0.6273258904837852, "grad_norm": 2.290843653500035, "learning_rate": 3.670071462879975e-06, "loss": 0.4186, "step": 1180 }, { "epoch": 0.6299840510366826, "grad_norm": 2.277338208428442, "learning_rate": 3.6253832137856e-06, "loss": 0.4036, "step": 1185 }, { "epoch": 0.63264221158958, "grad_norm": 2.328350906451804, "learning_rate": 3.5808134371932603e-06, "loss": 0.4029, "step": 1190 }, { "epoch": 0.6353003721424774, "grad_norm": 2.342438907892454, "learning_rate": 3.5363659743865797e-06, "loss": 0.4204, "step": 1195 }, { "epoch": 0.6379585326953748, "grad_norm": 2.4035043063158237, "learning_rate": 3.4920446561074673e-06, "loss": 0.4218, "step": 1200 }, { "epoch": 0.6406166932482722, "grad_norm": 2.224515711259058, "learning_rate": 3.4478533022259527e-06, "loss": 0.4152, "step": 1205 }, { "epoch": 0.6432748538011696, "grad_norm": 2.1644969476013847, "learning_rate": 3.403795721410983e-06, "loss": 0.4235, "step": 1210 }, { "epoch": 0.645933014354067, "grad_norm": 2.225901225466717, "learning_rate": 3.3598757108021546e-06, "loss": 0.4205, "step": 1215 }, { "epoch": 0.6485911749069644, "grad_norm": 2.960907864932211, "learning_rate": 3.3160970556824666e-06, "loss": 0.4195, "step": 1220 }, { "epoch": 0.6512493354598617, "grad_norm": 2.2220590721554303, "learning_rate": 3.2724635291520697e-06, "loss": 0.4039, "step": 1225 }, { "epoch": 0.6539074960127592, "grad_norm": 2.222896907055418, "learning_rate": 3.2289788918030894e-06, "loss": 0.4037, "step": 1230 }, { "epoch": 0.6565656565656566, "grad_norm": 2.546975428146142, "learning_rate": 3.185646891395514e-06, "loss": 0.4215, "step": 1235 }, { "epoch": 0.6592238171185539, "grad_norm": 2.26370807319166, "learning_rate": 3.1424712625341836e-06, "loss": 0.4095, "step": 1240 }, { "epoch": 0.6618819776714514, "grad_norm": 2.1848679188015074, "learning_rate": 3.0994557263469267e-06, "loss": 0.3984, "step": 1245 }, { "epoch": 0.6645401382243488, "grad_norm": 2.1394700353392424, "learning_rate": 3.0566039901638534e-06, "loss": 0.4086, "step": 1250 }, { "epoch": 0.6671982987772461, "grad_norm": 2.2219765632899473, "learning_rate": 3.013919747197832e-06, "loss": 0.4018, "step": 1255 }, { "epoch": 0.6698564593301436, "grad_norm": 2.2717838227817557, "learning_rate": 2.9714066762261825e-06, "loss": 0.4228, "step": 1260 }, { "epoch": 0.672514619883041, "grad_norm": 2.1650786912116855, "learning_rate": 2.929068441273629e-06, "loss": 0.4032, "step": 1265 }, { "epoch": 0.6751727804359383, "grad_norm": 2.30009309210609, "learning_rate": 2.886908691296504e-06, "loss": 0.4117, "step": 1270 }, { "epoch": 0.6778309409888357, "grad_norm": 2.326017307920666, "learning_rate": 2.844931059868261e-06, "loss": 0.4013, "step": 1275 }, { "epoch": 0.6804891015417331, "grad_norm": 2.1732526869160007, "learning_rate": 2.8031391648663153e-06, "loss": 0.3979, "step": 1280 }, { "epoch": 0.6831472620946305, "grad_norm": 2.159518899715084, "learning_rate": 2.7615366081602306e-06, "loss": 0.4057, "step": 1285 }, { "epoch": 0.6858054226475279, "grad_norm": 2.2767441852506036, "learning_rate": 2.720126975301297e-06, "loss": 0.4014, "step": 1290 }, { "epoch": 0.6884635832004253, "grad_norm": 2.3794328152713558, "learning_rate": 2.6789138352134885e-06, "loss": 0.4055, "step": 1295 }, { "epoch": 0.6911217437533227, "grad_norm": 2.1759938658536826, "learning_rate": 2.637900739885897e-06, "loss": 0.398, "step": 
1300 }, { "epoch": 0.69377990430622, "grad_norm": 2.463151016723552, "learning_rate": 2.5970912240665815e-06, "loss": 0.3929, "step": 1305 }, { "epoch": 0.6964380648591175, "grad_norm": 2.212295559136495, "learning_rate": 2.556488804957933e-06, "loss": 0.3912, "step": 1310 }, { "epoch": 0.6990962254120149, "grad_norm": 2.2913230696574782, "learning_rate": 2.5160969819135368e-06, "loss": 0.3965, "step": 1315 }, { "epoch": 0.7017543859649122, "grad_norm": 2.1524999645283933, "learning_rate": 2.475919236136579e-06, "loss": 0.3979, "step": 1320 }, { "epoch": 0.7044125465178097, "grad_norm": 2.0851767285574727, "learning_rate": 2.4359590303798243e-06, "loss": 0.378, "step": 1325 }, { "epoch": 0.7070707070707071, "grad_norm": 2.421263572521297, "learning_rate": 2.3962198086471534e-06, "loss": 0.3944, "step": 1330 }, { "epoch": 0.7097288676236044, "grad_norm": 2.0823768908851052, "learning_rate": 2.356704995896768e-06, "loss": 0.4023, "step": 1335 }, { "epoch": 0.7123870281765019, "grad_norm": 2.127738595725943, "learning_rate": 2.3174179977459853e-06, "loss": 0.3863, "step": 1340 }, { "epoch": 0.7150451887293993, "grad_norm": 2.213409237609825, "learning_rate": 2.2783622001777322e-06, "loss": 0.3911, "step": 1345 }, { "epoch": 0.7177033492822966, "grad_norm": 2.169892497104111, "learning_rate": 2.2395409692487174e-06, "loss": 0.3878, "step": 1350 }, { "epoch": 0.7203615098351941, "grad_norm": 3.002256232083388, "learning_rate": 2.2009576507993273e-06, "loss": 0.3924, "step": 1355 }, { "epoch": 0.7230196703880915, "grad_norm": 2.280280445481655, "learning_rate": 2.1626155701652678e-06, "loss": 0.389, "step": 1360 }, { "epoch": 0.7256778309409888, "grad_norm": 2.116741751685681, "learning_rate": 2.1245180318909482e-06, "loss": 0.3862, "step": 1365 }, { "epoch": 0.7283359914938863, "grad_norm": 2.137413236942858, "learning_rate": 2.0866683194447014e-06, "loss": 0.3876, "step": 1370 }, { "epoch": 0.7309941520467836, "grad_norm": 2.229506939617801, "learning_rate": 2.0490696949357774e-06, "loss": 0.3787, "step": 1375 }, { "epoch": 0.733652312599681, "grad_norm": 2.1611083069515398, "learning_rate": 2.0117253988332023e-06, "loss": 0.3831, "step": 1380 }, { "epoch": 0.7363104731525785, "grad_norm": 2.3092042098288466, "learning_rate": 1.974638649686495e-06, "loss": 0.3752, "step": 1385 }, { "epoch": 0.7389686337054758, "grad_norm": 2.32230572914138, "learning_rate": 1.9378126438482727e-06, "loss": 0.3956, "step": 1390 }, { "epoch": 0.7416267942583732, "grad_norm": 2.1871471331082404, "learning_rate": 1.9012505551987764e-06, "loss": 0.3862, "step": 1395 }, { "epoch": 0.7442849548112705, "grad_norm": 2.2031667706286253, "learning_rate": 1.8649555348723137e-06, "loss": 0.3988, "step": 1400 }, { "epoch": 0.746943115364168, "grad_norm": 2.1919111575981014, "learning_rate": 1.8289307109856941e-06, "loss": 0.3831, "step": 1405 }, { "epoch": 0.7496012759170654, "grad_norm": 2.248650896915019, "learning_rate": 1.7931791883686155e-06, "loss": 0.3755, "step": 1410 }, { "epoch": 0.7522594364699627, "grad_norm": 2.189295928946117, "learning_rate": 1.7577040482960723e-06, "loss": 0.377, "step": 1415 }, { "epoch": 0.7549175970228602, "grad_norm": 2.1283848120533038, "learning_rate": 1.722508348222805e-06, "loss": 0.3795, "step": 1420 }, { "epoch": 0.7575757575757576, "grad_norm": 2.304562723613879, "learning_rate": 1.6875951215197779e-06, "loss": 0.3727, "step": 1425 }, { "epoch": 0.7602339181286549, "grad_norm": 2.2072516461560414, "learning_rate": 1.6529673772127563e-06, "loss": 0.3816, "step": 1430 }, { "epoch": 
0.7628920786815524, "grad_norm": 2.1146394474437087, "learning_rate": 1.618628099722957e-06, "loss": 0.3809, "step": 1435 }, { "epoch": 0.7655502392344498, "grad_norm": 2.208201316045487, "learning_rate": 1.5845802486098461e-06, "loss": 0.3815, "step": 1440 }, { "epoch": 0.7682083997873471, "grad_norm": 2.1555651439065744, "learning_rate": 1.550826758316068e-06, "loss": 0.3868, "step": 1445 }, { "epoch": 0.7708665603402446, "grad_norm": 2.0230904275428787, "learning_rate": 1.5173705379145214e-06, "loss": 0.369, "step": 1450 }, { "epoch": 0.773524720893142, "grad_norm": 2.102831085552095, "learning_rate": 1.4842144708576606e-06, "loss": 0.3856, "step": 1455 }, { "epoch": 0.7761828814460393, "grad_norm": 2.0750334703551876, "learning_rate": 1.4513614147289663e-06, "loss": 0.3787, "step": 1460 }, { "epoch": 0.7788410419989368, "grad_norm": 2.1638405819961952, "learning_rate": 1.4188142009966689e-06, "loss": 0.3849, "step": 1465 }, { "epoch": 0.7814992025518341, "grad_norm": 2.1286655426576675, "learning_rate": 1.386575634769714e-06, "loss": 0.3698, "step": 1470 }, { "epoch": 0.7841573631047315, "grad_norm": 2.235079695000213, "learning_rate": 1.3546484945560029e-06, "loss": 0.3719, "step": 1475 }, { "epoch": 0.786815523657629, "grad_norm": 2.12112362882432, "learning_rate": 1.3230355320229305e-06, "loss": 0.3867, "step": 1480 }, { "epoch": 0.7894736842105263, "grad_norm": 2.3360347870351466, "learning_rate": 1.2917394717602123e-06, "loss": 0.3714, "step": 1485 }, { "epoch": 0.7921318447634237, "grad_norm": 2.100340041221499, "learning_rate": 1.2607630110450874e-06, "loss": 0.359, "step": 1490 }, { "epoch": 0.7947900053163212, "grad_norm": 2.2736377834909356, "learning_rate": 1.2301088196098332e-06, "loss": 0.3552, "step": 1495 }, { "epoch": 0.7974481658692185, "grad_norm": 2.1462090737136763, "learning_rate": 1.1997795394116802e-06, "loss": 0.3706, "step": 1500 }, { "epoch": 0.8001063264221159, "grad_norm": 2.330927480070287, "learning_rate": 1.1697777844051105e-06, "loss": 0.3634, "step": 1505 }, { "epoch": 0.8027644869750132, "grad_norm": 2.1032603761667343, "learning_rate": 1.140106140316572e-06, "loss": 0.3595, "step": 1510 }, { "epoch": 0.8054226475279107, "grad_norm": 2.279846255920788, "learning_rate": 1.1107671644216305e-06, "loss": 0.3538, "step": 1515 }, { "epoch": 0.8080808080808081, "grad_norm": 2.154018660092749, "learning_rate": 1.081763385324555e-06, "loss": 0.3631, "step": 1520 }, { "epoch": 0.8107389686337054, "grad_norm": 2.143933197813529, "learning_rate": 1.0530973027404073e-06, "loss": 0.3681, "step": 1525 }, { "epoch": 0.8133971291866029, "grad_norm": 2.1792844064843275, "learning_rate": 1.024771387279585e-06, "loss": 0.3486, "step": 1530 }, { "epoch": 0.8160552897395003, "grad_norm": 2.1425132944113607, "learning_rate": 9.967880802348989e-07, "loss": 0.3733, "step": 1535 }, { "epoch": 0.8187134502923976, "grad_norm": 2.1700552513537894, "learning_rate": 9.691497933711646e-07, "loss": 0.367, "step": 1540 }, { "epoch": 0.8213716108452951, "grad_norm": 2.277866930279838, "learning_rate": 9.418589087173441e-07, "loss": 0.3565, "step": 1545 }, { "epoch": 0.8240297713981924, "grad_norm": 2.0123435432750227, "learning_rate": 9.149177783612512e-07, "loss": 0.3565, "step": 1550 }, { "epoch": 0.8266879319510898, "grad_norm": 2.066692850970878, "learning_rate": 8.883287242468242e-07, "loss": 0.3694, "step": 1555 }, { "epoch": 0.8293460925039873, "grad_norm": 2.3197695805880376, "learning_rate": 8.620940379740245e-07, "loss": 0.3598, "step": 1560 }, { "epoch": 0.8320042530568846, 
"grad_norm": 1.948357495635851, "learning_rate": 8.362159806013176e-07, "loss": 0.3593, "step": 1565 }, { "epoch": 0.834662413609782, "grad_norm": 2.0700125518419745, "learning_rate": 8.106967824508061e-07, "loss": 0.3633, "step": 1570 }, { "epoch": 0.8373205741626795, "grad_norm": 1.9659833814171273, "learning_rate": 7.85538642916015e-07, "loss": 0.3582, "step": 1575 }, { "epoch": 0.8399787347155768, "grad_norm": 2.1056761901025483, "learning_rate": 7.607437302723247e-07, "loss": 0.3557, "step": 1580 }, { "epoch": 0.8426368952684742, "grad_norm": 2.3048662605616506, "learning_rate": 7.363141814901054e-07, "loss": 0.3483, "step": 1585 }, { "epoch": 0.8452950558213717, "grad_norm": 2.2968219659859708, "learning_rate": 7.122521020505302e-07, "loss": 0.3675, "step": 1590 }, { "epoch": 0.847953216374269, "grad_norm": 2.1004495243983423, "learning_rate": 6.885595657641214e-07, "loss": 0.351, "step": 1595 }, { "epoch": 0.8506113769271664, "grad_norm": 2.2866051769809315, "learning_rate": 6.652386145920181e-07, "loss": 0.3635, "step": 1600 }, { "epoch": 0.8532695374800638, "grad_norm": 2.118537926421423, "learning_rate": 6.422912584699753e-07, "loss": 0.3501, "step": 1605 }, { "epoch": 0.8559276980329612, "grad_norm": 2.016105032165951, "learning_rate": 6.197194751351543e-07, "loss": 0.3589, "step": 1610 }, { "epoch": 0.8585858585858586, "grad_norm": 2.0032229318591126, "learning_rate": 5.975252099556544e-07, "loss": 0.3627, "step": 1615 }, { "epoch": 0.861244019138756, "grad_norm": 2.191449628463197, "learning_rate": 5.757103757628573e-07, "loss": 0.3687, "step": 1620 }, { "epoch": 0.8639021796916534, "grad_norm": 1.9571952063843965, "learning_rate": 5.542768526865678e-07, "loss": 0.3573, "step": 1625 }, { "epoch": 0.8665603402445508, "grad_norm": 2.1555005088207846, "learning_rate": 5.332264879929682e-07, "loss": 0.3607, "step": 1630 }, { "epoch": 0.8692185007974481, "grad_norm": 2.312792401325662, "learning_rate": 5.125610959254213e-07, "loss": 0.3496, "step": 1635 }, { "epoch": 0.8718766613503456, "grad_norm": 2.1171904493372318, "learning_rate": 4.9228245754809e-07, "loss": 0.362, "step": 1640 }, { "epoch": 0.8745348219032429, "grad_norm": 2.1733472639430595, "learning_rate": 4.723923205924558e-07, "loss": 0.3604, "step": 1645 }, { "epoch": 0.8771929824561403, "grad_norm": 2.175043851294071, "learning_rate": 4.5289239930667304e-07, "loss": 0.3657, "step": 1650 }, { "epoch": 0.8798511430090378, "grad_norm": 2.368596065401823, "learning_rate": 4.3378437430783294e-07, "loss": 0.3629, "step": 1655 }, { "epoch": 0.8825093035619351, "grad_norm": 2.181916703721701, "learning_rate": 4.15069892437116e-07, "loss": 0.3603, "step": 1660 }, { "epoch": 0.8851674641148325, "grad_norm": 1.9884410794596379, "learning_rate": 3.9675056661785563e-07, "loss": 0.3483, "step": 1665 }, { "epoch": 0.88782562466773, "grad_norm": 2.030195451894179, "learning_rate": 3.7882797571653473e-07, "loss": 0.3602, "step": 1670 }, { "epoch": 0.8904837852206273, "grad_norm": 2.2030014882431312, "learning_rate": 3.6130366440669693e-07, "loss": 0.3452, "step": 1675 }, { "epoch": 0.8931419457735247, "grad_norm": 2.1504915530455424, "learning_rate": 3.441791430358299e-07, "loss": 0.3517, "step": 1680 }, { "epoch": 0.8958001063264222, "grad_norm": 2.0548225206052186, "learning_rate": 3.2745588749518775e-07, "loss": 0.3673, "step": 1685 }, { "epoch": 0.8984582668793195, "grad_norm": 2.0116181749449753, "learning_rate": 3.111353390925925e-07, "loss": 0.354, "step": 1690 }, { "epoch": 0.9011164274322169, "grad_norm": 2.2407951496611584, 
"learning_rate": 2.9521890442821276e-07, "loss": 0.356, "step": 1695 }, { "epoch": 0.9037745879851143, "grad_norm": 2.036787818054975, "learning_rate": 2.7970795527333563e-07, "loss": 0.3551, "step": 1700 }, { "epoch": 0.9064327485380117, "grad_norm": 1.8662143528967177, "learning_rate": 2.6460382845214125e-07, "loss": 0.3473, "step": 1705 }, { "epoch": 0.9090909090909091, "grad_norm": 1.9521145818680103, "learning_rate": 2.4990782572647977e-07, "loss": 0.3498, "step": 1710 }, { "epoch": 0.9117490696438065, "grad_norm": 2.0807157162150367, "learning_rate": 2.356212136836894e-07, "loss": 0.3526, "step": 1715 }, { "epoch": 0.9144072301967039, "grad_norm": 2.142229762626437, "learning_rate": 2.2174522362742647e-07, "loss": 0.3562, "step": 1720 }, { "epoch": 0.9170653907496013, "grad_norm": 2.094304223215669, "learning_rate": 2.0828105147154275e-07, "loss": 0.3467, "step": 1725 }, { "epoch": 0.9197235513024987, "grad_norm": 2.1191691095904246, "learning_rate": 1.952298576370254e-07, "loss": 0.3573, "step": 1730 }, { "epoch": 0.9223817118553961, "grad_norm": 2.2074162527799372, "learning_rate": 1.825927669519728e-07, "loss": 0.3519, "step": 1735 }, { "epoch": 0.9250398724082934, "grad_norm": 2.1885606423221016, "learning_rate": 1.7037086855465902e-07, "loss": 0.3444, "step": 1740 }, { "epoch": 0.9276980329611909, "grad_norm": 2.197465057188687, "learning_rate": 1.5856521579965866e-07, "loss": 0.3417, "step": 1745 }, { "epoch": 0.9303561935140883, "grad_norm": 2.1155552965012694, "learning_rate": 1.4717682616706917e-07, "loss": 0.3628, "step": 1750 }, { "epoch": 0.9330143540669856, "grad_norm": 2.0604186286349138, "learning_rate": 1.3620668117481471e-07, "loss": 0.3621, "step": 1755 }, { "epoch": 0.935672514619883, "grad_norm": 2.0894295505567664, "learning_rate": 1.256557262940522e-07, "loss": 0.3591, "step": 1760 }, { "epoch": 0.9383306751727805, "grad_norm": 2.2532084612884464, "learning_rate": 1.1552487086768871e-07, "loss": 0.3466, "step": 1765 }, { "epoch": 0.9409888357256778, "grad_norm": 1.9966735008414798, "learning_rate": 1.0581498803200696e-07, "loss": 0.3501, "step": 1770 }, { "epoch": 0.9436469962785752, "grad_norm": 2.1187544981067195, "learning_rate": 9.652691464141273e-08, "loss": 0.344, "step": 1775 }, { "epoch": 0.9463051568314726, "grad_norm": 2.030559494863266, "learning_rate": 8.766145119630976e-08, "loss": 0.3588, "step": 1780 }, { "epoch": 0.94896331738437, "grad_norm": 2.087686270810844, "learning_rate": 7.921936177411049e-08, "loss": 0.3419, "step": 1785 }, { "epoch": 0.9516214779372674, "grad_norm": 1.9460600257508178, "learning_rate": 7.1201373963381e-08, "loss": 0.346, "step": 1790 }, { "epoch": 0.9542796384901648, "grad_norm": 2.1336394767825855, "learning_rate": 6.360817880113335e-08, "loss": 0.3499, "step": 1795 }, { "epoch": 0.9569377990430622, "grad_norm": 2.27235436719128, "learning_rate": 5.6440430713269325e-08, "loss": 0.353, "step": 1800 }, { "epoch": 0.9595959595959596, "grad_norm": 2.244896225281087, "learning_rate": 4.9698747458176714e-08, "loss": 0.35, "step": 1805 }, { "epoch": 0.962254120148857, "grad_norm": 2.065314709776108, "learning_rate": 4.338371007348852e-08, "loss": 0.3459, "step": 1810 }, { "epoch": 0.9649122807017544, "grad_norm": 2.09774223772813, "learning_rate": 3.749586282600359e-08, "loss": 0.3482, "step": 1815 }, { "epoch": 0.9675704412546517, "grad_norm": 2.1010041159494173, "learning_rate": 3.2035713164781936e-08, "loss": 0.3392, "step": 1820 }, { "epoch": 0.9702286018075492, "grad_norm": 2.183490505896793, "learning_rate": 
2.700373167740744e-08, "loss": 0.3423, "step": 1825 }, { "epoch": 0.9728867623604466, "grad_norm": 2.03759593582233, "learning_rate": 2.2400352049429807e-08, "loss": 0.345, "step": 1830 }, { "epoch": 0.9755449229133439, "grad_norm": 2.1153372258468606, "learning_rate": 1.8225971026987755e-08, "loss": 0.3511, "step": 1835 }, { "epoch": 0.9782030834662414, "grad_norm": 1.9483951001160797, "learning_rate": 1.4480948382615267e-08, "loss": 0.3448, "step": 1840 }, { "epoch": 0.9808612440191388, "grad_norm": 1.8908522932363727, "learning_rate": 1.1165606884234182e-08, "loss": 0.3559, "step": 1845 }, { "epoch": 0.9835194045720361, "grad_norm": 2.3270461293558475, "learning_rate": 8.280232267334765e-09, "loss": 0.3434, "step": 1850 }, { "epoch": 0.9861775651249336, "grad_norm": 1.9237250529226086, "learning_rate": 5.825073210352084e-09, "loss": 0.349, "step": 1855 }, { "epoch": 0.988835725677831, "grad_norm": 2.0893502612570267, "learning_rate": 3.8003413132309265e-09, "loss": 0.3402, "step": 1860 }, { "epoch": 0.9914938862307283, "grad_norm": 2.06568498566462, "learning_rate": 2.20621107918928e-09, "loss": 0.3391, "step": 1865 }, { "epoch": 0.9941520467836257, "grad_norm": 2.1868236662669154, "learning_rate": 1.0428198996814688e-09, "loss": 0.3456, "step": 1870 }, { "epoch": 0.9968102073365231, "grad_norm": 2.34269464712247, "learning_rate": 3.1026804255207544e-10, "loss": 0.3501, "step": 1875 }, { "epoch": 0.9994683678894205, "grad_norm": 2.1579164945148563, "learning_rate": 8.618643398405902e-12, "loss": 0.3623, "step": 1880 }, { "epoch": 1.0, "eval_runtime": 3.3807, "eval_samples_per_second": 2.958, "eval_steps_per_second": 0.887, "step": 1881 }, { "epoch": 1.0, "step": 1881, "total_flos": 196921566167040.0, "train_loss": 0.5133170171946303, "train_runtime": 16745.7461, "train_samples_per_second": 1.797, "train_steps_per_second": 0.112 } ], "logging_steps": 5, "max_steps": 1881, "num_input_tokens_seen": 0, "num_train_epochs": 1, "save_steps": 100, "stateful_callbacks": { "TrainerControl": { "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": true, "should_training_stop": true }, "attributes": {} } }, "total_flos": 196921566167040.0, "train_batch_size": 4, "trial_name": null, "trial_params": null }
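The JSON above is the trainer state written at the end of a one-epoch run (1881 optimizer steps, logged every 5 steps, final `train_loss` ≈ 0.513). A minimal sketch of how the logged history can be inspected offline is shown below; the file name `trainer_state.json` (the name the Hugging Face Trainer uses when saving), the output path `training_curves.png`, and the use of matplotlib are assumptions for illustration, not part of the file itself.

```python
# Minimal sketch (assumed filenames): load the trainer state shown above and
# plot the logged training loss and learning-rate schedule against the step.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:  # assumed: the JSON above saved under this name
    state = json.load(f)

# Keep only the per-step training logs (entries carrying a "loss" key);
# the trailing entries hold the epoch-level eval and train summaries instead.
train_logs = [e for e in state["log_history"] if "loss" in e]

steps = [e["step"] for e in train_logs]
losses = [e["loss"] for e in train_logs]
lrs = [e["learning_rate"] for e in train_logs]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True, figsize=(8, 6))
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("global step")
fig.tight_layout()
fig.savefig("training_curves.png")  # assumed output path
```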