{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.11602740031684405,
  "eval_steps": 200.0,
  "global_step": 2600,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 4.4625923198786177e-05,
      "grad_norm": 0.38751721382141113,
      "learning_rate": 2.677376171352075e-09,
      "loss": 5.556500434875488,
      "step": 1
    },
    {
      "epoch": 0.00022312961599393088,
      "grad_norm": 0.37884023785591125,
      "learning_rate": 1.3386880856760375e-08,
      "loss": 5.5404791831970215,
      "step": 5
    },
    {
      "epoch": 0.00044625923198786177,
      "grad_norm": 0.38282614946365356,
      "learning_rate": 2.677376171352075e-08,
      "loss": 5.552686309814453,
      "step": 10
    },
    {
      "epoch": 0.0006693888479817927,
      "grad_norm": 0.3849342167377472,
      "learning_rate": 4.016064257028113e-08,
      "loss": 5.491200637817383,
      "step": 15
    },
    {
      "epoch": 0.0008925184639757235,
      "grad_norm": 0.3901270925998688,
      "learning_rate": 5.35475234270415e-08,
      "loss": 5.454954147338867,
      "step": 20
    },
    {
      "epoch": 0.0011156480799696543,
      "grad_norm": 0.38169801235198975,
      "learning_rate": 6.693440428380188e-08,
      "loss": 5.510653686523438,
      "step": 25
    },
    {
      "epoch": 0.0013387776959635854,
      "grad_norm": 0.37212520837783813,
      "learning_rate": 8.032128514056226e-08,
      "loss": 5.510681915283203,
      "step": 30
    },
    {
      "epoch": 0.0015619073119575162,
      "grad_norm": 0.37682151794433594,
      "learning_rate": 9.370816599732262e-08,
      "loss": 5.651955413818359,
      "step": 35
    },
    {
      "epoch": 0.001785036927951447,
      "grad_norm": 0.4001241624355316,
      "learning_rate": 1.07095046854083e-07,
      "loss": 5.452754211425781,
      "step": 40
    },
    {
      "epoch": 0.002008166543945378,
      "grad_norm": 0.405489057302475,
      "learning_rate": 1.2048192771084337e-07,
      "loss": 5.520745849609375,
      "step": 45
    },
    {
      "epoch": 0.0022312961599393086,
      "grad_norm": 0.39249682426452637,
      "learning_rate": 1.3386880856760375e-07,
      "loss": 5.529154205322266,
      "step": 50
    },
    {
      "epoch": 0.0024544257759332396,
      "grad_norm": 0.3897538483142853,
      "learning_rate": 1.4725568942436413e-07,
      "loss": 5.533833312988281,
      "step": 55
    },
    {
      "epoch": 0.0026775553919271707,
      "grad_norm": 0.3601232171058655,
      "learning_rate": 1.606425702811245e-07,
      "loss": 5.433098220825196,
      "step": 60
    },
    {
      "epoch": 0.0029006850079211013,
      "grad_norm": 0.3851630985736847,
      "learning_rate": 1.7402945113788486e-07,
      "loss": 5.608365631103515,
      "step": 65
    },
    {
      "epoch": 0.0031238146239150324,
      "grad_norm": 0.37249451875686646,
      "learning_rate": 1.8741633199464525e-07,
      "loss": 5.5082344055175785,
      "step": 70
    },
    {
      "epoch": 0.003346944239908963,
      "grad_norm": 0.37606358528137207,
      "learning_rate": 2.0080321285140563e-07,
      "loss": 5.465315246582032,
      "step": 75
    },
    {
      "epoch": 0.003570073855902894,
      "grad_norm": 0.3777235448360443,
      "learning_rate": 2.14190093708166e-07,
      "loss": 5.494733428955078,
      "step": 80
    },
    {
      "epoch": 0.0037932034718968248,
      "grad_norm": 0.3441930115222931,
      "learning_rate": 2.2757697456492636e-07,
      "loss": 5.509701538085937,
      "step": 85
    },
    {
      "epoch": 0.004016333087890756,
      "grad_norm": 0.41749539971351624,
      "learning_rate": 2.4096385542168674e-07,
      "loss": 5.484718704223633,
      "step": 90
    },
    {
      "epoch": 0.004239462703884687,
      "grad_norm": 0.426409512758255,
      "learning_rate": 2.543507362784471e-07,
      "loss": 5.485423278808594,
      "step": 95
    },
    {
      "epoch": 0.004462592319878617,
      "grad_norm": 0.3890352249145508,
      "learning_rate": 2.677376171352075e-07,
      "loss": 5.415613174438477,
      "step": 100
    },
    {
      "epoch": 0.004685721935872548,
      "grad_norm": 0.3934517502784729,
      "learning_rate": 2.811244979919679e-07,
      "loss": 5.502951049804688,
      "step": 105
    },
    {
      "epoch": 0.004908851551866479,
      "grad_norm": 0.418043315410614,
      "learning_rate": 2.9451137884872826e-07,
      "loss": 5.533256149291992,
      "step": 110
    },
    {
      "epoch": 0.00513198116786041,
      "grad_norm": 0.38715749979019165,
      "learning_rate": 3.078982597054886e-07,
      "loss": 5.538555908203125,
      "step": 115
    },
    {
      "epoch": 0.005355110783854341,
      "grad_norm": 0.38706645369529724,
      "learning_rate": 3.21285140562249e-07,
      "loss": 5.495933151245117,
      "step": 120
    },
    {
      "epoch": 0.005578240399848272,
      "grad_norm": 0.37968552112579346,
      "learning_rate": 3.346720214190094e-07,
      "loss": 5.507209014892578,
      "step": 125
    },
    {
      "epoch": 0.005801370015842203,
      "grad_norm": 0.37994489073753357,
      "learning_rate": 3.4805890227576973e-07,
      "loss": 5.505420684814453,
      "step": 130
    },
    {
      "epoch": 0.006024499631836134,
      "grad_norm": 0.37164536118507385,
      "learning_rate": 3.614457831325301e-07,
      "loss": 5.491921615600586,
      "step": 135
    },
    {
      "epoch": 0.006247629247830065,
      "grad_norm": 0.3722690939903259,
      "learning_rate": 3.748326639892905e-07,
      "loss": 5.4893798828125,
      "step": 140
    },
    {
      "epoch": 0.006470758863823995,
      "grad_norm": 0.37924614548683167,
      "learning_rate": 3.882195448460509e-07,
      "loss": 5.498228073120117,
      "step": 145
    },
    {
      "epoch": 0.006693888479817926,
      "grad_norm": 0.4063311517238617,
      "learning_rate": 4.0160642570281125e-07,
      "loss": 5.492959213256836,
      "step": 150
    },
    {
      "epoch": 0.006917018095811857,
      "grad_norm": 0.40316569805145264,
      "learning_rate": 4.149933065595716e-07,
      "loss": 5.330196762084961,
      "step": 155
    },
    {
      "epoch": 0.007140147711805788,
      "grad_norm": 0.38558679819107056,
      "learning_rate": 4.28380187416332e-07,
      "loss": 5.483318328857422,
      "step": 160
    },
    {
      "epoch": 0.0073632773277997185,
      "grad_norm": 0.3990952968597412,
      "learning_rate": 4.417670682730924e-07,
      "loss": 5.4153087615966795,
      "step": 165
    },
    {
      "epoch": 0.0075864069437936495,
      "grad_norm": 0.41763484477996826,
      "learning_rate": 4.551539491298527e-07,
      "loss": 5.46466178894043,
      "step": 170
    },
    {
      "epoch": 0.007809536559787581,
      "grad_norm": 0.41940048336982727,
      "learning_rate": 4.6854082998661315e-07,
      "loss": 5.480670166015625,
      "step": 175
    },
    {
      "epoch": 0.008032666175781512,
      "grad_norm": 0.4029385447502136,
      "learning_rate": 4.819277108433735e-07,
      "loss": 5.44397201538086,
      "step": 180
    },
    {
      "epoch": 0.008255795791775442,
      "grad_norm": 0.4002819061279297,
      "learning_rate": 4.953145917001339e-07,
      "loss": 5.355976867675781,
      "step": 185
    },
    {
      "epoch": 0.008478925407769374,
      "grad_norm": 0.41026008129119873,
      "learning_rate": 5.087014725568942e-07,
      "loss": 5.4827728271484375,
      "step": 190
    },
    {
      "epoch": 0.008702055023763304,
      "grad_norm": 0.4057953655719757,
      "learning_rate": 5.220883534136546e-07,
      "loss": 5.4723457336425785,
      "step": 195
    },
    {
      "epoch": 0.008925184639757234,
      "grad_norm": 0.39414113759994507,
      "learning_rate": 5.35475234270415e-07,
      "loss": 5.458871841430664,
      "step": 200
    },
    {
      "epoch": 0.009148314255751166,
      "grad_norm": 0.40346527099609375,
      "learning_rate": 5.488621151271754e-07,
      "loss": 5.332700729370117,
      "step": 205
    },
    {
      "epoch": 0.009371443871745096,
      "grad_norm": 0.4204488694667816,
      "learning_rate": 5.622489959839358e-07,
      "loss": 5.45002326965332,
      "step": 210
    },
    {
      "epoch": 0.009594573487739028,
      "grad_norm": 0.4042298495769501,
      "learning_rate": 5.756358768406961e-07,
      "loss": 5.469032287597656,
      "step": 215
    },
    {
      "epoch": 0.009817703103732959,
      "grad_norm": 0.42059728503227234,
      "learning_rate": 5.890227576974565e-07,
      "loss": 5.416038513183594,
      "step": 220
    },
    {
      "epoch": 0.010040832719726889,
      "grad_norm": 0.4257930517196655,
      "learning_rate": 6.024096385542169e-07,
      "loss": 5.365918731689453,
      "step": 225
    },
    {
      "epoch": 0.01026396233572082,
      "grad_norm": 0.4297005534172058,
      "learning_rate": 6.157965194109772e-07,
      "loss": 5.386648941040039,
      "step": 230
    },
    {
      "epoch": 0.010487091951714751,
      "grad_norm": 0.44034355878829956,
      "learning_rate": 6.291834002677377e-07,
      "loss": 5.4175865173339846,
      "step": 235
    },
    {
      "epoch": 0.010710221567708683,
      "grad_norm": 0.4148600995540619,
      "learning_rate": 6.42570281124498e-07,
      "loss": 5.456137084960938,
      "step": 240
    },
    {
      "epoch": 0.010933351183702613,
      "grad_norm": 0.3927938938140869,
      "learning_rate": 6.559571619812583e-07,
      "loss": 5.421617889404297,
      "step": 245
    },
    {
      "epoch": 0.011156480799696543,
      "grad_norm": 0.46364349126815796,
      "learning_rate": 6.693440428380188e-07,
      "loss": 5.263519287109375,
      "step": 250
    },
    {
      "epoch": 0.011379610415690475,
      "grad_norm": 0.4380209147930145,
      "learning_rate": 6.827309236947791e-07,
      "loss": 5.4163257598876955,
      "step": 255
    },
    {
      "epoch": 0.011602740031684405,
      "grad_norm": 0.44802144169807434,
      "learning_rate": 6.961178045515395e-07,
      "loss": 5.307229232788086,
      "step": 260
    },
    {
      "epoch": 0.011825869647678336,
      "grad_norm": 0.48721882700920105,
      "learning_rate": 7.095046854082999e-07,
      "loss": 5.361153030395508,
      "step": 265
    },
    {
      "epoch": 0.012048999263672268,
      "grad_norm": 0.4634721875190735,
      "learning_rate": 7.228915662650602e-07,
      "loss": 5.335968017578125,
      "step": 270
    },
    {
      "epoch": 0.012272128879666198,
      "grad_norm": 0.44632282853126526,
      "learning_rate": 7.362784471218206e-07,
      "loss": 5.377736282348633,
      "step": 275
    },
    {
      "epoch": 0.01249525849566013,
      "grad_norm": 0.46110817790031433,
      "learning_rate": 7.49665327978581e-07,
      "loss": 5.379433059692383,
      "step": 280
    },
    {
      "epoch": 0.01271838811165406,
      "grad_norm": 0.4558963477611542,
      "learning_rate": 7.630522088353414e-07,
      "loss": 5.252085876464844,
      "step": 285
    },
    {
      "epoch": 0.01294151772764799,
      "grad_norm": 0.4590746760368347,
      "learning_rate": 7.764390896921018e-07,
      "loss": 5.320807647705078,
      "step": 290
    },
    {
      "epoch": 0.013164647343641922,
      "grad_norm": 0.458065927028656,
      "learning_rate": 7.898259705488621e-07,
      "loss": 5.342620086669922,
      "step": 295
    },
    {
      "epoch": 0.013387776959635852,
      "grad_norm": 0.47713929414749146,
      "learning_rate": 8.032128514056225e-07,
      "loss": 5.309723281860352,
      "step": 300
    },
    {
      "epoch": 0.013610906575629784,
      "grad_norm": 0.4549182653427124,
      "learning_rate": 8.16599732262383e-07,
      "loss": 5.263495635986328,
      "step": 305
    },
    {
      "epoch": 0.013834036191623714,
      "grad_norm": 0.45380067825317383,
      "learning_rate": 8.299866131191432e-07,
      "loss": 5.270233535766602,
      "step": 310
    },
    {
      "epoch": 0.014057165807617645,
      "grad_norm": 0.4339616894721985,
      "learning_rate": 8.433734939759036e-07,
      "loss": 5.273446655273437,
      "step": 315
    },
    {
      "epoch": 0.014280295423611577,
      "grad_norm": 0.44467630982398987,
      "learning_rate": 8.56760374832664e-07,
      "loss": 5.210543823242188,
      "step": 320
    },
    {
      "epoch": 0.014503425039605507,
      "grad_norm": 0.4964953064918518,
      "learning_rate": 8.701472556894243e-07,
      "loss": 5.216452026367188,
      "step": 325
    },
    {
      "epoch": 0.014726554655599437,
      "grad_norm": 0.4745877683162689,
      "learning_rate": 8.835341365461848e-07,
      "loss": 5.237066650390625,
      "step": 330
    },
    {
      "epoch": 0.014949684271593369,
      "grad_norm": 0.4923485815525055,
      "learning_rate": 8.969210174029452e-07,
      "loss": 5.16167106628418,
      "step": 335
    },
    {
      "epoch": 0.015172813887587299,
      "grad_norm": 0.45548155903816223,
      "learning_rate": 9.103078982597054e-07,
      "loss": 5.184793090820312,
      "step": 340
    },
    {
      "epoch": 0.015395943503581231,
      "grad_norm": 0.5323435068130493,
      "learning_rate": 9.236947791164659e-07,
      "loss": 5.1162841796875,
      "step": 345
    },
    {
      "epoch": 0.015619073119575161,
      "grad_norm": 0.5220831036567688,
      "learning_rate": 9.370816599732263e-07,
      "loss": 5.135643005371094,
      "step": 350
    },
    {
      "epoch": 0.01584220273556909,
      "grad_norm": 0.5357551574707031,
      "learning_rate": 9.504685408299866e-07,
      "loss": 5.001235580444336,
      "step": 355
    },
    {
      "epoch": 0.016065332351563023,
      "grad_norm": 0.49958014488220215,
      "learning_rate": 9.63855421686747e-07,
      "loss": 5.096822357177734,
      "step": 360
    },
    {
      "epoch": 0.016288461967556955,
      "grad_norm": 0.53453129529953,
      "learning_rate": 9.772423025435074e-07,
      "loss": 5.057343673706055,
      "step": 365
    },
    {
      "epoch": 0.016511591583550884,
      "grad_norm": 0.49969372153282166,
      "learning_rate": 9.906291834002677e-07,
      "loss": 5.097013092041015,
      "step": 370
    },
    {
      "epoch": 0.016734721199544816,
      "grad_norm": 0.477762907743454,
      "learning_rate": 1.0040160642570282e-06,
      "loss": 5.05908317565918,
      "step": 375
    },
    {
      "epoch": 0.016957850815538748,
      "grad_norm": 0.5632966756820679,
      "learning_rate": 1.0174029451137885e-06,
      "loss": 4.880051040649414,
      "step": 380
    },
    {
      "epoch": 0.017180980431532676,
      "grad_norm": 0.4806019961833954,
      "learning_rate": 1.0307898259705488e-06,
      "loss": 4.938255310058594,
      "step": 385
    },
    {
      "epoch": 0.017404110047526608,
      "grad_norm": 0.5882935523986816,
      "learning_rate": 1.0441767068273092e-06,
      "loss": 4.972189331054688,
      "step": 390
    },
    {
      "epoch": 0.01762723966352054,
      "grad_norm": 0.583991289138794,
      "learning_rate": 1.0575635876840697e-06,
      "loss": 4.8074909210205075,
      "step": 395
    },
    {
      "epoch": 0.01785036927951447,
      "grad_norm": 0.6130750179290771,
      "learning_rate": 1.07095046854083e-06,
      "loss": 4.851003265380859,
      "step": 400
    },
    {
      "epoch": 0.0180734988955084,
      "grad_norm": 0.630670428276062,
      "learning_rate": 1.0843373493975905e-06,
      "loss": 4.742978286743164,
      "step": 405
    },
    {
      "epoch": 0.018296628511502332,
      "grad_norm": 0.6021527647972107,
      "learning_rate": 1.0977242302543508e-06,
      "loss": 4.795417022705078,
      "step": 410
    },
    {
      "epoch": 0.018519758127496264,
      "grad_norm": 0.6027496457099915,
      "learning_rate": 1.111111111111111e-06,
      "loss": 4.799094009399414,
      "step": 415
    },
    {
      "epoch": 0.018742887743490193,
      "grad_norm": 0.62542325258255,
      "learning_rate": 1.1244979919678715e-06,
      "loss": 4.734822845458984,
      "step": 420
    },
    {
      "epoch": 0.018966017359484125,
      "grad_norm": 0.6744217872619629,
      "learning_rate": 1.137884872824632e-06,
      "loss": 4.470468902587891,
      "step": 425
    },
    {
      "epoch": 0.019189146975478057,
      "grad_norm": 0.6994063854217529,
      "learning_rate": 1.1512717536813923e-06,
      "loss": 4.7334716796875,
      "step": 430
    },
    {
      "epoch": 0.019412276591471985,
      "grad_norm": 0.6043664813041687,
      "learning_rate": 1.1646586345381526e-06,
      "loss": 4.6335186004638675,
      "step": 435
    },
    {
      "epoch": 0.019635406207465917,
      "grad_norm": 0.600983738899231,
      "learning_rate": 1.178045515394913e-06,
      "loss": 4.521899795532226,
      "step": 440
    },
    {
      "epoch": 0.01985853582345985,
      "grad_norm": 0.632734477519989,
      "learning_rate": 1.1914323962516733e-06,
      "loss": 4.455641555786133,
      "step": 445
    },
    {
      "epoch": 0.020081665439453777,
      "grad_norm": 0.6841812133789062,
      "learning_rate": 1.2048192771084338e-06,
      "loss": 4.552269363403321,
      "step": 450
    },
    {
      "epoch": 0.02030479505544771,
      "grad_norm": 0.7014154195785522,
      "learning_rate": 1.2182061579651943e-06,
      "loss": 4.4612682342529295,
      "step": 455
    },
    {
      "epoch": 0.02052792467144164,
      "grad_norm": 0.7957334518432617,
      "learning_rate": 1.2315930388219544e-06,
      "loss": 4.16539421081543,
      "step": 460
    },
    {
      "epoch": 0.02075105428743557,
      "grad_norm": 0.6679075360298157,
      "learning_rate": 1.2449799196787148e-06,
      "loss": 4.380799102783203,
      "step": 465
    },
    {
      "epoch": 0.020974183903429502,
      "grad_norm": 0.6911934614181519,
      "learning_rate": 1.2583668005354753e-06,
      "loss": 4.188001251220703,
      "step": 470
    },
    {
      "epoch": 0.021197313519423434,
      "grad_norm": 0.6941277384757996,
      "learning_rate": 1.2717536813922356e-06,
      "loss": 4.30323600769043,
      "step": 475
    },
    {
      "epoch": 0.021420443135417366,
      "grad_norm": 0.6411834359169006,
      "learning_rate": 1.285140562248996e-06,
      "loss": 4.210566711425781,
      "step": 480
    },
    {
      "epoch": 0.021643572751411294,
      "grad_norm": 0.6447572112083435,
      "learning_rate": 1.2985274431057564e-06,
      "loss": 4.145897674560547,
      "step": 485
    },
    {
      "epoch": 0.021866702367405226,
      "grad_norm": 0.6956306099891663,
      "learning_rate": 1.3119143239625166e-06,
      "loss": 4.1259113311767575,
      "step": 490
    },
    {
      "epoch": 0.022089831983399158,
      "grad_norm": 0.6369218826293945,
      "learning_rate": 1.3253012048192771e-06,
      "loss": 4.050044250488281,
      "step": 495
    },
    {
      "epoch": 0.022312961599393086,
      "grad_norm": 0.6199172735214233,
      "learning_rate": 1.3386880856760376e-06,
      "loss": 3.9885379791259767,
      "step": 500
    },
    {
      "epoch": 0.02253609121538702,
      "grad_norm": 0.6004146933555603,
      "learning_rate": 1.3520749665327979e-06,
      "loss": 3.9876686096191407,
      "step": 505
    },
    {
      "epoch": 0.02275922083138095,
      "grad_norm": 0.6573876142501831,
      "learning_rate": 1.3654618473895582e-06,
      "loss": 3.860630416870117,
      "step": 510
    },
    {
      "epoch": 0.02298235044737488,
      "grad_norm": 0.6570749878883362,
      "learning_rate": 1.3788487282463186e-06,
      "loss": 3.987852096557617,
      "step": 515
    },
    {
      "epoch": 0.02320548006336881,
      "grad_norm": 0.6052077412605286,
      "learning_rate": 1.392235609103079e-06,
      "loss": 3.773631286621094,
      "step": 520
    },
    {
      "epoch": 0.023428609679362743,
      "grad_norm": 0.6024802923202515,
      "learning_rate": 1.4056224899598394e-06,
      "loss": 3.8334468841552733,
      "step": 525
    },
    {
      "epoch": 0.02365173929535667,
      "grad_norm": 0.6448367238044739,
      "learning_rate": 1.4190093708165999e-06,
      "loss": 3.709330749511719,
      "step": 530
    },
    {
      "epoch": 0.023874868911350603,
      "grad_norm": 0.5610556602478027,
      "learning_rate": 1.4323962516733602e-06,
      "loss": 3.608018493652344,
      "step": 535
    },
    {
      "epoch": 0.024097998527344535,
      "grad_norm": 0.5798413157463074,
      "learning_rate": 1.4457831325301204e-06,
      "loss": 3.513960266113281,
      "step": 540
    },
    {
      "epoch": 0.024321128143338467,
      "grad_norm": 0.5836862921714783,
      "learning_rate": 1.459170013386881e-06,
      "loss": 3.5928466796875,
      "step": 545
    },
    {
      "epoch": 0.024544257759332395,
      "grad_norm": 0.5705676078796387,
      "learning_rate": 1.4725568942436412e-06,
      "loss": 3.5890869140625,
      "step": 550
    },
    {
      "epoch": 0.024767387375326327,
      "grad_norm": 0.6377549767494202,
      "learning_rate": 1.4859437751004017e-06,
      "loss": 3.6042083740234374,
      "step": 555
    },
    {
      "epoch": 0.02499051699132026,
      "grad_norm": 0.5612762570381165,
      "learning_rate": 1.499330655957162e-06,
      "loss": 3.511037826538086,
      "step": 560
    },
    {
      "epoch": 0.025213646607314188,
      "grad_norm": 0.5943495035171509,
      "learning_rate": 1.5127175368139222e-06,
      "loss": 3.5007259368896486,
      "step": 565
    },
    {
      "epoch": 0.02543677622330812,
      "grad_norm": 0.5872607827186584,
      "learning_rate": 1.5261044176706827e-06,
      "loss": 3.3742324829101564,
      "step": 570
    },
    {
      "epoch": 0.02565990583930205,
      "grad_norm": 0.5494210124015808,
      "learning_rate": 1.5394912985274432e-06,
      "loss": 3.3373321533203124,
      "step": 575
    },
    {
      "epoch": 0.02588303545529598,
      "grad_norm": 0.5138423442840576,
      "learning_rate": 1.5528781793842037e-06,
      "loss": 3.346783447265625,
      "step": 580
    },
    {
      "epoch": 0.026106165071289912,
      "grad_norm": 0.5539407730102539,
      "learning_rate": 1.566265060240964e-06,
      "loss": 3.183103561401367,
      "step": 585
    },
    {
      "epoch": 0.026329294687283844,
      "grad_norm": 0.5888996720314026,
      "learning_rate": 1.5796519410977242e-06,
      "loss": 3.357350540161133,
      "step": 590
    },
    {
      "epoch": 0.026552424303277773,
      "grad_norm": 0.5445035696029663,
      "learning_rate": 1.5930388219544845e-06,
      "loss": 3.374500274658203,
      "step": 595
    },
    {
      "epoch": 0.026775553919271704,
      "grad_norm": 0.5013962388038635,
      "learning_rate": 1.606425702811245e-06,
      "loss": 3.3687782287597656,
      "step": 600
    },
    {
      "epoch": 0.026998683535265636,
      "grad_norm": 0.5150508284568787,
      "learning_rate": 1.6198125836680055e-06,
      "loss": 3.2590232849121095,
      "step": 605
    },
    {
      "epoch": 0.02722181315125957,
      "grad_norm": 0.603961169719696,
      "learning_rate": 1.633199464524766e-06,
      "loss": 3.1635900497436524,
      "step": 610
    },
    {
      "epoch": 0.027444942767253497,
      "grad_norm": 0.5365360975265503,
      "learning_rate": 1.6465863453815263e-06,
      "loss": 3.3600418090820314,
      "step": 615
    },
    {
      "epoch": 0.02766807238324743,
      "grad_norm": 0.5401721000671387,
      "learning_rate": 1.6599732262382863e-06,
      "loss": 3.183880424499512,
      "step": 620
    },
    {
      "epoch": 0.02789120199924136,
      "grad_norm": 0.5595034956932068,
      "learning_rate": 1.6733601070950468e-06,
      "loss": 3.0762868881225587,
      "step": 625
    },
    {
      "epoch": 0.02811433161523529,
      "grad_norm": 0.4972860813140869,
      "learning_rate": 1.6867469879518073e-06,
      "loss": 3.0724456787109373,
      "step": 630
    },
    {
      "epoch": 0.02833746123122922,
      "grad_norm": 0.44132477045059204,
      "learning_rate": 1.7001338688085678e-06,
      "loss": 3.1175127029418945,
      "step": 635
    },
    {
      "epoch": 0.028560590847223153,
      "grad_norm": 0.5078781843185425,
      "learning_rate": 1.713520749665328e-06,
      "loss": 3.0849445343017576,
      "step": 640
    },
    {
      "epoch": 0.02878372046321708,
      "grad_norm": 0.4757557809352875,
      "learning_rate": 1.7269076305220885e-06,
      "loss": 3.2029571533203125,
      "step": 645
    },
    {
      "epoch": 0.029006850079211013,
      "grad_norm": 0.4932602345943451,
      "learning_rate": 1.7402945113788486e-06,
      "loss": 3.0668067932128906,
      "step": 650
    },
    {
      "epoch": 0.029229979695204945,
      "grad_norm": 0.5687287449836731,
      "learning_rate": 1.753681392235609e-06,
      "loss": 3.113470268249512,
      "step": 655
    },
    {
      "epoch": 0.029453109311198874,
      "grad_norm": 0.4471919536590576,
      "learning_rate": 1.7670682730923696e-06,
      "loss": 2.9105451583862303,
      "step": 660
    },
    {
      "epoch": 0.029676238927192806,
      "grad_norm": 0.5096343159675598,
      "learning_rate": 1.7804551539491298e-06,
      "loss": 2.94341926574707,
      "step": 665
    },
    {
      "epoch": 0.029899368543186738,
      "grad_norm": 0.5599822402000427,
      "learning_rate": 1.7938420348058903e-06,
      "loss": 2.892704391479492,
      "step": 670
    },
    {
      "epoch": 0.03012249815918067,
      "grad_norm": 0.40055856108665466,
      "learning_rate": 1.8072289156626508e-06,
      "loss": 2.896807861328125,
      "step": 675
    },
    {
      "epoch": 0.030345627775174598,
      "grad_norm": 0.47082021832466125,
      "learning_rate": 1.8206157965194109e-06,
      "loss": 2.7966148376464846,
      "step": 680
    },
    {
      "epoch": 0.03056875739116853,
      "grad_norm": 0.42042815685272217,
      "learning_rate": 1.8340026773761714e-06,
      "loss": 2.839730644226074,
      "step": 685
    },
    {
      "epoch": 0.030791887007162462,
      "grad_norm": 0.49198001623153687,
      "learning_rate": 1.8473895582329318e-06,
      "loss": 2.9913705825805663,
      "step": 690
    },
    {
      "epoch": 0.03101501662315639,
      "grad_norm": 0.4656424820423126,
      "learning_rate": 1.8607764390896921e-06,
      "loss": 3.0195621490478515,
      "step": 695
    },
    {
      "epoch": 0.031238146239150322,
      "grad_norm": 0.46259376406669617,
      "learning_rate": 1.8741633199464526e-06,
      "loss": 2.948585319519043,
      "step": 700
    },
    {
      "epoch": 0.03146127585514425,
      "grad_norm": 0.4671732783317566,
      "learning_rate": 1.887550200803213e-06,
      "loss": 2.7285741806030273,
      "step": 705
    },
    {
      "epoch": 0.03168440547113818,
      "grad_norm": 0.6493679285049438,
      "learning_rate": 1.9009370816599732e-06,
      "loss": 2.9263893127441407,
      "step": 710
    },
    {
      "epoch": 0.031907535087132115,
      "grad_norm": 0.46389710903167725,
      "learning_rate": 1.9143239625167336e-06,
      "loss": 2.634868049621582,
      "step": 715
    },
    {
      "epoch": 0.03213066470312605,
      "grad_norm": 0.44173645973205566,
      "learning_rate": 1.927710843373494e-06,
      "loss": 2.7959117889404297,
      "step": 720
    },
    {
      "epoch": 0.03235379431911998,
      "grad_norm": 0.45440003275871277,
      "learning_rate": 1.9410977242302546e-06,
      "loss": 2.9056529998779297,
      "step": 725
    },
    {
      "epoch": 0.03257692393511391,
      "grad_norm": 0.44541046023368835,
      "learning_rate": 1.954484605087015e-06,
      "loss": 2.7214975357055664,
      "step": 730
    },
    {
      "epoch": 0.032800053551107836,
      "grad_norm": 0.4217804968357086,
      "learning_rate": 1.967871485943775e-06,
      "loss": 2.7198978424072267,
      "step": 735
    },
    {
      "epoch": 0.03302318316710177,
      "grad_norm": 0.46086645126342773,
      "learning_rate": 1.9812583668005354e-06,
      "loss": 2.7366127014160155,
      "step": 740
    },
    {
      "epoch": 0.0332463127830957,
      "grad_norm": 0.41817471385002136,
      "learning_rate": 1.9946452476572957e-06,
      "loss": 2.857570457458496,
      "step": 745
    },
    {
      "epoch": 0.03346944239908963,
      "grad_norm": 0.44163694977760315,
      "learning_rate": 2.0080321285140564e-06,
      "loss": 2.8251710891723634,
      "step": 750
    },
    {
      "epoch": 0.03369257201508356,
      "grad_norm": 0.46645739674568176,
      "learning_rate": 2.0214190093708167e-06,
      "loss": 2.548258399963379,
      "step": 755
    },
    {
      "epoch": 0.033915701631077495,
      "grad_norm": 0.3971084654331207,
      "learning_rate": 2.034805890227577e-06,
      "loss": 2.682722473144531,
      "step": 760
    },
    {
      "epoch": 0.03413883124707143,
      "grad_norm": 0.5373950004577637,
      "learning_rate": 2.0481927710843377e-06,
      "loss": 2.8539506912231447,
      "step": 765
    },
    {
      "epoch": 0.03436196086306535,
      "grad_norm": 0.6133913397789001,
      "learning_rate": 2.0615796519410975e-06,
      "loss": 2.7016387939453126,
      "step": 770
    },
    {
      "epoch": 0.034585090479059284,
      "grad_norm": 0.5247603058815002,
      "learning_rate": 2.074966532797858e-06,
      "loss": 2.645602226257324,
      "step": 775
    },
    {
      "epoch": 0.034808220095053216,
      "grad_norm": 0.4487764537334442,
      "learning_rate": 2.0883534136546185e-06,
      "loss": 2.699404716491699,
      "step": 780
    },
    {
      "epoch": 0.03503134971104715,
      "grad_norm": 0.5962596535682678,
      "learning_rate": 2.1017402945113788e-06,
      "loss": 2.7891347885131834,
      "step": 785
    },
    {
      "epoch": 0.03525447932704108,
      "grad_norm": 0.5410242080688477,
      "learning_rate": 2.1151271753681395e-06,
      "loss": 2.675504684448242,
      "step": 790
    },
    {
      "epoch": 0.03547760894303501,
      "grad_norm": 0.4576985239982605,
      "learning_rate": 2.1285140562248997e-06,
      "loss": 2.642281341552734,
      "step": 795
    },
    {
      "epoch": 0.03570073855902894,
      "grad_norm": 0.45449239015579224,
      "learning_rate": 2.14190093708166e-06,
      "loss": 2.7088804244995117,
      "step": 800
    },
    {
      "epoch": 0.03592386817502287,
      "grad_norm": 0.4782140552997589,
      "learning_rate": 2.1552878179384203e-06,
      "loss": 2.698718452453613,
      "step": 805
    },
    {
      "epoch": 0.0361469977910168,
      "grad_norm": 0.43745940923690796,
      "learning_rate": 2.168674698795181e-06,
      "loss": 2.7042917251586913,
      "step": 810
    },
    {
      "epoch": 0.03637012740701073,
      "grad_norm": 0.6170194149017334,
      "learning_rate": 2.1820615796519413e-06,
      "loss": 2.6548912048339846,
      "step": 815
    },
    {
      "epoch": 0.036593257023004665,
      "grad_norm": 0.5126772522926331,
      "learning_rate": 2.1954484605087015e-06,
      "loss": 2.663498306274414,
      "step": 820
    },
    {
      "epoch": 0.0368163866389986,
      "grad_norm": 0.42503371834754944,
      "learning_rate": 2.2088353413654622e-06,
      "loss": 2.766114616394043,
      "step": 825
    },
    {
      "epoch": 0.03703951625499253,
      "grad_norm": 0.5707629323005676,
      "learning_rate": 2.222222222222222e-06,
      "loss": 2.6172439575195314,
      "step": 830
    },
    {
      "epoch": 0.037262645870986454,
      "grad_norm": 0.45169416069984436,
      "learning_rate": 2.2356091030789828e-06,
      "loss": 2.5655603408813477,
      "step": 835
    },
    {
      "epoch": 0.037485775486980386,
      "grad_norm": 0.44726496934890747,
      "learning_rate": 2.248995983935743e-06,
      "loss": 2.5273290634155274,
      "step": 840
    },
    {
      "epoch": 0.03770890510297432,
      "grad_norm": 0.5287439823150635,
      "learning_rate": 2.2623828647925033e-06,
      "loss": 2.6541069030761717,
      "step": 845
    },
    {
      "epoch": 0.03793203471896825,
      "grad_norm": 0.46616867184638977,
      "learning_rate": 2.275769745649264e-06,
      "loss": 2.56517333984375,
      "step": 850
    },
    {
      "epoch": 0.03815516433496218,
      "grad_norm": 0.4369581937789917,
      "learning_rate": 2.2891566265060243e-06,
      "loss": 2.4222312927246095,
      "step": 855
    },
    {
      "epoch": 0.03837829395095611,
      "grad_norm": 0.4172717332839966,
      "learning_rate": 2.3025435073627846e-06,
      "loss": 2.549334716796875,
      "step": 860
    },
    {
      "epoch": 0.03860142356695004,
      "grad_norm": 0.46124762296676636,
      "learning_rate": 2.315930388219545e-06,
      "loss": 2.5675996780395507,
      "step": 865
    },
    {
      "epoch": 0.03882455318294397,
      "grad_norm": 0.4226182699203491,
      "learning_rate": 2.329317269076305e-06,
      "loss": 2.4144569396972657,
      "step": 870
    },
    {
      "epoch": 0.0390476827989379,
      "grad_norm": 0.4673754870891571,
      "learning_rate": 2.342704149933066e-06,
      "loss": 2.3829999923706056,
      "step": 875
    },
    {
      "epoch": 0.039270812414931834,
      "grad_norm": 0.5152049660682678,
      "learning_rate": 2.356091030789826e-06,
      "loss": 2.5030202865600586,
      "step": 880
    },
    {
      "epoch": 0.039493942030925766,
      "grad_norm": 0.5377767086029053,
      "learning_rate": 2.3694779116465864e-06,
      "loss": 2.7714466094970702,
      "step": 885
    },
    {
      "epoch": 0.0397170716469197,
      "grad_norm": 0.45861950516700745,
      "learning_rate": 2.3828647925033466e-06,
      "loss": 2.461492729187012,
      "step": 890
    },
    {
      "epoch": 0.03994020126291363,
      "grad_norm": 0.4814665615558624,
      "learning_rate": 2.396251673360107e-06,
      "loss": 2.4756641387939453,
      "step": 895
    },
    {
      "epoch": 0.040163330878907555,
      "grad_norm": 0.5024599432945251,
      "learning_rate": 2.4096385542168676e-06,
      "loss": 2.5811389923095702,
      "step": 900
    },
    {
      "epoch": 0.04038646049490149,
      "grad_norm": 0.45141535997390747,
      "learning_rate": 2.423025435073628e-06,
      "loss": 2.686309814453125,
      "step": 905
    },
    {
      "epoch": 0.04060959011089542,
      "grad_norm": 0.4806381165981293,
      "learning_rate": 2.4364123159303886e-06,
      "loss": 2.530971717834473,
      "step": 910
    },
    {
      "epoch": 0.04083271972688935,
      "grad_norm": 0.7574505805969238,
      "learning_rate": 2.449799196787149e-06,
      "loss": 2.5287572860717775,
      "step": 915
    },
    {
      "epoch": 0.04105584934288328,
      "grad_norm": 0.39056211709976196,
      "learning_rate": 2.4631860776439087e-06,
      "loss": 2.4373926162719726,
      "step": 920
    },
    {
      "epoch": 0.041278978958877215,
      "grad_norm": 0.4549945890903473,
      "learning_rate": 2.4765729585006694e-06,
      "loss": 2.508747100830078,
      "step": 925
    },
    {
      "epoch": 0.04150210857487114,
      "grad_norm": 0.49253398180007935,
      "learning_rate": 2.4899598393574297e-06,
      "loss": 2.5109813690185545,
      "step": 930
    },
    {
      "epoch": 0.04172523819086507,
      "grad_norm": 0.4185622036457062,
      "learning_rate": 2.5033467202141904e-06,
      "loss": 2.5602550506591797,
      "step": 935
    },
    {
      "epoch": 0.041948367806859004,
      "grad_norm": 0.5255184769630432,
      "learning_rate": 2.5167336010709507e-06,
      "loss": 2.5018032073974608,
      "step": 940
    },
    {
      "epoch": 0.042171497422852935,
      "grad_norm": 0.40105360746383667,
      "learning_rate": 2.530120481927711e-06,
      "loss": 2.3703737258911133,
      "step": 945
    },
    {
      "epoch": 0.04239462703884687,
      "grad_norm": 0.5396221876144409,
      "learning_rate": 2.543507362784471e-06,
      "loss": 2.46767520904541,
      "step": 950
    },
    {
      "epoch": 0.0426177566548408,
      "grad_norm": 0.4686439633369446,
      "learning_rate": 2.5568942436412315e-06,
      "loss": 2.535739517211914,
      "step": 955
    },
    {
      "epoch": 0.04284088627083473,
      "grad_norm": 0.5110803246498108,
      "learning_rate": 2.570281124497992e-06,
      "loss": 2.5248756408691406,
      "step": 960
    },
    {
      "epoch": 0.043064015886828656,
      "grad_norm": 0.5164591670036316,
      "learning_rate": 2.5836680053547524e-06,
      "loss": 2.4275962829589846,
      "step": 965
    },
    {
      "epoch": 0.04328714550282259,
      "grad_norm": 0.42096105217933655,
      "learning_rate": 2.5970548862115127e-06,
      "loss": 2.4086238861083986,
      "step": 970
    },
    {
      "epoch": 0.04351027511881652,
      "grad_norm": 0.5388748645782471,
      "learning_rate": 2.6104417670682734e-06,
      "loss": 2.340771293640137,
      "step": 975
    },
    {
      "epoch": 0.04373340473481045,
      "grad_norm": 0.4667651653289795,
      "learning_rate": 2.6238286479250333e-06,
      "loss": 2.506967544555664,
      "step": 980
    },
    {
      "epoch": 0.043956534350804384,
      "grad_norm": 0.5090363621711731,
      "learning_rate": 2.637215528781794e-06,
      "loss": 2.251785469055176,
      "step": 985
    },
    {
      "epoch": 0.044179663966798316,
      "grad_norm": 0.4600473940372467,
      "learning_rate": 2.6506024096385542e-06,
      "loss": 2.3269046783447265,
      "step": 990
    },
    {
      "epoch": 0.04440279358279224,
      "grad_norm": 0.4453408718109131,
      "learning_rate": 2.6639892904953145e-06,
      "loss": 2.4940771102905273,
      "step": 995
    },
    {
      "epoch": 0.04462592319878617,
      "grad_norm": 0.4637933671474457,
      "learning_rate": 2.6773761713520752e-06,
      "loss": 2.578083801269531,
      "step": 1000
    },
    {
      "epoch": 0.044849052814780105,
      "grad_norm": 0.48008954524993896,
      "learning_rate": 2.6907630522088355e-06,
      "loss": 2.509678077697754,
      "step": 1005
    },
    {
      "epoch": 0.04507218243077404,
      "grad_norm": 0.5186890363693237,
      "learning_rate": 2.7041499330655958e-06,
      "loss": 2.420572280883789,
      "step": 1010
    },
    {
      "epoch": 0.04529531204676797,
      "grad_norm": 0.5090295076370239,
      "learning_rate": 2.717536813922356e-06,
      "loss": 2.42071533203125,
      "step": 1015
    },
    {
      "epoch": 0.0455184416627619,
      "grad_norm": 0.5519959926605225,
      "learning_rate": 2.7309236947791163e-06,
      "loss": 2.458280563354492,
      "step": 1020
    },
    {
      "epoch": 0.04574157127875583,
      "grad_norm": 0.5525882840156555,
      "learning_rate": 2.744310575635877e-06,
      "loss": 2.3400331497192384,
      "step": 1025
    },
    {
      "epoch": 0.04596470089474976,
      "grad_norm": 0.48508089780807495,
      "learning_rate": 2.7576974564926373e-06,
      "loss": 2.4403892517089845,
      "step": 1030
    },
    {
      "epoch": 0.04618783051074369,
      "grad_norm": 0.5230780839920044,
      "learning_rate": 2.771084337349398e-06,
      "loss": 2.3652227401733397,
      "step": 1035
    },
    {
      "epoch": 0.04641096012673762,
      "grad_norm": 0.5132386088371277,
      "learning_rate": 2.784471218206158e-06,
      "loss": 2.313191604614258,
      "step": 1040
    },
    {
      "epoch": 0.04663408974273155,
      "grad_norm": 0.490017294883728,
      "learning_rate": 2.7978580990629185e-06,
      "loss": 2.24790096282959,
      "step": 1045
    },
    {
      "epoch": 0.046857219358725485,
      "grad_norm": 0.5250972509384155,
      "learning_rate": 2.811244979919679e-06,
      "loss": 2.1867441177368163,
      "step": 1050
    },
    {
      "epoch": 0.04708034897471942,
      "grad_norm": 0.45831558108329773,
      "learning_rate": 2.824631860776439e-06,
      "loss": 2.379189300537109,
      "step": 1055
    },
    {
      "epoch": 0.04730347859071334,
      "grad_norm": 0.5892948508262634,
      "learning_rate": 2.8380187416331998e-06,
      "loss": 2.2735191345214845,
      "step": 1060
    },
    {
      "epoch": 0.047526608206707274,
      "grad_norm": 0.5409689545631409,
      "learning_rate": 2.85140562248996e-06,
      "loss": 2.3979333877563476,
      "step": 1065
    },
    {
      "epoch": 0.047749737822701206,
      "grad_norm": 0.5346503257751465,
      "learning_rate": 2.8647925033467203e-06,
      "loss": 2.4819768905639648,
      "step": 1070
    },
    {
      "epoch": 0.04797286743869514,
      "grad_norm": 0.6150287985801697,
      "learning_rate": 2.8781793842034806e-06,
      "loss": 2.5218339920043946,
      "step": 1075
    },
    {
      "epoch": 0.04819599705468907,
      "grad_norm": 0.4347957968711853,
      "learning_rate": 2.891566265060241e-06,
      "loss": 2.1929386138916014,
      "step": 1080
    },
    {
      "epoch": 0.048419126670683,
      "grad_norm": 0.5238802433013916,
      "learning_rate": 2.9049531459170016e-06,
      "loss": 2.215384292602539,
      "step": 1085
    },
    {
      "epoch": 0.048642256286676934,
      "grad_norm": 0.5371809601783752,
      "learning_rate": 2.918340026773762e-06,
      "loss": 2.4019641876220703,
      "step": 1090
    },
    {
      "epoch": 0.04886538590267086,
      "grad_norm": 0.5125853419303894,
      "learning_rate": 2.931726907630522e-06,
      "loss": 2.3410377502441406,
      "step": 1095
    },
    {
      "epoch": 0.04908851551866479,
      "grad_norm": 0.5087509751319885,
      "learning_rate": 2.9451137884872824e-06,
      "loss": 2.3634616851806642,
      "step": 1100
    },
    {
      "epoch": 0.04931164513465872,
      "grad_norm": 0.5605607628822327,
      "learning_rate": 2.9585006693440427e-06,
      "loss": 2.239984130859375,
      "step": 1105
    },
    {
      "epoch": 0.049534774750652655,
      "grad_norm": 0.6371492743492126,
      "learning_rate": 2.9718875502008034e-06,
      "loss": 2.499210739135742,
      "step": 1110
    },
    {
      "epoch": 0.04975790436664659,
      "grad_norm": 0.5526962280273438,
      "learning_rate": 2.9852744310575636e-06,
      "loss": 2.433857536315918,
      "step": 1115
    },
    {
      "epoch": 0.04998103398264052,
      "grad_norm": 0.5811582803726196,
      "learning_rate": 2.998661311914324e-06,
      "loss": 2.372517967224121,
      "step": 1120
    },
    {
      "epoch": 0.050204163598634444,
      "grad_norm": 0.47710540890693665,
      "learning_rate": 3.0120481927710846e-06,
      "loss": 2.441071891784668,
      "step": 1125
    },
    {
      "epoch": 0.050427293214628376,
      "grad_norm": 0.49463921785354614,
      "learning_rate": 3.0254350736278445e-06,
      "loss": 2.218737030029297,
      "step": 1130
    },
    {
      "epoch": 0.05065042283062231,
      "grad_norm": 0.7574100494384766,
      "learning_rate": 3.0388219544846056e-06,
      "loss": 2.1490055084228517,
      "step": 1135
    },
    {
      "epoch": 0.05087355244661624,
      "grad_norm": 0.5574637651443481,
      "learning_rate": 3.0522088353413654e-06,
      "loss": 2.3484785079956056,
      "step": 1140
    },
    {
      "epoch": 0.05109668206261017,
      "grad_norm": 0.5128051042556763,
      "learning_rate": 3.0655957161981257e-06,
      "loss": 2.2358184814453126,
      "step": 1145
    },
    {
      "epoch": 0.0513198116786041,
      "grad_norm": 0.4747551679611206,
      "learning_rate": 3.0789825970548864e-06,
      "loss": 2.2787382125854494,
      "step": 1150
    },
    {
      "epoch": 0.051542941294598035,
      "grad_norm": 0.4365915358066559,
      "learning_rate": 3.0923694779116467e-06,
      "loss": 2.327534484863281,
      "step": 1155
    },
    {
      "epoch": 0.05176607091059196,
      "grad_norm": 0.6433975696563721,
      "learning_rate": 3.1057563587684074e-06,
      "loss": 2.288431167602539,
      "step": 1160
    },
    {
      "epoch": 0.05198920052658589,
      "grad_norm": 0.6102743744850159,
      "learning_rate": 3.1191432396251672e-06,
      "loss": 2.24114933013916,
      "step": 1165
    },
    {
      "epoch": 0.052212330142579824,
      "grad_norm": 0.5493948459625244,
      "learning_rate": 3.132530120481928e-06,
      "loss": 2.317499542236328,
      "step": 1170
    },
    {
      "epoch": 0.052435459758573756,
      "grad_norm": 0.5411773324012756,
      "learning_rate": 3.145917001338688e-06,
      "loss": 2.1778676986694334,
      "step": 1175
    },
    {
      "epoch": 0.05265858937456769,
      "grad_norm": 0.5491811633110046,
      "learning_rate": 3.1593038821954485e-06,
      "loss": 2.260644721984863,
      "step": 1180
    },
    {
      "epoch": 0.05288171899056162,
      "grad_norm": 0.5138102769851685,
      "learning_rate": 3.172690763052209e-06,
      "loss": 2.2743204116821287,
      "step": 1185
    },
    {
      "epoch": 0.053104848606555545,
      "grad_norm": 0.5310954451560974,
      "learning_rate": 3.186077643908969e-06,
      "loss": 2.257237434387207,
      "step": 1190
    },
    {
      "epoch": 0.05332797822254948,
      "grad_norm": 0.5704927444458008,
      "learning_rate": 3.1994645247657297e-06,
      "loss": 2.0418323516845702,
      "step": 1195
    },
    {
      "epoch": 0.05355110783854341,
      "grad_norm": 0.5295515656471252,
      "learning_rate": 3.21285140562249e-06,
      "loss": 2.282415771484375,
      "step": 1200
    },
    {
      "epoch": 0.05377423745453734,
      "grad_norm": 0.6476492881774902,
      "learning_rate": 3.2262382864792503e-06,
      "loss": 2.306402015686035,
      "step": 1205
    },
    {
      "epoch": 0.05399736707053127,
      "grad_norm": 0.6115656495094299,
      "learning_rate": 3.239625167336011e-06,
      "loss": 2.1535377502441406,
      "step": 1210
    },
    {
      "epoch": 0.054220496686525205,
      "grad_norm": 0.5802296996116638,
      "learning_rate": 3.253012048192771e-06,
      "loss": 2.2679920196533203,
      "step": 1215
    },
    {
      "epoch": 0.05444362630251914,
      "grad_norm": 0.5182514190673828,
      "learning_rate": 3.266398929049532e-06,
      "loss": 2.3105010986328125,
      "step": 1220
    },
    {
      "epoch": 0.05466675591851306,
      "grad_norm": 0.6288827657699585,
      "learning_rate": 3.279785809906292e-06,
      "loss": 2.0794536590576174,
      "step": 1225
    },
    {
      "epoch": 0.054889885534506994,
      "grad_norm": 0.7542652487754822,
      "learning_rate": 3.2931726907630525e-06,
      "loss": 2.3161176681518554,
      "step": 1230
    },
    {
      "epoch": 0.055113015150500926,
      "grad_norm": 0.7416955232620239,
      "learning_rate": 3.3065595716198128e-06,
      "loss": 2.279362106323242,
      "step": 1235
    },
    {
      "epoch": 0.05533614476649486,
      "grad_norm": 0.5682216882705688,
      "learning_rate": 3.3199464524765726e-06,
      "loss": 2.2426963806152345,
      "step": 1240
    },
    {
      "epoch": 0.05555927438248879,
      "grad_norm": 0.6715278625488281,
      "learning_rate": 3.3333333333333337e-06,
      "loss": 2.1546775817871096,
      "step": 1245
    },
    {
      "epoch": 0.05578240399848272,
      "grad_norm": 0.5482577085494995,
      "learning_rate": 3.3467202141900936e-06,
      "loss": 2.1484096527099608,
      "step": 1250
    },
    {
      "epoch": 0.056005533614476646,
      "grad_norm": 0.6009266972541809,
      "learning_rate": 3.3601070950468543e-06,
      "loss": 2.221162796020508,
      "step": 1255
    },
    {
      "epoch": 0.05622866323047058,
      "grad_norm": 0.6640043258666992,
      "learning_rate": 3.3734939759036146e-06,
      "loss": 2.338100242614746,
      "step": 1260
    },
    {
      "epoch": 0.05645179284646451,
      "grad_norm": 0.6514598727226257,
      "learning_rate": 3.3868808567603744e-06,
      "loss": 2.2825199127197267,
      "step": 1265
    },
    {
      "epoch": 0.05667492246245844,
      "grad_norm": 0.6053763031959534,
      "learning_rate": 3.4002677376171355e-06,
      "loss": 2.280612754821777,
      "step": 1270
    },
    {
      "epoch": 0.056898052078452374,
      "grad_norm": 0.8854132294654846,
      "learning_rate": 3.4136546184738954e-06,
      "loss": 2.3481237411499025,
      "step": 1275
    },
    {
      "epoch": 0.057121181694446306,
      "grad_norm": 0.6433872580528259,
      "learning_rate": 3.427041499330656e-06,
      "loss": 2.2561552047729494,
      "step": 1280
    },
    {
      "epoch": 0.05734431131044024,
      "grad_norm": 0.709402859210968,
      "learning_rate": 3.4404283801874164e-06,
      "loss": 2.264519691467285,
      "step": 1285
    },
    {
      "epoch": 0.05756744092643416,
      "grad_norm": 0.5431029796600342,
      "learning_rate": 3.453815261044177e-06,
      "loss": 2.163587760925293,
      "step": 1290
    },
    {
      "epoch": 0.057790570542428095,
      "grad_norm": 0.7146894931793213,
      "learning_rate": 3.4672021419009373e-06,
      "loss": 2.2624624252319334,
      "step": 1295
    },
    {
      "epoch": 0.05801370015842203,
      "grad_norm": 0.6029372215270996,
      "learning_rate": 3.480589022757697e-06,
      "loss": 2.0209115982055663,
      "step": 1300
    },
    {
      "epoch": 0.05823682977441596,
      "grad_norm": 0.6099830865859985,
      "learning_rate": 3.493975903614458e-06,
      "loss": 2.177568054199219,
      "step": 1305
    },
    {
      "epoch": 0.05845995939040989,
      "grad_norm": 0.8421804308891296,
      "learning_rate": 3.507362784471218e-06,
      "loss": 2.1980712890625,
      "step": 1310
    },
    {
      "epoch": 0.05868308900640382,
      "grad_norm": 0.7849622368812561,
      "learning_rate": 3.520749665327979e-06,
      "loss": 2.2047344207763673,
      "step": 1315
    },
    {
      "epoch": 0.05890621862239775,
      "grad_norm": 0.5906243324279785,
      "learning_rate": 3.534136546184739e-06,
      "loss": 2.3746465682983398,
      "step": 1320
    },
    {
      "epoch": 0.05912934823839168,
      "grad_norm": 0.9020318388938904,
      "learning_rate": 3.547523427041499e-06,
      "loss": 2.2612667083740234,
      "step": 1325
    },
    {
      "epoch": 0.05935247785438561,
      "grad_norm": 0.6542187929153442,
      "learning_rate": 3.5609103078982597e-06,
      "loss": 2.2060800552368165,
      "step": 1330
    },
    {
      "epoch": 0.059575607470379544,
      "grad_norm": 0.7428557872772217,
      "learning_rate": 3.57429718875502e-06,
      "loss": 2.0877670288085937,
      "step": 1335
    },
    {
      "epoch": 0.059798737086373475,
      "grad_norm": 0.7468170523643494,
      "learning_rate": 3.5876840696117807e-06,
      "loss": 2.3160980224609373,
      "step": 1340
    },
    {
      "epoch": 0.06002186670236741,
      "grad_norm": 0.8021371960639954,
      "learning_rate": 3.601070950468541e-06,
      "loss": 2.1923311233520506,
      "step": 1345
    },
    {
      "epoch": 0.06024499631836134,
      "grad_norm": 0.6904175281524658,
      "learning_rate": 3.6144578313253016e-06,
      "loss": 2.1911346435546877,
      "step": 1350
    },
    {
      "epoch": 0.060468125934355264,
      "grad_norm": 0.7336163520812988,
      "learning_rate": 3.627844712182062e-06,
      "loss": 2.2488683700561523,
      "step": 1355
    },
    {
      "epoch": 0.060691255550349196,
      "grad_norm": 0.7925560474395752,
      "learning_rate": 3.6412315930388218e-06,
      "loss": 2.174961280822754,
      "step": 1360
    },
    {
      "epoch": 0.06091438516634313,
      "grad_norm": 0.8825748562812805,
      "learning_rate": 3.6546184738955825e-06,
      "loss": 2.012922668457031,
      "step": 1365
    },
    {
      "epoch": 0.06113751478233706,
      "grad_norm": 0.6754929423332214,
      "learning_rate": 3.6680053547523427e-06,
      "loss": 2.216470146179199,
      "step": 1370
    },
    {
      "epoch": 0.06136064439833099,
      "grad_norm": 0.6616571545600891,
      "learning_rate": 3.6813922356091034e-06,
      "loss": 2.067160415649414,
      "step": 1375
    },
    {
      "epoch": 0.061583774014324924,
      "grad_norm": 0.7033371925354004,
      "learning_rate": 3.6947791164658637e-06,
      "loss": 2.2197628021240234,
      "step": 1380
    },
    {
      "epoch": 0.06180690363031885,
      "grad_norm": 0.7256376147270203,
      "learning_rate": 3.7081659973226235e-06,
      "loss": 2.2435529708862303,
      "step": 1385
    },
    {
      "epoch": 0.06203003324631278,
      "grad_norm": 0.6585844159126282,
      "learning_rate": 3.7215528781793842e-06,
      "loss": 2.113719177246094,
      "step": 1390
    },
    {
      "epoch": 0.06225316286230671,
      "grad_norm": 0.7192341685295105,
      "learning_rate": 3.7349397590361445e-06,
      "loss": 2.2271860122680662,
      "step": 1395
    },
    {
      "epoch": 0.062476292478300645,
      "grad_norm": 0.7313777804374695,
      "learning_rate": 3.7483266398929052e-06,
      "loss": 2.312770462036133,
      "step": 1400
    },
    {
      "epoch": 0.06269942209429458,
      "grad_norm": 0.7106947302818298,
      "learning_rate": 3.7617135207496655e-06,
      "loss": 2.041255760192871,
      "step": 1405
    },
    {
      "epoch": 0.0629225517102885,
      "grad_norm": 0.7265296578407288,
      "learning_rate": 3.775100401606426e-06,
      "loss": 2.2530691146850588,
      "step": 1410
    },
    {
      "epoch": 0.06314568132628244,
      "grad_norm": 0.7085316777229309,
      "learning_rate": 3.788487282463186e-06,
      "loss": 2.03436222076416,
      "step": 1415
    },
    {
      "epoch": 0.06336881094227637,
      "grad_norm": 0.7872986793518066,
      "learning_rate": 3.8018741633199463e-06,
      "loss": 2.0966476440429687,
      "step": 1420
    },
    {
      "epoch": 0.0635919405582703,
      "grad_norm": 0.8961313366889954,
      "learning_rate": 3.8152610441767074e-06,
      "loss": 2.1432809829711914,
      "step": 1425
    },
    {
      "epoch": 0.06381507017426423,
      "grad_norm": 0.7127321362495422,
      "learning_rate": 3.828647925033467e-06,
      "loss": 2.2022947311401366,
      "step": 1430
    },
    {
      "epoch": 0.06403819979025815,
      "grad_norm": 0.8228124976158142,
      "learning_rate": 3.842034805890228e-06,
      "loss": 2.39965705871582,
      "step": 1435
    },
    {
      "epoch": 0.0642613294062521,
      "grad_norm": 0.775909960269928,
      "learning_rate": 3.855421686746988e-06,
      "loss": 2.1313138961791993,
      "step": 1440
    },
    {
      "epoch": 0.06448445902224602,
      "grad_norm": 0.7945120334625244,
      "learning_rate": 3.8688085676037485e-06,
      "loss": 2.074580955505371,
      "step": 1445
    },
    {
      "epoch": 0.06470758863823996,
      "grad_norm": 1.014404535293579,
      "learning_rate": 3.882195448460509e-06,
      "loss": 2.26824893951416,
      "step": 1450
    },
    {
      "epoch": 0.06493071825423388,
      "grad_norm": 0.986544132232666,
      "learning_rate": 3.895582329317269e-06,
      "loss": 2.207651138305664,
      "step": 1455
    },
    {
      "epoch": 0.06515384787022782,
      "grad_norm": 0.7337960004806519,
      "learning_rate": 3.90896921017403e-06,
      "loss": 2.2294179916381838,
      "step": 1460
    },
    {
      "epoch": 0.06537697748622175,
      "grad_norm": 1.2697299718856812,
      "learning_rate": 3.92235609103079e-06,
      "loss": 2.114596366882324,
      "step": 1465
    },
    {
      "epoch": 0.06560010710221567,
      "grad_norm": 0.8556989431381226,
      "learning_rate": 3.93574297188755e-06,
      "loss": 2.21254940032959,
      "step": 1470
    },
    {
      "epoch": 0.06582323671820961,
      "grad_norm": 0.9477710723876953,
      "learning_rate": 3.949129852744311e-06,
      "loss": 1.977022361755371,
      "step": 1475
    },
    {
      "epoch": 0.06604636633420354,
      "grad_norm": 1.7189115285873413,
      "learning_rate": 3.962516733601071e-06,
      "loss": 2.012314224243164,
      "step": 1480
    },
    {
      "epoch": 0.06626949595019747,
      "grad_norm": 0.826131284236908,
      "learning_rate": 3.975903614457832e-06,
      "loss": 2.193516731262207,
      "step": 1485
    },
    {
      "epoch": 0.0664926255661914,
      "grad_norm": 1.0023162364959717,
      "learning_rate": 3.9892904953145914e-06,
      "loss": 2.2499979019165037,
      "step": 1490
    },
    {
      "epoch": 0.06671575518218534,
      "grad_norm": 0.7376646399497986,
      "learning_rate": 4.002677376171352e-06,
      "loss": 2.1386489868164062,
      "step": 1495
    },
    {
      "epoch": 0.06693888479817926,
      "grad_norm": 0.7226489782333374,
      "learning_rate": 4.016064257028113e-06,
      "loss": 2.11199836730957,
      "step": 1500
    },
    {
      "epoch": 0.06716201441417319,
      "grad_norm": 0.8427999019622803,
      "learning_rate": 4.029451137884873e-06,
      "loss": 2.034942626953125,
      "step": 1505
    },
    {
      "epoch": 0.06738514403016713,
      "grad_norm": 0.79557204246521,
      "learning_rate": 4.042838018741633e-06,
      "loss": 2.145208549499512,
      "step": 1510
    },
    {
      "epoch": 0.06760827364616105,
      "grad_norm": 0.7423805594444275,
      "learning_rate": 4.056224899598393e-06,
      "loss": 2.2282032012939452,
      "step": 1515
    },
    {
      "epoch": 0.06783140326215499,
      "grad_norm": 0.8701015114784241,
      "learning_rate": 4.069611780455154e-06,
      "loss": 2.0600002288818358,
      "step": 1520
    },
    {
      "epoch": 0.06805453287814892,
      "grad_norm": 0.9967679977416992,
      "learning_rate": 4.082998661311915e-06,
      "loss": 2.20194034576416,
      "step": 1525
    },
    {
      "epoch": 0.06827766249414285,
      "grad_norm": 1.0514639616012573,
      "learning_rate": 4.096385542168675e-06,
      "loss": 2.250165557861328,
      "step": 1530
    },
    {
      "epoch": 0.06850079211013678,
      "grad_norm": 0.7402302026748657,
      "learning_rate": 4.109772423025435e-06,
      "loss": 2.0741525650024415,
      "step": 1535
    },
    {
      "epoch": 0.0687239217261307,
      "grad_norm": 1.0764678716659546,
      "learning_rate": 4.123159303882195e-06,
      "loss": 2.068874549865723,
      "step": 1540
    },
    {
      "epoch": 0.06894705134212464,
      "grad_norm": 0.9718510508537292,
      "learning_rate": 4.136546184738956e-06,
      "loss": 1.9750284194946288,
      "step": 1545
    },
    {
      "epoch": 0.06917018095811857,
      "grad_norm": 0.7534152269363403,
      "learning_rate": 4.149933065595716e-06,
      "loss": 2.1882530212402345,
      "step": 1550
    },
    {
      "epoch": 0.06939331057411251,
      "grad_norm": 0.8103408813476562,
      "learning_rate": 4.163319946452477e-06,
      "loss": 2.076101875305176,
      "step": 1555
    },
    {
      "epoch": 0.06961644019010643,
      "grad_norm": 0.7058753967285156,
      "learning_rate": 4.176706827309237e-06,
      "loss": 2.1349681854248046,
      "step": 1560
    },
    {
      "epoch": 0.06983956980610036,
      "grad_norm": 0.664568305015564,
      "learning_rate": 4.190093708165997e-06,
      "loss": 2.1840320587158204,
      "step": 1565
    },
    {
      "epoch": 0.0700626994220943,
      "grad_norm": 0.9274365901947021,
      "learning_rate": 4.2034805890227575e-06,
      "loss": 2.0162349700927735,
      "step": 1570
    },
    {
      "epoch": 0.07028582903808822,
      "grad_norm": 0.806480348110199,
| "learning_rate": 4.216867469879518e-06, | |
| "loss": 2.1628509521484376, | |
| "step": 1575 | |
| }, | |
| { | |
| "epoch": 0.07050895865408216, | |
| "grad_norm": 0.9540587067604065, | |
| "learning_rate": 4.230254350736279e-06, | |
| "loss": 2.239594841003418, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 0.07073208827007609, | |
| "grad_norm": 0.8032299876213074, | |
| "learning_rate": 4.243641231593039e-06, | |
| "loss": 2.170275115966797, | |
| "step": 1585 | |
| }, | |
| { | |
| "epoch": 0.07095521788607002, | |
| "grad_norm": 0.9516310095787048, | |
| "learning_rate": 4.2570281124497995e-06, | |
| "loss": 2.035562515258789, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 0.07117834750206395, | |
| "grad_norm": 0.8803707361221313, | |
| "learning_rate": 4.270414993306559e-06, | |
| "loss": 2.1310283660888674, | |
| "step": 1595 | |
| }, | |
| { | |
| "epoch": 0.07140147711805787, | |
| "grad_norm": 1.022939682006836, | |
| "learning_rate": 4.28380187416332e-06, | |
| "loss": 2.1093021392822267, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.07162460673405181, | |
| "grad_norm": 0.8254362344741821, | |
| "learning_rate": 4.297188755020081e-06, | |
| "loss": 1.8178255081176757, | |
| "step": 1605 | |
| }, | |
| { | |
| "epoch": 0.07184773635004574, | |
| "grad_norm": 0.779062807559967, | |
| "learning_rate": 4.3105756358768406e-06, | |
| "loss": 2.261147880554199, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 0.07207086596603968, | |
| "grad_norm": 0.8894750475883484, | |
| "learning_rate": 4.323962516733601e-06, | |
| "loss": 2.0283786773681642, | |
| "step": 1615 | |
| }, | |
| { | |
| "epoch": 0.0722939955820336, | |
| "grad_norm": 0.9776813387870789, | |
| "learning_rate": 4.337349397590362e-06, | |
| "loss": 2.177045440673828, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 0.07251712519802754, | |
| "grad_norm": 1.2759140729904175, | |
| "learning_rate": 4.350736278447122e-06, | |
| "loss": 2.0527717590332033, | |
| "step": 1625 | |
| }, | |
| { | |
| "epoch": 0.07274025481402147, | |
| "grad_norm": 0.9953216314315796, | |
| "learning_rate": 4.3641231593038825e-06, | |
| "loss": 2.05969352722168, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 0.07296338443001539, | |
| "grad_norm": 0.8319527506828308, | |
| "learning_rate": 4.377510040160642e-06, | |
| "loss": 2.003727340698242, | |
| "step": 1635 | |
| }, | |
| { | |
| "epoch": 0.07318651404600933, | |
| "grad_norm": 0.8335089087486267, | |
| "learning_rate": 4.390896921017403e-06, | |
| "loss": 2.0173160552978517, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 0.07340964366200325, | |
| "grad_norm": 1.1071332693099976, | |
| "learning_rate": 4.404283801874164e-06, | |
| "loss": 2.1184307098388673, | |
| "step": 1645 | |
| }, | |
| { | |
| "epoch": 0.0736327732779972, | |
| "grad_norm": 1.2880672216415405, | |
| "learning_rate": 4.4176706827309244e-06, | |
| "loss": 2.0427175521850587, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.07385590289399112, | |
| "grad_norm": 1.1434367895126343, | |
| "learning_rate": 4.431057563587684e-06, | |
| "loss": 2.0279747009277345, | |
| "step": 1655 | |
| }, | |
| { | |
| "epoch": 0.07407903250998506, | |
| "grad_norm": 0.7727090716362, | |
| "learning_rate": 4.444444444444444e-06, | |
| "loss": 2.1546749114990233, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 0.07430216212597898, | |
| "grad_norm": 1.0890729427337646, | |
| "learning_rate": 4.457831325301205e-06, | |
| "loss": 2.1247718811035154, | |
| "step": 1665 | |
| }, | |
| { | |
| "epoch": 0.07452529174197291, | |
| "grad_norm": 1.0179014205932617, | |
| "learning_rate": 4.4712182061579655e-06, | |
| "loss": 2.146767234802246, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 0.07474842135796685, | |
| "grad_norm": 0.745704174041748, | |
| "learning_rate": 4.484605087014726e-06, | |
| "loss": 2.224173736572266, | |
| "step": 1675 | |
| }, | |
| { | |
| "epoch": 0.07497155097396077, | |
| "grad_norm": 0.771865963935852, | |
| "learning_rate": 4.497991967871486e-06, | |
| "loss": 2.2265625, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 0.07519468058995471, | |
| "grad_norm": 1.352388620376587, | |
| "learning_rate": 4.511378848728246e-06, | |
| "loss": 2.261865425109863, | |
| "step": 1685 | |
| }, | |
| { | |
| "epoch": 0.07541781020594863, | |
| "grad_norm": 0.8781198859214783, | |
| "learning_rate": 4.524765729585007e-06, | |
| "loss": 2.0096199035644533, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 0.07564093982194256, | |
| "grad_norm": 1.0159834623336792, | |
| "learning_rate": 4.538152610441767e-06, | |
| "loss": 2.0243366241455076, | |
| "step": 1695 | |
| }, | |
| { | |
| "epoch": 0.0758640694379365, | |
| "grad_norm": 0.8101127743721008, | |
| "learning_rate": 4.551539491298528e-06, | |
| "loss": 2.119925117492676, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.07608719905393042, | |
| "grad_norm": 1.0311274528503418, | |
| "learning_rate": 4.564926372155288e-06, | |
| "loss": 1.9274934768676757, | |
| "step": 1705 | |
| }, | |
| { | |
| "epoch": 0.07631032866992436, | |
| "grad_norm": 1.315838098526001, | |
| "learning_rate": 4.578313253012049e-06, | |
| "loss": 2.058757209777832, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.07653345828591829, | |
| "grad_norm": 1.016562819480896, | |
| "learning_rate": 4.5917001338688084e-06, | |
| "loss": 2.0910503387451174, | |
| "step": 1715 | |
| }, | |
| { | |
| "epoch": 0.07675658790191223, | |
| "grad_norm": 1.1127480268478394, | |
| "learning_rate": 4.605087014725569e-06, | |
| "loss": 2.0274993896484377, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.07697971751790615, | |
| "grad_norm": 0.7386570572853088, | |
| "learning_rate": 4.61847389558233e-06, | |
| "loss": 2.03863468170166, | |
| "step": 1725 | |
| }, | |
| { | |
| "epoch": 0.07720284713390008, | |
| "grad_norm": 0.9372514486312866, | |
| "learning_rate": 4.63186077643909e-06, | |
| "loss": 1.9647506713867187, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.07742597674989402, | |
| "grad_norm": 0.866057276725769, | |
| "learning_rate": 4.64524765729585e-06, | |
| "loss": 2.0213361740112306, | |
| "step": 1735 | |
| }, | |
| { | |
| "epoch": 0.07764910636588794, | |
| "grad_norm": 0.8000425100326538, | |
| "learning_rate": 4.65863453815261e-06, | |
| "loss": 2.009931755065918, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.07787223598188188, | |
| "grad_norm": 1.4270923137664795, | |
| "learning_rate": 4.672021419009371e-06, | |
| "loss": 2.0473087310791014, | |
| "step": 1745 | |
| }, | |
| { | |
| "epoch": 0.0780953655978758, | |
| "grad_norm": 0.7464487552642822, | |
| "learning_rate": 4.685408299866132e-06, | |
| "loss": 2.087724304199219, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.07831849521386974, | |
| "grad_norm": 1.0895863771438599, | |
| "learning_rate": 4.6987951807228915e-06, | |
| "loss": 2.0931013107299803, | |
| "step": 1755 | |
| }, | |
| { | |
| "epoch": 0.07854162482986367, | |
| "grad_norm": 1.2510039806365967, | |
| "learning_rate": 4.712182061579652e-06, | |
| "loss": 2.0668258666992188, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.0787647544458576, | |
| "grad_norm": 0.9064348340034485, | |
| "learning_rate": 4.725568942436412e-06, | |
| "loss": 1.9942918777465821, | |
| "step": 1765 | |
| }, | |
| { | |
| "epoch": 0.07898788406185153, | |
| "grad_norm": 1.0130219459533691, | |
| "learning_rate": 4.738955823293173e-06, | |
| "loss": 2.0953414916992186, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.07921101367784546, | |
| "grad_norm": 1.0138216018676758, | |
| "learning_rate": 4.7523427041499334e-06, | |
| "loss": 2.097544479370117, | |
| "step": 1775 | |
| }, | |
| { | |
| "epoch": 0.0794341432938394, | |
| "grad_norm": 0.9159922003746033, | |
| "learning_rate": 4.765729585006693e-06, | |
| "loss": 2.024347686767578, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.07965727290983332, | |
| "grad_norm": 0.6978763937950134, | |
| "learning_rate": 4.779116465863454e-06, | |
| "loss": 2.032564163208008, | |
| "step": 1785 | |
| }, | |
| { | |
| "epoch": 0.07988040252582726, | |
| "grad_norm": 0.9015496969223022, | |
| "learning_rate": 4.792503346720214e-06, | |
| "loss": 2.091436767578125, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.08010353214182118, | |
| "grad_norm": 0.8910471796989441, | |
| "learning_rate": 4.8058902275769745e-06, | |
| "loss": 2.095396041870117, | |
| "step": 1795 | |
| }, | |
| { | |
| "epoch": 0.08032666175781511, | |
| "grad_norm": 0.8550083637237549, | |
| "learning_rate": 4.819277108433735e-06, | |
| "loss": 2.061022186279297, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.08054979137380905, | |
| "grad_norm": 0.888156533241272, | |
| "learning_rate": 4.832663989290495e-06, | |
| "loss": 2.0070331573486326, | |
| "step": 1805 | |
| }, | |
| { | |
| "epoch": 0.08077292098980297, | |
| "grad_norm": 1.0453161001205444, | |
| "learning_rate": 4.846050870147256e-06, | |
| "loss": 2.0485980987548826, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.08099605060579691, | |
| "grad_norm": 0.835533857345581, | |
| "learning_rate": 4.859437751004016e-06, | |
| "loss": 2.0737443923950196, | |
| "step": 1815 | |
| }, | |
| { | |
| "epoch": 0.08121918022179084, | |
| "grad_norm": 1.3074698448181152, | |
| "learning_rate": 4.872824631860777e-06, | |
| "loss": 2.204619216918945, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.08144230983778476, | |
| "grad_norm": 0.952622652053833, | |
| "learning_rate": 4.886211512717537e-06, | |
| "loss": 1.8864990234375, | |
| "step": 1825 | |
| }, | |
| { | |
| "epoch": 0.0816654394537787, | |
| "grad_norm": 1.069469690322876, | |
| "learning_rate": 4.899598393574298e-06, | |
| "loss": 2.0791854858398438, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.08188856906977263, | |
| "grad_norm": 1.1299378871917725, | |
| "learning_rate": 4.9129852744310576e-06, | |
| "loss": 2.0667266845703125, | |
| "step": 1835 | |
| }, | |
| { | |
| "epoch": 0.08211169868576657, | |
| "grad_norm": 0.7093334794044495, | |
| "learning_rate": 4.926372155287817e-06, | |
| "loss": 1.9636056900024415, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.08233482830176049, | |
| "grad_norm": 1.1765010356903076, | |
| "learning_rate": 4.939759036144579e-06, | |
| "loss": 2.0537750244140627, | |
| "step": 1845 | |
| }, | |
| { | |
| "epoch": 0.08255795791775443, | |
| "grad_norm": 0.8516542315483093, | |
| "learning_rate": 4.953145917001339e-06, | |
| "loss": 2.1657791137695312, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.08278108753374835, | |
| "grad_norm": 1.0495437383651733, | |
| "learning_rate": 4.9665327978580995e-06, | |
| "loss": 2.1656772613525392, | |
| "step": 1855 | |
| }, | |
| { | |
| "epoch": 0.08300421714974228, | |
| "grad_norm": 0.8000126481056213, | |
| "learning_rate": 4.979919678714859e-06, | |
| "loss": 2.020354461669922, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.08322734676573622, | |
| "grad_norm": 1.2887593507766724, | |
| "learning_rate": 4.993306559571619e-06, | |
| "loss": 1.9938453674316405, | |
| "step": 1865 | |
| }, | |
| { | |
| "epoch": 0.08345047638173014, | |
| "grad_norm": 0.8419036865234375, | |
| "learning_rate": 5.006693440428381e-06, | |
| "loss": 2.0833532333374025, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 0.08367360599772408, | |
| "grad_norm": 0.9748121500015259, | |
| "learning_rate": 5.020080321285141e-06, | |
| "loss": 2.0065855026245116, | |
| "step": 1875 | |
| }, | |
| { | |
| "epoch": 0.08389673561371801, | |
| "grad_norm": 0.8467640280723572, | |
| "learning_rate": 5.033467202141901e-06, | |
| "loss": 2.2140008926391603, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 0.08411986522971195, | |
| "grad_norm": 0.8260283470153809, | |
| "learning_rate": 5.046854082998661e-06, | |
| "loss": 1.973113441467285, | |
| "step": 1885 | |
| }, | |
| { | |
| "epoch": 0.08434299484570587, | |
| "grad_norm": 1.2561461925506592, | |
| "learning_rate": 5.060240963855422e-06, | |
| "loss": 1.9699825286865233, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 0.0845661244616998, | |
| "grad_norm": 0.7021331191062927, | |
| "learning_rate": 5.0736278447121826e-06, | |
| "loss": 1.9100543975830078, | |
| "step": 1895 | |
| }, | |
| { | |
| "epoch": 0.08478925407769373, | |
| "grad_norm": 0.9285181760787964, | |
| "learning_rate": 5.087014725568942e-06, | |
| "loss": 2.100686454772949, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.08501238369368766, | |
| "grad_norm": 1.0512628555297852, | |
| "learning_rate": 5.100401606425703e-06, | |
| "loss": 2.092105484008789, | |
| "step": 1905 | |
| }, | |
| { | |
| "epoch": 0.0852355133096816, | |
| "grad_norm": 0.9993311166763306, | |
| "learning_rate": 5.113788487282463e-06, | |
| "loss": 2.178700256347656, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 0.08545864292567552, | |
| "grad_norm": 0.8007418513298035, | |
| "learning_rate": 5.127175368139224e-06, | |
| "loss": 2.061174774169922, | |
| "step": 1915 | |
| }, | |
| { | |
| "epoch": 0.08568177254166946, | |
| "grad_norm": 0.9044376611709595, | |
| "learning_rate": 5.140562248995984e-06, | |
| "loss": 1.905235481262207, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 0.08590490215766339, | |
| "grad_norm": 1.1532313823699951, | |
| "learning_rate": 5.153949129852744e-06, | |
| "loss": 2.0893394470214846, | |
| "step": 1925 | |
| }, | |
| { | |
| "epoch": 0.08612803177365731, | |
| "grad_norm": 1.0485821962356567, | |
| "learning_rate": 5.167336010709505e-06, | |
| "loss": 1.9315324783325196, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 0.08635116138965125, | |
| "grad_norm": 1.1166177988052368, | |
| "learning_rate": 5.180722891566265e-06, | |
| "loss": 1.9648147583007813, | |
| "step": 1935 | |
| }, | |
| { | |
| "epoch": 0.08657429100564518, | |
| "grad_norm": 1.4419307708740234, | |
| "learning_rate": 5.1941097724230254e-06, | |
| "loss": 2.0460626602172853, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 0.08679742062163912, | |
| "grad_norm": 1.0400481224060059, | |
| "learning_rate": 5.207496653279786e-06, | |
| "loss": 2.034720230102539, | |
| "step": 1945 | |
| }, | |
| { | |
| "epoch": 0.08702055023763304, | |
| "grad_norm": 1.041756510734558, | |
| "learning_rate": 5.220883534136547e-06, | |
| "loss": 1.9372877120971679, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.08724367985362697, | |
| "grad_norm": 0.9398750066757202, | |
| "learning_rate": 5.234270414993307e-06, | |
| "loss": 1.9075811386108399, | |
| "step": 1955 | |
| }, | |
| { | |
| "epoch": 0.0874668094696209, | |
| "grad_norm": 0.9996817708015442, | |
| "learning_rate": 5.2476572958500665e-06, | |
| "loss": 2.04040412902832, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 0.08768993908561483, | |
| "grad_norm": 1.1434073448181152, | |
| "learning_rate": 5.261044176706827e-06, | |
| "loss": 2.05865535736084, | |
| "step": 1965 | |
| }, | |
| { | |
| "epoch": 0.08791306870160877, | |
| "grad_norm": 1.1526947021484375, | |
| "learning_rate": 5.274431057563588e-06, | |
| "loss": 2.0584930419921874, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 0.0881361983176027, | |
| "grad_norm": 1.1225361824035645, | |
| "learning_rate": 5.287817938420349e-06, | |
| "loss": 1.869145965576172, | |
| "step": 1975 | |
| }, | |
| { | |
| "epoch": 0.08835932793359663, | |
| "grad_norm": 1.0828170776367188, | |
| "learning_rate": 5.3012048192771085e-06, | |
| "loss": 2.0549264907836915, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 0.08858245754959056, | |
| "grad_norm": 0.9480776786804199, | |
| "learning_rate": 5.314591700133868e-06, | |
| "loss": 1.987673568725586, | |
| "step": 1985 | |
| }, | |
| { | |
| "epoch": 0.08880558716558448, | |
| "grad_norm": 0.9799356460571289, | |
| "learning_rate": 5.327978580990629e-06, | |
| "loss": 1.9657482147216796, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 0.08902871678157842, | |
| "grad_norm": 1.1955480575561523, | |
| "learning_rate": 5.34136546184739e-06, | |
| "loss": 1.9616188049316405, | |
| "step": 1995 | |
| }, | |
| { | |
| "epoch": 0.08925184639757235, | |
| "grad_norm": 0.9528502225875854, | |
| "learning_rate": 5.3547523427041504e-06, | |
| "loss": 2.1342418670654295, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.08947497601356628, | |
| "grad_norm": 1.1190009117126465, | |
| "learning_rate": 5.36813922356091e-06, | |
| "loss": 2.1064796447753906, | |
| "step": 2005 | |
| }, | |
| { | |
| "epoch": 0.08969810562956021, | |
| "grad_norm": 1.0113600492477417, | |
| "learning_rate": 5.381526104417671e-06, | |
| "loss": 1.9123802185058594, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 0.08992123524555415, | |
| "grad_norm": 0.9729359745979309, | |
| "learning_rate": 5.394912985274431e-06, | |
| "loss": 1.9950223922729493, | |
| "step": 2015 | |
| }, | |
| { | |
| "epoch": 0.09014436486154807, | |
| "grad_norm": 1.035287618637085, | |
| "learning_rate": 5.4082998661311915e-06, | |
| "loss": 1.968986701965332, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 0.090367494477542, | |
| "grad_norm": 0.7954384684562683, | |
| "learning_rate": 5.421686746987952e-06, | |
| "loss": 2.075417327880859, | |
| "step": 2025 | |
| }, | |
| { | |
| "epoch": 0.09059062409353594, | |
| "grad_norm": 1.2406619787216187, | |
| "learning_rate": 5.435073627844712e-06, | |
| "loss": 2.097009468078613, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 0.09081375370952986, | |
| "grad_norm": 0.8746753931045532, | |
| "learning_rate": 5.448460508701473e-06, | |
| "loss": 2.04738826751709, | |
| "step": 2035 | |
| }, | |
| { | |
| "epoch": 0.0910368833255238, | |
| "grad_norm": 1.137431025505066, | |
| "learning_rate": 5.461847389558233e-06, | |
| "loss": 1.9636581420898438, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 0.09126001294151773, | |
| "grad_norm": 1.4577034711837769, | |
| "learning_rate": 5.475234270414993e-06, | |
| "loss": 1.8939685821533203, | |
| "step": 2045 | |
| }, | |
| { | |
| "epoch": 0.09148314255751167, | |
| "grad_norm": 1.3480346202850342, | |
| "learning_rate": 5.488621151271754e-06, | |
| "loss": 1.9858078002929687, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.09170627217350559, | |
| "grad_norm": 0.8342795372009277, | |
| "learning_rate": 5.502008032128514e-06, | |
| "loss": 1.7992578506469727, | |
| "step": 2055 | |
| }, | |
| { | |
| "epoch": 0.09192940178949952, | |
| "grad_norm": 1.1130149364471436, | |
| "learning_rate": 5.5153949129852746e-06, | |
| "loss": 2.0617441177368163, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 0.09215253140549345, | |
| "grad_norm": 1.0582903623580933, | |
| "learning_rate": 5.528781793842035e-06, | |
| "loss": 2.081201934814453, | |
| "step": 2065 | |
| }, | |
| { | |
| "epoch": 0.09237566102148738, | |
| "grad_norm": 0.8622370958328247, | |
| "learning_rate": 5.542168674698796e-06, | |
| "loss": 2.184007263183594, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 0.09259879063748132, | |
| "grad_norm": 1.247605800628662, | |
| "learning_rate": 5.555555555555556e-06, | |
| "loss": 2.2668291091918946, | |
| "step": 2075 | |
| }, | |
| { | |
| "epoch": 0.09282192025347524, | |
| "grad_norm": 0.9048439860343933, | |
| "learning_rate": 5.568942436412316e-06, | |
| "loss": 2.0036815643310546, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 0.09304504986946917, | |
| "grad_norm": 1.5477705001831055, | |
| "learning_rate": 5.582329317269076e-06, | |
| "loss": 2.1254167556762695, | |
| "step": 2085 | |
| }, | |
| { | |
| "epoch": 0.0932681794854631, | |
| "grad_norm": 0.898073673248291, | |
| "learning_rate": 5.595716198125837e-06, | |
| "loss": 2.0748970031738283, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 0.09349130910145703, | |
| "grad_norm": 0.976369321346283, | |
| "learning_rate": 5.609103078982598e-06, | |
| "loss": 1.9387639999389648, | |
| "step": 2095 | |
| }, | |
| { | |
| "epoch": 0.09371443871745097, | |
| "grad_norm": 1.229954719543457, | |
| "learning_rate": 5.622489959839358e-06, | |
| "loss": 1.9117536544799805, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.0939375683334449, | |
| "grad_norm": 1.0723400115966797, | |
| "learning_rate": 5.6358768406961175e-06, | |
| "loss": 1.935175323486328, | |
| "step": 2105 | |
| }, | |
| { | |
| "epoch": 0.09416069794943883, | |
| "grad_norm": 1.1824326515197754, | |
| "learning_rate": 5.649263721552878e-06, | |
| "loss": 2.126773452758789, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 0.09438382756543276, | |
| "grad_norm": 0.8257484436035156, | |
| "learning_rate": 5.662650602409639e-06, | |
| "loss": 2.01248836517334, | |
| "step": 2115 | |
| }, | |
| { | |
| "epoch": 0.09460695718142668, | |
| "grad_norm": 1.11427903175354, | |
| "learning_rate": 5.6760374832663996e-06, | |
| "loss": 2.1134754180908204, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 0.09483008679742062, | |
| "grad_norm": 1.5741039514541626, | |
| "learning_rate": 5.689424364123159e-06, | |
| "loss": 2.0824302673339843, | |
| "step": 2125 | |
| }, | |
| { | |
| "epoch": 0.09505321641341455, | |
| "grad_norm": 0.9258787631988525, | |
| "learning_rate": 5.70281124497992e-06, | |
| "loss": 1.9319812774658203, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 0.09527634602940849, | |
| "grad_norm": 1.09774911403656, | |
| "learning_rate": 5.71619812583668e-06, | |
| "loss": 1.9680803298950196, | |
| "step": 2135 | |
| }, | |
| { | |
| "epoch": 0.09549947564540241, | |
| "grad_norm": 0.9626898765563965, | |
| "learning_rate": 5.729585006693441e-06, | |
| "loss": 1.925799560546875, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 0.09572260526139635, | |
| "grad_norm": 0.9046334624290466, | |
| "learning_rate": 5.742971887550201e-06, | |
| "loss": 1.9070989608764648, | |
| "step": 2145 | |
| }, | |
| { | |
| "epoch": 0.09594573487739028, | |
| "grad_norm": 1.0075236558914185, | |
| "learning_rate": 5.756358768406961e-06, | |
| "loss": 1.9298274993896485, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 0.0961688644933842, | |
| "grad_norm": 0.8876293301582336, | |
| "learning_rate": 5.769745649263722e-06, | |
| "loss": 1.995884323120117, | |
| "step": 2155 | |
| }, | |
| { | |
| "epoch": 0.09639199410937814, | |
| "grad_norm": 0.9397478699684143, | |
| "learning_rate": 5.783132530120482e-06, | |
| "loss": 2.1033966064453127, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 0.09661512372537207, | |
| "grad_norm": 1.1887969970703125, | |
| "learning_rate": 5.7965194109772425e-06, | |
| "loss": 2.2321205139160156, | |
| "step": 2165 | |
| }, | |
| { | |
| "epoch": 0.096838253341366, | |
| "grad_norm": 0.9165719151496887, | |
| "learning_rate": 5.809906291834003e-06, | |
| "loss": 1.9332220077514648, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 0.09706138295735993, | |
| "grad_norm": 1.0620633363723755, | |
| "learning_rate": 5.823293172690763e-06, | |
| "loss": 1.942098617553711, | |
| "step": 2175 | |
| }, | |
| { | |
| "epoch": 0.09728451257335387, | |
| "grad_norm": 1.1249985694885254, | |
| "learning_rate": 5.836680053547524e-06, | |
| "loss": 2.004232406616211, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 0.09750764218934779, | |
| "grad_norm": 1.009824514389038, | |
| "learning_rate": 5.8500669344042836e-06, | |
| "loss": 2.060811424255371, | |
| "step": 2185 | |
| }, | |
| { | |
| "epoch": 0.09773077180534172, | |
| "grad_norm": 1.1177529096603394, | |
| "learning_rate": 5.863453815261044e-06, | |
| "loss": 1.9775056838989258, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 0.09795390142133566, | |
| "grad_norm": 0.8883247375488281, | |
| "learning_rate": 5.876840696117805e-06, | |
| "loss": 2.0491350173950194, | |
| "step": 2195 | |
| }, | |
| { | |
| "epoch": 0.09817703103732958, | |
| "grad_norm": 1.0781581401824951, | |
| "learning_rate": 5.890227576974565e-06, | |
| "loss": 2.130450439453125, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.09840016065332352, | |
| "grad_norm": 1.022125244140625, | |
| "learning_rate": 5.9036144578313255e-06, | |
| "loss": 1.9054424285888671, | |
| "step": 2205 | |
| }, | |
| { | |
| "epoch": 0.09862329026931745, | |
| "grad_norm": 1.5722389221191406, | |
| "learning_rate": 5.917001338688085e-06, | |
| "loss": 1.9462669372558594, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 0.09884641988531137, | |
| "grad_norm": 0.8767709732055664, | |
| "learning_rate": 5.930388219544846e-06, | |
| "loss": 1.9716548919677734, | |
| "step": 2215 | |
| }, | |
| { | |
| "epoch": 0.09906954950130531, | |
| "grad_norm": 0.8978337049484253, | |
| "learning_rate": 5.943775100401607e-06, | |
| "loss": 1.932182502746582, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 0.09929267911729923, | |
| "grad_norm": 1.1649725437164307, | |
| "learning_rate": 5.957161981258367e-06, | |
| "loss": 1.949960708618164, | |
| "step": 2225 | |
| }, | |
| { | |
| "epoch": 0.09951580873329317, | |
| "grad_norm": 0.9707964062690735, | |
| "learning_rate": 5.970548862115127e-06, | |
| "loss": 1.9415960311889648, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 0.0997389383492871, | |
| "grad_norm": 0.8075286149978638, | |
| "learning_rate": 5.983935742971887e-06, | |
| "loss": 1.9884584426879883, | |
| "step": 2235 | |
| }, | |
| { | |
| "epoch": 0.09996206796528104, | |
| "grad_norm": 0.919001579284668, | |
| "learning_rate": 5.997322623828648e-06, | |
| "loss": 1.9047340393066405, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 0.10018519758127496, | |
| "grad_norm": 1.1452890634536743, | |
| "learning_rate": 5.999999417648353e-06, | |
| "loss": 1.8911170959472656, | |
| "step": 2245 | |
| }, | |
| { | |
| "epoch": 0.10040832719726889, | |
| "grad_norm": 0.9075614213943481, | |
| "learning_rate": 5.999997051845177e-06, | |
| "loss": 1.9537294387817383, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 0.10063145681326283, | |
| "grad_norm": 0.9411978721618652, | |
| "learning_rate": 5.999992866194927e-06, | |
| "loss": 2.0128080368041994, | |
| "step": 2255 | |
| }, | |
| { | |
| "epoch": 0.10085458642925675, | |
| "grad_norm": 1.7404388189315796, | |
| "learning_rate": 5.999986860700142e-06, | |
| "loss": 1.9837495803833007, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 0.10107771604525069, | |
| "grad_norm": 0.990151047706604, | |
| "learning_rate": 5.999979035364464e-06, | |
| "loss": 1.9975740432739257, | |
| "step": 2265 | |
| }, | |
| { | |
| "epoch": 0.10130084566124462, | |
| "grad_norm": 1.042856216430664, | |
| "learning_rate": 5.9999693901926435e-06, | |
| "loss": 2.016056251525879, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 0.10152397527723855, | |
| "grad_norm": 0.8757387399673462, | |
| "learning_rate": 5.999957925190527e-06, | |
| "loss": 1.8990396499633788, | |
| "step": 2275 | |
| }, | |
| { | |
| "epoch": 0.10174710489323248, | |
| "grad_norm": 1.4858282804489136, | |
| "learning_rate": 5.999944640365074e-06, | |
| "loss": 2.0470705032348633, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 0.1019702345092264, | |
| "grad_norm": 1.0206114053726196, | |
| "learning_rate": 5.999929535724339e-06, | |
| "loss": 2.063922882080078, | |
| "step": 2285 | |
| }, | |
| { | |
| "epoch": 0.10219336412522034, | |
| "grad_norm": 1.2412887811660767, | |
| "learning_rate": 5.999912611277487e-06, | |
| "loss": 1.9137586593627929, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 0.10241649374121427, | |
| "grad_norm": 1.0442379713058472, | |
| "learning_rate": 5.999893867034784e-06, | |
| "loss": 1.9131647109985352, | |
| "step": 2295 | |
| }, | |
| { | |
| "epoch": 0.1026396233572082, | |
| "grad_norm": 1.0354846715927124, | |
| "learning_rate": 5.9998733030076015e-06, | |
| "loss": 1.943178367614746, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.10286275297320213, | |
| "grad_norm": 0.9171436429023743, | |
| "learning_rate": 5.999850919208413e-06, | |
| "loss": 1.9243003845214843, | |
| "step": 2305 | |
| }, | |
| { | |
| "epoch": 0.10308588258919607, | |
| "grad_norm": 1.3587474822998047, | |
| "learning_rate": 5.999826715650797e-06, | |
| "loss": 1.7801437377929688, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 0.10330901220519, | |
| "grad_norm": 1.6726102828979492, | |
| "learning_rate": 5.999800692349435e-06, | |
| "loss": 2.0044769287109374, | |
| "step": 2315 | |
| }, | |
| { | |
| "epoch": 0.10353214182118392, | |
| "grad_norm": 0.909557580947876, | |
| "learning_rate": 5.999772849320115e-06, | |
| "loss": 1.8790843963623047, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 0.10375527143717786, | |
| "grad_norm": 1.1021777391433716, | |
| "learning_rate": 5.9997431865797255e-06, | |
| "loss": 1.9645397186279296, | |
| "step": 2325 | |
| }, | |
| { | |
| "epoch": 0.10397840105317178, | |
| "grad_norm": 1.0416226387023926, | |
| "learning_rate": 5.999711704146261e-06, | |
| "loss": 1.9316511154174805, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 0.10420153066916572, | |
| "grad_norm": 1.014394998550415, | |
| "learning_rate": 5.99967840203882e-06, | |
| "loss": 1.9602357864379882, | |
| "step": 2335 | |
| }, | |
| { | |
| "epoch": 0.10442466028515965, | |
| "grad_norm": 1.0431203842163086, | |
| "learning_rate": 5.999643280277603e-06, | |
| "loss": 1.9662353515625, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 0.10464778990115357, | |
| "grad_norm": 0.9976010322570801, | |
| "learning_rate": 5.999606338883915e-06, | |
| "loss": 1.8156768798828125, | |
| "step": 2345 | |
| }, | |
| { | |
| "epoch": 0.10487091951714751, | |
| "grad_norm": 1.3542364835739136, | |
| "learning_rate": 5.999567577880167e-06, | |
| "loss": 1.9991931915283203, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 0.10509404913314144, | |
| "grad_norm": 0.9118264317512512, | |
| "learning_rate": 5.99952699728987e-06, | |
| "loss": 1.6772186279296875, | |
| "step": 2355 | |
| }, | |
| { | |
| "epoch": 0.10531717874913538, | |
| "grad_norm": 1.431563138961792, | |
| "learning_rate": 5.9994845971376435e-06, | |
| "loss": 2.0468074798583986, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 0.1055403083651293, | |
| "grad_norm": 0.8818404078483582, | |
| "learning_rate": 5.999440377449205e-06, | |
| "loss": 1.9281387329101562, | |
| "step": 2365 | |
| }, | |
| { | |
| "epoch": 0.10576343798112324, | |
| "grad_norm": 0.905672013759613, | |
| "learning_rate": 5.999394338251381e-06, | |
| "loss": 2.0013042449951173, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 0.10598656759711717, | |
| "grad_norm": 1.0039812326431274, | |
| "learning_rate": 5.999346479572099e-06, | |
| "loss": 2.0309635162353517, | |
| "step": 2375 | |
| }, | |
| { | |
| "epoch": 0.10620969721311109, | |
| "grad_norm": 1.1300805807113647, | |
| "learning_rate": 5.999296801440391e-06, | |
| "loss": 2.013144111633301, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 0.10643282682910503, | |
| "grad_norm": 1.052232027053833, | |
| "learning_rate": 5.999245303886392e-06, | |
| "loss": 2.089672660827637, | |
| "step": 2385 | |
| }, | |
| { | |
| "epoch": 0.10665595644509895, | |
| "grad_norm": 0.8320952653884888, | |
| "learning_rate": 5.999191986941342e-06, | |
| "loss": 2.0204065322875975, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 0.10687908606109289, | |
| "grad_norm": 0.8853346705436707, | |
| "learning_rate": 5.999136850637584e-06, | |
| "loss": 1.9383895874023438, | |
| "step": 2395 | |
| }, | |
| { | |
| "epoch": 0.10710221567708682, | |
| "grad_norm": 1.3795983791351318, | |
| "learning_rate": 5.999079895008564e-06, | |
| "loss": 1.808138084411621, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 0.10732534529308076, | |
| "grad_norm": 0.9352984428405762, | |
| "learning_rate": 5.999021120088833e-06, | |
| "loss": 1.9215972900390625, | |
| "step": 2405 | |
| }, | |
| { | |
| "epoch": 0.10754847490907468, | |
| "grad_norm": 1.1358400583267212, | |
| "learning_rate": 5.998960525914043e-06, | |
| "loss": 1.9847070693969726, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 0.1077716045250686, | |
| "grad_norm": 1.2196234464645386, | |
| "learning_rate": 5.998898112520954e-06, | |
| "loss": 2.07110595703125, | |
| "step": 2415 | |
| }, | |
| { | |
| "epoch": 0.10799473414106255, | |
| "grad_norm": 0.9041697978973389, | |
| "learning_rate": 5.998833879947425e-06, | |
| "loss": 2.014092445373535, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 0.10821786375705647, | |
| "grad_norm": 0.9917547106742859, | |
| "learning_rate": 5.998767828232421e-06, | |
| "loss": 2.064949607849121, | |
| "step": 2425 | |
| }, | |
| { | |
| "epoch": 0.10844099337305041, | |
| "grad_norm": 1.4730219841003418, | |
| "learning_rate": 5.99869995741601e-06, | |
| "loss": 1.9224634170532227, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 0.10866412298904433, | |
| "grad_norm": 0.8451655507087708, | |
| "learning_rate": 5.998630267539365e-06, | |
| "loss": 1.9094810485839844, | |
| "step": 2435 | |
| }, | |
| { | |
| "epoch": 0.10888725260503827, | |
| "grad_norm": 0.7860535383224487, | |
| "learning_rate": 5.998558758644759e-06, | |
| "loss": 1.9045148849487306, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 0.1091103822210322, | |
| "grad_norm": 1.1344449520111084, | |
| "learning_rate": 5.998485430775571e-06, | |
| "loss": 1.9020452499389648, | |
| "step": 2445 | |
| }, | |
| { | |
| "epoch": 0.10933351183702612, | |
| "grad_norm": 1.3006445169448853, | |
| "learning_rate": 5.998410283976283e-06, | |
| "loss": 2.0517154693603517, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 0.10955664145302006, | |
| "grad_norm": 0.9452170133590698, | |
| "learning_rate": 5.998333318292481e-06, | |
| "loss": 1.9513717651367188, | |
| "step": 2455 | |
| }, | |
| { | |
| "epoch": 0.10977977106901399, | |
| "grad_norm": 1.0528112649917603, | |
| "learning_rate": 5.9982545337708524e-06, | |
| "loss": 1.9914478302001952, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 0.11000290068500793, | |
| "grad_norm": 1.0746241807937622, | |
| "learning_rate": 5.998173930459191e-06, | |
| "loss": 1.9230745315551758, | |
| "step": 2465 | |
| }, | |
| { | |
| "epoch": 0.11022603030100185, | |
| "grad_norm": 0.9200554490089417, | |
| "learning_rate": 5.99809150840639e-06, | |
| "loss": 1.8942209243774415, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 0.11044915991699578, | |
| "grad_norm": 1.391030192375183, | |
| "learning_rate": 5.998007267662449e-06, | |
| "loss": 1.990131950378418, | |
| "step": 2475 | |
| }, | |
| { | |
| "epoch": 0.11067228953298971, | |
| "grad_norm": 1.0602452754974365, | |
| "learning_rate": 5.997921208278469e-06, | |
| "loss": 1.8233526229858399, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 0.11089541914898364, | |
| "grad_norm": 0.8840234279632568, | |
| "learning_rate": 5.997833330306656e-06, | |
| "loss": 1.9647716522216796, | |
| "step": 2485 | |
| }, | |
| { | |
| "epoch": 0.11111854876497758, | |
| "grad_norm": 1.157009243965149, | |
| "learning_rate": 5.997743633800319e-06, | |
| "loss": 1.918528175354004, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 0.1113416783809715, | |
| "grad_norm": 0.9907488822937012, | |
| "learning_rate": 5.997652118813867e-06, | |
| "loss": 1.8576431274414062, | |
| "step": 2495 | |
| }, | |
| { | |
| "epoch": 0.11156480799696544, | |
| "grad_norm": 1.3741017580032349, | |
| "learning_rate": 5.997558785402816e-06, | |
| "loss": 1.8172170639038085, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 0.11178793761295937, | |
| "grad_norm": 0.8179959058761597, | |
| "learning_rate": 5.997463633623784e-06, | |
| "loss": 1.9822776794433594, | |
| "step": 2505 | |
| }, | |
| { | |
| "epoch": 0.11201106722895329, | |
| "grad_norm": 1.1868292093276978, | |
| "learning_rate": 5.99736666353449e-06, | |
| "loss": 1.974200439453125, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 0.11223419684494723, | |
| "grad_norm": 1.1663192510604858, | |
| "learning_rate": 5.997267875193759e-06, | |
| "loss": 2.0109207153320314, | |
| "step": 2515 | |
| }, | |
| { | |
| "epoch": 0.11245732646094116, | |
| "grad_norm": 1.3568669557571411, | |
| "learning_rate": 5.997167268661517e-06, | |
| "loss": 2.010548973083496, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 0.1126804560769351, | |
| "grad_norm": 1.0239033699035645, | |
| "learning_rate": 5.9970648439987935e-06, | |
| "loss": 1.8068222045898437, | |
| "step": 2525 | |
| }, | |
| { | |
| "epoch": 0.11290358569292902, | |
| "grad_norm": 0.8456482291221619, | |
| "learning_rate": 5.996960601267723e-06, | |
| "loss": 2.0056526184082033, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 0.11312671530892296, | |
| "grad_norm": 1.0484741926193237, | |
| "learning_rate": 5.996854540531537e-06, | |
| "loss": 1.846318817138672, | |
| "step": 2535 | |
| }, | |
| { | |
| "epoch": 0.11334984492491688, | |
| "grad_norm": 1.120490550994873, | |
| "learning_rate": 5.996746661854576e-06, | |
| "loss": 1.828203010559082, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 0.11357297454091081, | |
| "grad_norm": 0.8005931377410889, | |
| "learning_rate": 5.996636965302281e-06, | |
| "loss": 1.9561363220214845, | |
| "step": 2545 | |
| }, | |
| { | |
| "epoch": 0.11379610415690475, | |
| "grad_norm": 1.116512656211853, | |
| "learning_rate": 5.996525450941195e-06, | |
| "loss": 1.7591400146484375, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 0.11401923377289867, | |
| "grad_norm": 1.076014518737793, | |
| "learning_rate": 5.996412118838966e-06, | |
| "loss": 1.8832557678222657, | |
| "step": 2555 | |
| }, | |
| { | |
| "epoch": 0.11424236338889261, | |
| "grad_norm": 1.0401372909545898, | |
| "learning_rate": 5.99629696906434e-06, | |
| "loss": 1.9299108505249023, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 0.11446549300488654, | |
| "grad_norm": 1.72737717628479, | |
| "learning_rate": 5.9961800016871725e-06, | |
| "loss": 1.8442726135253906, | |
| "step": 2565 | |
| }, | |
| { | |
| "epoch": 0.11468862262088048, | |
| "grad_norm": 0.9221048355102539, | |
| "learning_rate": 5.996061216778415e-06, | |
| "loss": 2.000954246520996, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 0.1149117522368744, | |
| "grad_norm": 1.1969536542892456, | |
| "learning_rate": 5.9959406144101255e-06, | |
| "loss": 1.817903709411621, | |
| "step": 2575 | |
| }, | |
| { | |
| "epoch": 0.11513488185286833, | |
| "grad_norm": 0.9273032546043396, | |
| "learning_rate": 5.995818194655463e-06, | |
| "loss": 1.7747377395629882, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 0.11535801146886226, | |
| "grad_norm": 1.052923560142517, | |
| "learning_rate": 5.99569395758869e-06, | |
| "loss": 2.0524295806884765, | |
| "step": 2585 | |
| }, | |
| { | |
| "epoch": 0.11558114108485619, | |
| "grad_norm": 0.8769122362136841, | |
| "learning_rate": 5.99556790328517e-06, | |
| "loss": 1.858469581604004, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 0.11580427070085013, | |
| "grad_norm": 1.0317225456237793, | |
| "learning_rate": 5.995440031821369e-06, | |
| "loss": 1.7618642807006837, | |
| "step": 2595 | |
| }, | |
| { | |
| "epoch": 0.11602740031684405, | |
| "grad_norm": 0.9179856777191162, | |
| "learning_rate": 5.995310343274858e-06, | |
| "loss": 1.8329139709472657, | |
| "step": 2600 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 22409, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 200, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 3.4842494444417057e+19, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
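
The object above is a standard Hugging Face `Trainer` state file: `log_history` holds one record per logging event (every 5 steps, per `"logging_steps": 5`), and the trailer carries the run configuration (`max_steps`, `save_steps`, `train_batch_size`, and the `TrainerControl` callback flags). Below is a minimal sketch for loading and inspecting such a file; the path `trainer_state.json` is the usual filename `Trainer` writes into each checkpoint directory, and the use of `matplotlib` is an added assumption, not part of the original run.

```python
import json

import matplotlib.pyplot as plt

# Path is an assumption; transformers.Trainer writes trainer_state.json
# into each checkpoint-* directory it saves.
with open("trainer_state.json") as f:
    state = json.load(f)

# Training records carry step, loss, learning_rate, and grad_norm;
# the guard skips any eval entries that lack a "loss" key.
train_logs = [rec for rec in state["log_history"] if "loss" in rec]
steps = [rec["step"] for rec in train_logs]
losses = [rec["loss"] for rec in train_logs]
lrs = [rec["learning_rate"] for rec in train_logs]

print(f"logged {len(steps)} records up to step {state['global_step']} "
      f"of {state['max_steps']} (epoch {state['epoch']:.4f})")

# Loss and learning-rate curves on twin y-axes.
fig, ax1 = plt.subplots()
ax1.plot(steps, losses, label="loss")
ax1.set_xlabel("step")
ax1.set_ylabel("loss")
ax2 = ax1.twinx()
ax2.plot(steps, lrs, color="tab:orange", label="learning rate")
ax2.set_ylabel("learning rate")
fig.tight_layout()
plt.show()
```

On this log the plot would show the tail of the linear warmup (learning rate climbing to roughly 6e-06 near step 2240) followed by the start of the decay phase, with loss drifting from about 2.1 near step 1400 down to roughly 1.8 by step 2600.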