| { |
| "best_global_step": 17094, |
| "best_metric": 0.9845099349219312, |
| "best_model_checkpoint": "./xlmr-language-identification/checkpoint-17094", |
| "epoch": 2.0, |
| "eval_steps": 2500, |
| "global_step": 17094, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.011701304695473546, |
| "grad_norm": 0.9905334711074829, |
| "learning_rate": 4.971042471042471e-05, |
| "loss": 16.156219482421875, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.023402609390947092, |
| "grad_norm": 1.069915533065796, |
| "learning_rate": 4.941792441792442e-05, |
| "loss": 5.045557556152343, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.035103914086420634, |
| "grad_norm": 1.8720062971115112, |
| "learning_rate": 4.912542412542413e-05, |
| "loss": 4.602024841308594, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.046805218781894184, |
| "grad_norm": 2.246952772140503, |
| "learning_rate": 4.883292383292383e-05, |
| "loss": 2.9484735107421876, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.058506523477367726, |
| "grad_norm": 1.7273783683776855, |
| "learning_rate": 4.8540423540423544e-05, |
| "loss": 1.8604541015625, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.07020782817284127, |
| "grad_norm": 1.871302843093872, |
| "learning_rate": 4.824792324792325e-05, |
| "loss": 1.2988777160644531, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.08190913286831482, |
| "grad_norm": 1.9515539407730103, |
| "learning_rate": 4.7955422955422954e-05, |
| "loss": 0.9935964965820312, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.09361043756378837, |
| "grad_norm": 1.469983696937561, |
| "learning_rate": 4.7662922662922666e-05, |
| "loss": 0.803764877319336, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.1053117422592619, |
| "grad_norm": 1.6788742542266846, |
| "learning_rate": 4.737042237042237e-05, |
| "loss": 0.6818081665039063, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.11701304695473545, |
| "grad_norm": 1.4318715333938599, |
| "learning_rate": 4.707792207792208e-05, |
| "loss": 0.5820805740356445, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.128714351650209, |
| "grad_norm": 1.9161852598190308, |
| "learning_rate": 4.678542178542179e-05, |
| "loss": 0.5121846389770508, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.14041565634568254, |
| "grad_norm": 1.6554639339447021, |
| "learning_rate": 4.6492921492921494e-05, |
| "loss": 0.4561897277832031, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.1521169610411561, |
| "grad_norm": 1.839687466621399, |
| "learning_rate": 4.62004212004212e-05, |
| "loss": 0.4188547134399414, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.16381826573662964, |
| "grad_norm": 1.735112190246582, |
| "learning_rate": 4.590792090792091e-05, |
| "loss": 0.3874264144897461, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.17551957043210317, |
| "grad_norm": 2.1364924907684326, |
| "learning_rate": 4.5615420615420616e-05, |
| "loss": 0.36660758972167967, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.18722087512757674, |
| "grad_norm": 1.5190038681030273, |
| "learning_rate": 4.532292032292033e-05, |
| "loss": 0.3320538330078125, |
| "step": 1600 |
| }, |
| { |
| "epoch": 0.19892217982305027, |
| "grad_norm": 1.4698094129562378, |
| "learning_rate": 4.503042003042003e-05, |
| "loss": 0.3180769729614258, |
| "step": 1700 |
| }, |
| { |
| "epoch": 0.2106234845185238, |
| "grad_norm": 1.4362446069717407, |
| "learning_rate": 4.4737919737919745e-05, |
| "loss": 0.3039104461669922, |
| "step": 1800 |
| }, |
| { |
| "epoch": 0.22232478921399737, |
| "grad_norm": 1.5884244441986084, |
| "learning_rate": 4.444541944541944e-05, |
| "loss": 0.2853180694580078, |
| "step": 1900 |
| }, |
| { |
| "epoch": 0.2340260939094709, |
| "grad_norm": 1.14271879196167, |
| "learning_rate": 4.4152919152919155e-05, |
| "loss": 0.2749899673461914, |
| "step": 2000 |
| }, |
| { |
| "epoch": 0.24572739860494444, |
| "grad_norm": 1.3998445272445679, |
| "learning_rate": 4.386041886041886e-05, |
| "loss": 0.2584847450256348, |
| "step": 2100 |
| }, |
| { |
| "epoch": 0.257428703300418, |
| "grad_norm": 1.126080870628357, |
| "learning_rate": 4.356791856791857e-05, |
| "loss": 0.2443878746032715, |
| "step": 2200 |
| }, |
| { |
| "epoch": 0.26913000799589154, |
| "grad_norm": 1.6860474348068237, |
| "learning_rate": 4.327541827541828e-05, |
| "loss": 0.23169761657714844, |
| "step": 2300 |
| }, |
| { |
| "epoch": 0.2808313126913651, |
| "grad_norm": 1.8752473592758179, |
| "learning_rate": 4.298291798291799e-05, |
| "loss": 0.22600336074829103, |
| "step": 2400 |
| }, |
| { |
| "epoch": 0.2925326173868386, |
| "grad_norm": 1.6066241264343262, |
| "learning_rate": 4.2690417690417694e-05, |
| "loss": 0.2185952568054199, |
| "step": 2500 |
| }, |
| { |
| "epoch": 0.2925326173868386, |
| "eval_accuracy": 0.8560363636363636, |
| "eval_f1": 0.9651478556858426, |
| "eval_loss": 0.039476945996284485, |
| "eval_precision": 0.9777755239169846, |
| "eval_recall": 0.9528421936966432, |
| "eval_runtime": 165.9297, |
| "eval_samples_per_second": 165.733, |
| "eval_steps_per_second": 5.183, |
| "step": 2500 |
| }, |
| { |
| "epoch": 0.3042339220823122, |
| "grad_norm": 1.2747637033462524, |
| "learning_rate": 4.2397917397917406e-05, |
| "loss": 0.22376758575439454, |
| "step": 2600 |
| }, |
| { |
| "epoch": 0.31593522677778574, |
| "grad_norm": 1.5281429290771484, |
| "learning_rate": 4.2105417105417104e-05, |
| "loss": 0.21567138671875, |
| "step": 2700 |
| }, |
| { |
| "epoch": 0.3276365314732593, |
| "grad_norm": 1.0241914987564087, |
| "learning_rate": 4.1812916812916816e-05, |
| "loss": 0.20220571517944336, |
| "step": 2800 |
| }, |
| { |
| "epoch": 0.3393378361687328, |
| "grad_norm": 1.9281812906265259, |
| "learning_rate": 4.152041652041652e-05, |
| "loss": 0.19900651931762695, |
| "step": 2900 |
| }, |
| { |
| "epoch": 0.35103914086420634, |
| "grad_norm": 1.0900094509124756, |
| "learning_rate": 4.122791622791623e-05, |
| "loss": 0.19108434677124023, |
| "step": 3000 |
| }, |
| { |
| "epoch": 0.3627404455596799, |
| "grad_norm": 1.9392260313034058, |
| "learning_rate": 4.093541593541594e-05, |
| "loss": 0.1862166404724121, |
| "step": 3100 |
| }, |
| { |
| "epoch": 0.37444175025515347, |
| "grad_norm": 1.4907201528549194, |
| "learning_rate": 4.064291564291564e-05, |
| "loss": 0.18224323272705079, |
| "step": 3200 |
| }, |
| { |
| "epoch": 0.386143054950627, |
| "grad_norm": 1.0142017602920532, |
| "learning_rate": 4.0350415350415355e-05, |
| "loss": 0.17916433334350587, |
| "step": 3300 |
| }, |
| { |
| "epoch": 0.39784435964610054, |
| "grad_norm": 1.3136364221572876, |
| "learning_rate": 4.005791505791506e-05, |
| "loss": 0.17482011795043945, |
| "step": 3400 |
| }, |
| { |
| "epoch": 0.4095456643415741, |
| "grad_norm": 1.7887938022613525, |
| "learning_rate": 3.9765414765414765e-05, |
| "loss": 0.16918930053710937, |
| "step": 3500 |
| }, |
| { |
| "epoch": 0.4212469690370476, |
| "grad_norm": 1.368762731552124, |
| "learning_rate": 3.947291447291447e-05, |
| "loss": 0.1643056869506836, |
| "step": 3600 |
| }, |
| { |
| "epoch": 0.43294827373252115, |
| "grad_norm": 1.666864275932312, |
| "learning_rate": 3.918041418041418e-05, |
| "loss": 0.16255182266235352, |
| "step": 3700 |
| }, |
| { |
| "epoch": 0.44464957842799474, |
| "grad_norm": 1.4107720851898193, |
| "learning_rate": 3.888791388791389e-05, |
| "loss": 0.15760359764099122, |
| "step": 3800 |
| }, |
| { |
| "epoch": 0.4563508831234683, |
| "grad_norm": 1.6475497484207153, |
| "learning_rate": 3.85954135954136e-05, |
| "loss": 0.1564300537109375, |
| "step": 3900 |
| }, |
| { |
| "epoch": 0.4680521878189418, |
| "grad_norm": 1.3566536903381348, |
| "learning_rate": 3.8302913302913305e-05, |
| "loss": 0.15231061935424806, |
| "step": 4000 |
| }, |
| { |
| "epoch": 0.47975349251441535, |
| "grad_norm": 0.9237350225448608, |
| "learning_rate": 3.801041301041301e-05, |
| "loss": 0.14665995597839354, |
| "step": 4100 |
| }, |
| { |
| "epoch": 0.4914547972098889, |
| "grad_norm": 2.0160303115844727, |
| "learning_rate": 3.7717912717912715e-05, |
| "loss": 0.1439968776702881, |
| "step": 4200 |
| }, |
| { |
| "epoch": 0.5031561019053624, |
| "grad_norm": 1.495821237564087, |
| "learning_rate": 3.742541242541243e-05, |
| "loss": 0.14349424362182617, |
| "step": 4300 |
| }, |
| { |
| "epoch": 0.514857406600836, |
| "grad_norm": 1.1643633842468262, |
| "learning_rate": 3.713291213291213e-05, |
| "loss": 0.1439281463623047, |
| "step": 4400 |
| }, |
| { |
| "epoch": 0.5265587112963095, |
| "grad_norm": 1.2707788944244385, |
| "learning_rate": 3.6840411840411844e-05, |
| "loss": 0.14324712753295898, |
| "step": 4500 |
| }, |
| { |
| "epoch": 0.5382600159917831, |
| "grad_norm": 0.9467172026634216, |
| "learning_rate": 3.654791154791155e-05, |
| "loss": 0.136934118270874, |
| "step": 4600 |
| }, |
| { |
| "epoch": 0.5499613206872567, |
| "grad_norm": 1.4451957941055298, |
| "learning_rate": 3.625541125541126e-05, |
| "loss": 0.13631460189819336, |
| "step": 4700 |
| }, |
| { |
| "epoch": 0.5616626253827302, |
| "grad_norm": 1.1038591861724854, |
| "learning_rate": 3.5962910962910966e-05, |
| "loss": 0.13634085655212402, |
| "step": 4800 |
| }, |
| { |
| "epoch": 0.5733639300782037, |
| "grad_norm": 1.2149685621261597, |
| "learning_rate": 3.567041067041067e-05, |
| "loss": 0.13158455848693848, |
| "step": 4900 |
| }, |
| { |
| "epoch": 0.5850652347736772, |
| "grad_norm": 1.421316146850586, |
| "learning_rate": 3.5377910377910376e-05, |
| "loss": 0.13308174133300782, |
| "step": 5000 |
| }, |
| { |
| "epoch": 0.5850652347736772, |
| "eval_accuracy": 0.9069818181818182, |
| "eval_f1": 0.9759635668726578, |
| "eval_loss": 0.02319982275366783, |
| "eval_precision": 0.9802819359100597, |
| "eval_recall": 0.971683077797408, |
| "eval_runtime": 164.1216, |
| "eval_samples_per_second": 167.559, |
| "eval_steps_per_second": 5.24, |
| "step": 5000 |
| }, |
| { |
| "epoch": 0.5967665394691508, |
| "grad_norm": 1.9884291887283325, |
| "learning_rate": 3.508541008541009e-05, |
| "loss": 0.12942585945129395, |
| "step": 5100 |
| }, |
| { |
| "epoch": 0.6084678441646244, |
| "grad_norm": 1.1202914714813232, |
| "learning_rate": 3.479290979290979e-05, |
| "loss": 0.12913961410522462, |
| "step": 5200 |
| }, |
| { |
| "epoch": 0.6201691488600979, |
| "grad_norm": 1.470561146736145, |
| "learning_rate": 3.4500409500409505e-05, |
| "loss": 0.12901023864746095, |
| "step": 5300 |
| }, |
| { |
| "epoch": 0.6318704535555715, |
| "grad_norm": 0.930363655090332, |
| "learning_rate": 3.420790920790921e-05, |
| "loss": 0.12278788566589355, |
| "step": 5400 |
| }, |
| { |
| "epoch": 0.643571758251045, |
| "grad_norm": 1.7242929935455322, |
| "learning_rate": 3.391540891540892e-05, |
| "loss": 0.12592774391174316, |
| "step": 5500 |
| }, |
| { |
| "epoch": 0.6552730629465185, |
| "grad_norm": 1.8954977989196777, |
| "learning_rate": 3.362290862290862e-05, |
| "loss": 0.12447975158691406, |
| "step": 5600 |
| }, |
| { |
| "epoch": 0.666974367641992, |
| "grad_norm": 1.352124571800232, |
| "learning_rate": 3.333040833040833e-05, |
| "loss": 0.1232685947418213, |
| "step": 5700 |
| }, |
| { |
| "epoch": 0.6786756723374656, |
| "grad_norm": 1.2466479539871216, |
| "learning_rate": 3.303790803790804e-05, |
| "loss": 0.11894026756286621, |
| "step": 5800 |
| }, |
| { |
| "epoch": 0.6903769770329392, |
| "grad_norm": 1.429012656211853, |
| "learning_rate": 3.274540774540775e-05, |
| "loss": 0.11655033111572266, |
| "step": 5900 |
| }, |
| { |
| "epoch": 0.7020782817284127, |
| "grad_norm": 1.4804608821868896, |
| "learning_rate": 3.2452907452907454e-05, |
| "loss": 0.11837745666503906, |
| "step": 6000 |
| }, |
| { |
| "epoch": 0.7137795864238863, |
| "grad_norm": 1.3687437772750854, |
| "learning_rate": 3.2160407160407166e-05, |
| "loss": 0.11587305068969726, |
| "step": 6100 |
| }, |
| { |
| "epoch": 0.7254808911193598, |
| "grad_norm": 1.4826337099075317, |
| "learning_rate": 3.186790686790687e-05, |
| "loss": 0.11750144958496093, |
| "step": 6200 |
| }, |
| { |
| "epoch": 0.7371821958148334, |
| "grad_norm": 0.959021270275116, |
| "learning_rate": 3.1575406575406577e-05, |
| "loss": 0.11341402053833008, |
| "step": 6300 |
| }, |
| { |
| "epoch": 0.7488835005103069, |
| "grad_norm": 0.8908078670501709, |
| "learning_rate": 3.128290628290628e-05, |
| "loss": 0.11524638175964355, |
| "step": 6400 |
| }, |
| { |
| "epoch": 0.7605848052057804, |
| "grad_norm": 0.9967349171638489, |
| "learning_rate": 3.0990405990405994e-05, |
| "loss": 0.1124759292602539, |
| "step": 6500 |
| }, |
| { |
| "epoch": 0.772286109901254, |
| "grad_norm": 1.4200743436813354, |
| "learning_rate": 3.06979056979057e-05, |
| "loss": 0.10863906860351563, |
| "step": 6600 |
| }, |
| { |
| "epoch": 0.7839874145967275, |
| "grad_norm": 1.6694557666778564, |
| "learning_rate": 3.0405405405405407e-05, |
| "loss": 0.11165953636169433, |
| "step": 6700 |
| }, |
| { |
| "epoch": 0.7956887192922011, |
| "grad_norm": 1.0781575441360474, |
| "learning_rate": 3.0112905112905116e-05, |
| "loss": 0.1116695213317871, |
| "step": 6800 |
| }, |
| { |
| "epoch": 0.8073900239876747, |
| "grad_norm": 1.3060591220855713, |
| "learning_rate": 2.9820404820404824e-05, |
| "loss": 0.1075587272644043, |
| "step": 6900 |
| }, |
| { |
| "epoch": 0.8190913286831482, |
| "grad_norm": 1.448541283607483, |
| "learning_rate": 2.9527904527904533e-05, |
| "loss": 0.11105222702026367, |
| "step": 7000 |
| }, |
| { |
| "epoch": 0.8307926333786217, |
| "grad_norm": 1.8385331630706787, |
| "learning_rate": 2.9235404235404234e-05, |
| "loss": 0.10690691947937012, |
| "step": 7100 |
| }, |
| { |
| "epoch": 0.8424939380740952, |
| "grad_norm": 1.4097715616226196, |
| "learning_rate": 2.8942903942903943e-05, |
| "loss": 0.10494094848632812, |
| "step": 7200 |
| }, |
| { |
| "epoch": 0.8541952427695688, |
| "grad_norm": 1.7280794382095337, |
| "learning_rate": 2.865040365040365e-05, |
| "loss": 0.10908853530883789, |
| "step": 7300 |
| }, |
| { |
| "epoch": 0.8658965474650423, |
| "grad_norm": 1.0693798065185547, |
| "learning_rate": 2.835790335790336e-05, |
| "loss": 0.10775123596191406, |
| "step": 7400 |
| }, |
| { |
| "epoch": 0.8775978521605159, |
| "grad_norm": 1.5537015199661255, |
| "learning_rate": 2.806540306540307e-05, |
| "loss": 0.10442270278930664, |
| "step": 7500 |
| }, |
| { |
| "epoch": 0.8775978521605159, |
| "eval_accuracy": 0.9217818181818181, |
| "eval_f1": 0.9801159592655914, |
| "eval_loss": 0.017201833426952362, |
| "eval_precision": 0.982831231753525, |
| "eval_recall": 0.9774156484352801, |
| "eval_runtime": 185.5814, |
| "eval_samples_per_second": 148.183, |
| "eval_steps_per_second": 4.634, |
| "step": 7500 |
| }, |
| { |
| "epoch": 0.8892991568559895, |
| "grad_norm": 1.0237928628921509, |
| "learning_rate": 2.7772902772902777e-05, |
| "loss": 0.10509194374084473, |
| "step": 7600 |
| }, |
| { |
| "epoch": 0.901000461551463, |
| "grad_norm": 1.5233043432235718, |
| "learning_rate": 2.7480402480402486e-05, |
| "loss": 0.10594425201416016, |
| "step": 7700 |
| }, |
| { |
| "epoch": 0.9127017662469366, |
| "grad_norm": 0.8517800569534302, |
| "learning_rate": 2.7187902187902187e-05, |
| "loss": 0.10539012908935547, |
| "step": 7800 |
| }, |
| { |
| "epoch": 0.92440307094241, |
| "grad_norm": 1.3082163333892822, |
| "learning_rate": 2.6895401895401896e-05, |
| "loss": 0.09960749626159668, |
| "step": 7900 |
| }, |
| { |
| "epoch": 0.9361043756378836, |
| "grad_norm": 0.8803576827049255, |
| "learning_rate": 2.6602901602901604e-05, |
| "loss": 0.10325809478759766, |
| "step": 8000 |
| }, |
| { |
| "epoch": 0.9478056803333572, |
| "grad_norm": 1.5242176055908203, |
| "learning_rate": 2.6310401310401313e-05, |
| "loss": 0.10169939041137695, |
| "step": 8100 |
| }, |
| { |
| "epoch": 0.9595069850288307, |
| "grad_norm": 1.2510749101638794, |
| "learning_rate": 2.601790101790102e-05, |
| "loss": 0.100986909866333, |
| "step": 8200 |
| }, |
| { |
| "epoch": 0.9712082897243043, |
| "grad_norm": 1.4342858791351318, |
| "learning_rate": 2.5725400725400726e-05, |
| "loss": 0.09906567573547363, |
| "step": 8300 |
| }, |
| { |
| "epoch": 0.9829095944197778, |
| "grad_norm": 1.1943491697311401, |
| "learning_rate": 2.5432900432900435e-05, |
| "loss": 0.09981002807617187, |
| "step": 8400 |
| }, |
| { |
| "epoch": 0.9946108991152514, |
| "grad_norm": 1.0533969402313232, |
| "learning_rate": 2.5140400140400143e-05, |
| "loss": 0.10099024772644043, |
| "step": 8500 |
| }, |
| { |
| "epoch": 1.006201691488601, |
| "grad_norm": 0.952810525894165, |
| "learning_rate": 2.484789984789985e-05, |
| "loss": 0.09148699760437012, |
| "step": 8600 |
| }, |
| { |
| "epoch": 1.0179029961840744, |
| "grad_norm": 0.9051135182380676, |
| "learning_rate": 2.4555399555399554e-05, |
| "loss": 0.08493613243103028, |
| "step": 8700 |
| }, |
| { |
| "epoch": 1.0296043008795481, |
| "grad_norm": 1.4660611152648926, |
| "learning_rate": 2.4262899262899262e-05, |
| "loss": 0.08283645629882813, |
| "step": 8800 |
| }, |
| { |
| "epoch": 1.0413056055750216, |
| "grad_norm": 1.2308636903762817, |
| "learning_rate": 2.397039897039897e-05, |
| "loss": 0.08224470138549805, |
| "step": 8900 |
| }, |
| { |
| "epoch": 1.053006910270495, |
| "grad_norm": 1.180936574935913, |
| "learning_rate": 2.367789867789868e-05, |
| "loss": 0.08456890106201172, |
| "step": 9000 |
| }, |
| { |
| "epoch": 1.0647082149659688, |
| "grad_norm": 1.1681982278823853, |
| "learning_rate": 2.3385398385398384e-05, |
| "loss": 0.08683476448059083, |
| "step": 9100 |
| }, |
| { |
| "epoch": 1.0764095196614423, |
| "grad_norm": 1.1853655576705933, |
| "learning_rate": 2.3092898092898093e-05, |
| "loss": 0.08339482307434082, |
| "step": 9200 |
| }, |
| { |
| "epoch": 1.0881108243569158, |
| "grad_norm": 0.7993655204772949, |
| "learning_rate": 2.28003978003978e-05, |
| "loss": 0.08154037475585937, |
| "step": 9300 |
| }, |
| { |
| "epoch": 1.0998121290523892, |
| "grad_norm": 1.3015902042388916, |
| "learning_rate": 2.250789750789751e-05, |
| "loss": 0.0843494701385498, |
| "step": 9400 |
| }, |
| { |
| "epoch": 1.111513433747863, |
| "grad_norm": 0.927474319934845, |
| "learning_rate": 2.2215397215397215e-05, |
| "loss": 0.08107766151428222, |
| "step": 9500 |
| }, |
| { |
| "epoch": 1.1232147384433364, |
| "grad_norm": 1.2510508298873901, |
| "learning_rate": 2.1922896922896923e-05, |
| "loss": 0.08390594482421875, |
| "step": 9600 |
| }, |
| { |
| "epoch": 1.13491604313881, |
| "grad_norm": 1.012722373008728, |
| "learning_rate": 2.1630396630396632e-05, |
| "loss": 0.08134162902832032, |
| "step": 9700 |
| }, |
| { |
| "epoch": 1.1466173478342836, |
| "grad_norm": 1.8160477876663208, |
| "learning_rate": 2.1337896337896337e-05, |
| "loss": 0.08689288139343261, |
| "step": 9800 |
| }, |
| { |
| "epoch": 1.158318652529757, |
| "grad_norm": 1.6144487857818604, |
| "learning_rate": 2.1045396045396046e-05, |
| "loss": 0.08774213790893555, |
| "step": 9900 |
| }, |
| { |
| "epoch": 1.1700199572252306, |
| "grad_norm": 1.088478922843933, |
| "learning_rate": 2.0752895752895754e-05, |
| "loss": 0.08510435104370118, |
| "step": 10000 |
| }, |
| { |
| "epoch": 1.1700199572252306, |
| "eval_accuracy": 0.9310545454545455, |
| "eval_f1": 0.9822452239157081, |
| "eval_loss": 0.0149807995185256, |
| "eval_precision": 0.9843658878562654, |
| "eval_recall": 0.9801336776170297, |
| "eval_runtime": 187.3637, |
| "eval_samples_per_second": 146.773, |
| "eval_steps_per_second": 4.59, |
| "step": 10000 |
| }, |
| { |
| "epoch": 1.1817212619207043, |
| "grad_norm": 1.2410812377929688, |
| "learning_rate": 2.0460395460395463e-05, |
| "loss": 0.08356905937194824, |
| "step": 10100 |
| }, |
| { |
| "epoch": 1.1934225666161777, |
| "grad_norm": 0.9291555881500244, |
| "learning_rate": 2.0167895167895168e-05, |
| "loss": 0.08259629249572754, |
| "step": 10200 |
| }, |
| { |
| "epoch": 1.2051238713116512, |
| "grad_norm": 1.146966576576233, |
| "learning_rate": 1.9875394875394876e-05, |
| "loss": 0.08215347290039063, |
| "step": 10300 |
| }, |
| { |
| "epoch": 1.2168251760071247, |
| "grad_norm": 0.9125510454177856, |
| "learning_rate": 1.9582894582894585e-05, |
| "loss": 0.08209040641784668, |
| "step": 10400 |
| }, |
| { |
| "epoch": 1.2285264807025984, |
| "grad_norm": 0.906428337097168, |
| "learning_rate": 1.9290394290394293e-05, |
| "loss": 0.08041071891784668, |
| "step": 10500 |
| }, |
| { |
| "epoch": 1.2402277853980719, |
| "grad_norm": 1.0091209411621094, |
| "learning_rate": 1.8997893997894e-05, |
| "loss": 0.07910086631774903, |
| "step": 10600 |
| }, |
| { |
| "epoch": 1.2519290900935454, |
| "grad_norm": 0.9152646660804749, |
| "learning_rate": 1.8705393705393707e-05, |
| "loss": 0.08119074821472168, |
| "step": 10700 |
| }, |
| { |
| "epoch": 1.2636303947890188, |
| "grad_norm": 1.0613765716552734, |
| "learning_rate": 1.8412893412893415e-05, |
| "loss": 0.07813576221466065, |
| "step": 10800 |
| }, |
| { |
| "epoch": 1.2753316994844925, |
| "grad_norm": 0.7284146547317505, |
| "learning_rate": 1.8120393120393124e-05, |
| "loss": 0.08263915061950683, |
| "step": 10900 |
| }, |
| { |
| "epoch": 1.287033004179966, |
| "grad_norm": 0.8245161771774292, |
| "learning_rate": 1.782789282789283e-05, |
| "loss": 0.07991621494293213, |
| "step": 11000 |
| }, |
| { |
| "epoch": 1.2987343088754395, |
| "grad_norm": 0.9131597876548767, |
| "learning_rate": 1.7535392535392538e-05, |
| "loss": 0.08203693389892579, |
| "step": 11100 |
| }, |
| { |
| "epoch": 1.3104356135709132, |
| "grad_norm": 1.4919288158416748, |
| "learning_rate": 1.7242892242892246e-05, |
| "loss": 0.07703531742095947, |
| "step": 11200 |
| }, |
| { |
| "epoch": 1.3221369182663867, |
| "grad_norm": 0.9880580306053162, |
| "learning_rate": 1.695039195039195e-05, |
| "loss": 0.07978516101837158, |
| "step": 11300 |
| }, |
| { |
| "epoch": 1.3338382229618602, |
| "grad_norm": 1.483494758605957, |
| "learning_rate": 1.665789165789166e-05, |
| "loss": 0.0792800235748291, |
| "step": 11400 |
| }, |
| { |
| "epoch": 1.3455395276573339, |
| "grad_norm": 1.2706217765808105, |
| "learning_rate": 1.6365391365391368e-05, |
| "loss": 0.08085798263549805, |
| "step": 11500 |
| }, |
| { |
| "epoch": 1.3572408323528073, |
| "grad_norm": 0.9506115913391113, |
| "learning_rate": 1.6072891072891073e-05, |
| "loss": 0.0732752513885498, |
| "step": 11600 |
| }, |
| { |
| "epoch": 1.3689421370482808, |
| "grad_norm": 0.7671661972999573, |
| "learning_rate": 1.5780390780390782e-05, |
| "loss": 0.07732769966125488, |
| "step": 11700 |
| }, |
| { |
| "epoch": 1.3806434417437545, |
| "grad_norm": 1.3610262870788574, |
| "learning_rate": 1.5487890487890487e-05, |
| "loss": 0.07633553028106689, |
| "step": 11800 |
| }, |
| { |
| "epoch": 1.392344746439228, |
| "grad_norm": 1.1439802646636963, |
| "learning_rate": 1.5195390195390197e-05, |
| "loss": 0.07713084220886231, |
| "step": 11900 |
| }, |
| { |
| "epoch": 1.4040460511347015, |
| "grad_norm": 1.389791488647461, |
| "learning_rate": 1.4902889902889906e-05, |
| "loss": 0.0749136209487915, |
| "step": 12000 |
| }, |
| { |
| "epoch": 1.4157473558301752, |
| "grad_norm": 0.6515536308288574, |
| "learning_rate": 1.461038961038961e-05, |
| "loss": 0.07383899688720703, |
| "step": 12100 |
| }, |
| { |
| "epoch": 1.4274486605256487, |
| "grad_norm": 0.7534876465797424, |
| "learning_rate": 1.431788931788932e-05, |
| "loss": 0.07853510379791259, |
| "step": 12200 |
| }, |
| { |
| "epoch": 1.4391499652211222, |
| "grad_norm": 0.6730746626853943, |
| "learning_rate": 1.4025389025389026e-05, |
| "loss": 0.0697617483139038, |
| "step": 12300 |
| }, |
| { |
| "epoch": 1.4508512699165956, |
| "grad_norm": 1.8400371074676514, |
| "learning_rate": 1.3732888732888733e-05, |
| "loss": 0.07848044872283935, |
| "step": 12400 |
| }, |
| { |
| "epoch": 1.4625525746120691, |
| "grad_norm": 1.2114777565002441, |
| "learning_rate": 1.3440388440388441e-05, |
| "loss": 0.0783261775970459, |
| "step": 12500 |
| }, |
| { |
| "epoch": 1.4625525746120691, |
| "eval_accuracy": 0.9353818181818182, |
| "eval_f1": 0.9834085799751036, |
| "eval_loss": 0.013636507093906403, |
| "eval_precision": 0.9859303596264654, |
| "eval_recall": 0.9808996676591591, |
| "eval_runtime": 167.3633, |
| "eval_samples_per_second": 164.313, |
| "eval_steps_per_second": 5.139, |
| "step": 12500 |
| }, |
| { |
| "epoch": 1.4742538793075428, |
| "grad_norm": 1.7295293807983398, |
| "learning_rate": 1.3147888147888148e-05, |
| "loss": 0.074767746925354, |
| "step": 12600 |
| }, |
| { |
| "epoch": 1.4859551840030163, |
| "grad_norm": 1.08072030544281, |
| "learning_rate": 1.2855387855387857e-05, |
| "loss": 0.07684030532836914, |
| "step": 12700 |
| }, |
| { |
| "epoch": 1.4976564886984898, |
| "grad_norm": 1.334511637687683, |
| "learning_rate": 1.2562887562887562e-05, |
| "loss": 0.07815152645111084, |
| "step": 12800 |
| }, |
| { |
| "epoch": 1.5093577933939635, |
| "grad_norm": 0.8732834458351135, |
| "learning_rate": 1.227038727038727e-05, |
| "loss": 0.07617097854614258, |
| "step": 12900 |
| }, |
| { |
| "epoch": 1.521059098089437, |
| "grad_norm": 1.038492202758789, |
| "learning_rate": 1.1977886977886979e-05, |
| "loss": 0.07482788562774659, |
| "step": 13000 |
| }, |
| { |
| "epoch": 1.5327604027849104, |
| "grad_norm": 1.1699777841567993, |
| "learning_rate": 1.1685386685386686e-05, |
| "loss": 0.07500550746917725, |
| "step": 13100 |
| }, |
| { |
| "epoch": 1.5444617074803841, |
| "grad_norm": 1.1473757028579712, |
| "learning_rate": 1.1392886392886394e-05, |
| "loss": 0.07690254688262939, |
| "step": 13200 |
| }, |
| { |
| "epoch": 1.5561630121758576, |
| "grad_norm": 1.6335910558700562, |
| "learning_rate": 1.1100386100386101e-05, |
| "loss": 0.07375136375427246, |
| "step": 13300 |
| }, |
| { |
| "epoch": 1.567864316871331, |
| "grad_norm": 0.9858669638633728, |
| "learning_rate": 1.0807885807885808e-05, |
| "loss": 0.07300055027008057, |
| "step": 13400 |
| }, |
| { |
| "epoch": 1.5795656215668048, |
| "grad_norm": 1.1223580837249756, |
| "learning_rate": 1.0515385515385516e-05, |
| "loss": 0.07348923683166504, |
| "step": 13500 |
| }, |
| { |
| "epoch": 1.5912669262622783, |
| "grad_norm": 0.8121886253356934, |
| "learning_rate": 1.0222885222885223e-05, |
| "loss": 0.0721654510498047, |
| "step": 13600 |
| }, |
| { |
| "epoch": 1.6029682309577518, |
| "grad_norm": 1.125582218170166, |
| "learning_rate": 9.930384930384932e-06, |
| "loss": 0.07356025695800782, |
| "step": 13700 |
| }, |
| { |
| "epoch": 1.6146695356532255, |
| "grad_norm": 0.9660710692405701, |
| "learning_rate": 9.637884637884638e-06, |
| "loss": 0.07352369308471679, |
| "step": 13800 |
| }, |
| { |
| "epoch": 1.6263708403486987, |
| "grad_norm": 0.9546118378639221, |
| "learning_rate": 9.345384345384347e-06, |
| "loss": 0.07006223201751709, |
| "step": 13900 |
| }, |
| { |
| "epoch": 1.6380721450441724, |
| "grad_norm": 1.2664750814437866, |
| "learning_rate": 9.052884052884054e-06, |
| "loss": 0.07232100963592529, |
| "step": 14000 |
| }, |
| { |
| "epoch": 1.6497734497396461, |
| "grad_norm": 1.087778925895691, |
| "learning_rate": 8.76038376038376e-06, |
| "loss": 0.07518599510192871, |
| "step": 14100 |
| }, |
| { |
| "epoch": 1.6614747544351194, |
| "grad_norm": 1.0009450912475586, |
| "learning_rate": 8.467883467883467e-06, |
| "loss": 0.07395988464355469, |
| "step": 14200 |
| }, |
| { |
| "epoch": 1.673176059130593, |
| "grad_norm": 1.052869200706482, |
| "learning_rate": 8.175383175383176e-06, |
| "loss": 0.07117973327636719, |
| "step": 14300 |
| }, |
| { |
| "epoch": 1.6848773638260666, |
| "grad_norm": 0.9935372471809387, |
| "learning_rate": 7.882882882882883e-06, |
| "loss": 0.07157835006713867, |
| "step": 14400 |
| }, |
| { |
| "epoch": 1.69657866852154, |
| "grad_norm": 0.8762065768241882, |
| "learning_rate": 7.59038259038259e-06, |
| "loss": 0.07200119018554688, |
| "step": 14500 |
| }, |
| { |
| "epoch": 1.7082799732170137, |
| "grad_norm": 1.1860989332199097, |
| "learning_rate": 7.297882297882298e-06, |
| "loss": 0.06994849681854248, |
| "step": 14600 |
| }, |
| { |
| "epoch": 1.7199812779124872, |
| "grad_norm": 0.7438328862190247, |
| "learning_rate": 7.005382005382005e-06, |
| "loss": 0.06839815139770508, |
| "step": 14700 |
| }, |
| { |
| "epoch": 1.7316825826079607, |
| "grad_norm": 1.6703062057495117, |
| "learning_rate": 6.712881712881713e-06, |
| "loss": 0.07001969337463379, |
| "step": 14800 |
| }, |
| { |
| "epoch": 1.7433838873034344, |
| "grad_norm": 1.143515706062317, |
| "learning_rate": 6.42038142038142e-06, |
| "loss": 0.07166263103485107, |
| "step": 14900 |
| }, |
| { |
| "epoch": 1.755085191998908, |
| "grad_norm": 1.0320720672607422, |
| "learning_rate": 6.127881127881129e-06, |
| "loss": 0.0704725456237793, |
| "step": 15000 |
| }, |
| { |
| "epoch": 1.755085191998908, |
| "eval_accuracy": 0.9399272727272727, |
| "eval_f1": 0.9843185306203277, |
| "eval_loss": 0.012572239153087139, |
| "eval_precision": 0.9860882072137978, |
| "eval_recall": 0.9825551945244066, |
| "eval_runtime": 207.4736, |
| "eval_samples_per_second": 132.547, |
| "eval_steps_per_second": 4.145, |
| "step": 15000 |
| }, |
| { |
| "epoch": 1.7667864966943814, |
| "grad_norm": 0.8305580019950867, |
| "learning_rate": 5.8353808353808354e-06, |
| "loss": 0.07156490802764892, |
| "step": 15100 |
| }, |
| { |
| "epoch": 1.778487801389855, |
| "grad_norm": 1.298708438873291, |
| "learning_rate": 5.542880542880543e-06, |
| "loss": 0.06973484516143799, |
| "step": 15200 |
| }, |
| { |
| "epoch": 1.7901891060853286, |
| "grad_norm": 0.8593277931213379, |
| "learning_rate": 5.250380250380251e-06, |
| "loss": 0.06879038333892823, |
| "step": 15300 |
| }, |
| { |
| "epoch": 1.801890410780802, |
| "grad_norm": 0.732769787311554, |
| "learning_rate": 4.9578799578799576e-06, |
| "loss": 0.06908240318298339, |
| "step": 15400 |
| }, |
| { |
| "epoch": 1.8135917154762757, |
| "grad_norm": 1.3583112955093384, |
| "learning_rate": 4.665379665379665e-06, |
| "loss": 0.07114370822906495, |
| "step": 15500 |
| }, |
| { |
| "epoch": 1.825293020171749, |
| "grad_norm": 0.9253562092781067, |
| "learning_rate": 4.372879372879373e-06, |
| "loss": 0.07082613945007324, |
| "step": 15600 |
| }, |
| { |
| "epoch": 1.8369943248672227, |
| "grad_norm": 1.340505599975586, |
| "learning_rate": 4.0803790803790806e-06, |
| "loss": 0.06995931625366211, |
| "step": 15700 |
| }, |
| { |
| "epoch": 1.8486956295626964, |
| "grad_norm": 0.9078701734542847, |
| "learning_rate": 3.7878787878787882e-06, |
| "loss": 0.06670016288757324, |
| "step": 15800 |
| }, |
| { |
| "epoch": 1.8603969342581697, |
| "grad_norm": 1.2157623767852783, |
| "learning_rate": 3.495378495378496e-06, |
| "loss": 0.0690723991394043, |
| "step": 15900 |
| }, |
| { |
| "epoch": 1.8720982389536434, |
| "grad_norm": 1.7708772420883179, |
| "learning_rate": 3.202878202878203e-06, |
| "loss": 0.06770940780639649, |
| "step": 16000 |
| }, |
| { |
| "epoch": 1.8837995436491168, |
| "grad_norm": 0.976672887802124, |
| "learning_rate": 2.9103779103779103e-06, |
| "loss": 0.0646241569519043, |
| "step": 16100 |
| }, |
| { |
| "epoch": 1.8955008483445903, |
| "grad_norm": 1.7383304834365845, |
| "learning_rate": 2.617877617877618e-06, |
| "loss": 0.06627600193023682, |
| "step": 16200 |
| }, |
| { |
| "epoch": 1.907202153040064, |
| "grad_norm": 0.8907257318496704, |
| "learning_rate": 2.3253773253773257e-06, |
| "loss": 0.06847190380096435, |
| "step": 16300 |
| }, |
| { |
| "epoch": 1.9189034577355375, |
| "grad_norm": 0.7403037548065186, |
| "learning_rate": 2.032877032877033e-06, |
| "loss": 0.06623115539550781, |
| "step": 16400 |
| }, |
| { |
| "epoch": 1.930604762431011, |
| "grad_norm": 1.0598572492599487, |
| "learning_rate": 1.7403767403767406e-06, |
| "loss": 0.06684101104736329, |
| "step": 16500 |
| }, |
| { |
| "epoch": 1.9423060671264847, |
| "grad_norm": 1.2067943811416626, |
| "learning_rate": 1.447876447876448e-06, |
| "loss": 0.06935319423675537, |
| "step": 16600 |
| }, |
| { |
| "epoch": 1.9540073718219582, |
| "grad_norm": 0.9936091899871826, |
| "learning_rate": 1.1553761553761555e-06, |
| "loss": 0.06889093399047852, |
| "step": 16700 |
| }, |
| { |
| "epoch": 1.9657086765174316, |
| "grad_norm": 1.5107425451278687, |
| "learning_rate": 8.628758628758629e-07, |
| "loss": 0.07209495544433593, |
| "step": 16800 |
| }, |
| { |
| "epoch": 1.9774099812129053, |
| "grad_norm": 1.0668072700500488, |
| "learning_rate": 5.703755703755704e-07, |
| "loss": 0.06988236904144288, |
| "step": 16900 |
| }, |
| { |
| "epoch": 1.9891112859083788, |
| "grad_norm": 1.113166093826294, |
| "learning_rate": 2.7787527787527786e-07, |
| "loss": 0.06924228668212891, |
| "step": 17000 |
| }, |
| { |
| "epoch": 2.0, |
| "eval_accuracy": 0.9411636363636363, |
| "eval_f1": 0.9845099349219312, |
| "eval_loss": 0.012292231433093548, |
| "eval_precision": 0.9859127008710089, |
| "eval_recall": 0.9831111550388555, |
| "eval_runtime": 183.6292, |
| "eval_samples_per_second": 149.758, |
| "eval_steps_per_second": 4.683, |
| "step": 17094 |
| }, |
| { |
| "epoch": 2.0, |
| "step": 17094, |
| "total_flos": 1.9402797892099978e+18, |
| "train_loss": 0.08505657710317173, |
| "train_runtime": 156175.5955, |
| "train_samples_per_second": 63.038, |
| "train_steps_per_second": 0.109 |
| } |
| ], |
| "logging_steps": 100, |
| "max_steps": 17094, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 2, |
| "save_steps": 2500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1.9402797892099978e+18, |
| "train_batch_size": 32, |
| "trial_name": null, |
| "trial_params": null |
| } |