|
{ |
|
"best_metric": 0.21217399835586548, |
|
"best_model_checkpoint": "autotrain-swinv2-base-patch4-window8-256/checkpoint-11696", |
|
"epoch": 8.0, |
|
"eval_steps": 500, |
|
"global_step": 11696, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01709986320109439, |
|
"grad_norm": 10.912551879882812, |
|
"learning_rate": 8.207934336525308e-07, |
|
"loss": 2.9335, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.03419972640218878, |
|
"grad_norm": 11.582744598388672, |
|
"learning_rate": 1.6757865937072504e-06, |
|
"loss": 2.8112, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.05129958960328317, |
|
"grad_norm": 11.078932762145996, |
|
"learning_rate": 2.5307797537619698e-06, |
|
"loss": 2.6726, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.06839945280437756, |
|
"grad_norm": 12.702400207519531, |
|
"learning_rate": 3.38577291381669e-06, |
|
"loss": 2.4547, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.08549931600547196, |
|
"grad_norm": 16.10662078857422, |
|
"learning_rate": 4.240766073871409e-06, |
|
"loss": 2.0783, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.10259917920656635, |
|
"grad_norm": 15.413719177246094, |
|
"learning_rate": 5.0615595075239396e-06, |
|
"loss": 1.8944, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.11969904240766074, |
|
"grad_norm": 14.965916633605957, |
|
"learning_rate": 5.91655266757866e-06, |
|
"loss": 1.6313, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.13679890560875513, |
|
"grad_norm": 15.34847640991211, |
|
"learning_rate": 6.77154582763338e-06, |
|
"loss": 1.5534, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.1538987688098495, |
|
"grad_norm": 14.655660629272461, |
|
"learning_rate": 7.592339261285911e-06, |
|
"loss": 1.4456, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.17099863201094392, |
|
"grad_norm": 18.560701370239258, |
|
"learning_rate": 8.44733242134063e-06, |
|
"loss": 1.3092, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.1880984952120383, |
|
"grad_norm": 29.902738571166992, |
|
"learning_rate": 9.302325581395349e-06, |
|
"loss": 1.2648, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.2051983584131327, |
|
"grad_norm": 27.373172760009766, |
|
"learning_rate": 1.0157318741450068e-05, |
|
"loss": 1.5046, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.22229822161422708, |
|
"grad_norm": 50.6275749206543, |
|
"learning_rate": 1.1012311901504789e-05, |
|
"loss": 1.3096, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.2393980848153215, |
|
"grad_norm": 16.131093978881836, |
|
"learning_rate": 1.1867305061559508e-05, |
|
"loss": 1.1689, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.25649794801641584, |
|
"grad_norm": 30.61119270324707, |
|
"learning_rate": 1.2722298221614229e-05, |
|
"loss": 1.1555, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.27359781121751026, |
|
"grad_norm": 12.447290420532227, |
|
"learning_rate": 1.3577291381668946e-05, |
|
"loss": 0.9556, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.29069767441860467, |
|
"grad_norm": 7.643873691558838, |
|
"learning_rate": 1.4432284541723667e-05, |
|
"loss": 1.0063, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.307797537619699, |
|
"grad_norm": 21.5053768157959, |
|
"learning_rate": 1.5287277701778386e-05, |
|
"loss": 1.0971, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.32489740082079344, |
|
"grad_norm": 26.181028366088867, |
|
"learning_rate": 1.6142270861833107e-05, |
|
"loss": 0.8747, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.34199726402188785, |
|
"grad_norm": null,
|
"learning_rate": 1.6963064295485636e-05, |
|
"loss": 0.899, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.3590971272229822, |
|
"grad_norm": 13.22681713104248, |
|
"learning_rate": 1.7818057455540357e-05, |
|
"loss": 0.844, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.3761969904240766, |
|
"grad_norm": 20.26528549194336, |
|
"learning_rate": 1.8673050615595075e-05, |
|
"loss": 0.9843, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.393296853625171, |
|
"grad_norm": 13.295209884643555, |
|
"learning_rate": 1.9528043775649796e-05, |
|
"loss": 0.8477, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.4103967168262654, |
|
"grad_norm": 17.751012802124023, |
|
"learning_rate": 2.0383036935704516e-05, |
|
"loss": 0.9479, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.4274965800273598, |
|
"grad_norm": 7.726760387420654, |
|
"learning_rate": 2.1238030095759234e-05, |
|
"loss": 0.8116, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.44459644322845415, |
|
"grad_norm": 14.156341552734375, |
|
"learning_rate": 2.2093023255813955e-05, |
|
"loss": 0.8521, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.46169630642954856, |
|
"grad_norm": 39.080848693847656, |
|
"learning_rate": 2.2948016415868672e-05, |
|
"loss": 0.9364, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.478796169630643, |
|
"grad_norm": 18.685993194580078, |
|
"learning_rate": 2.3803009575923393e-05, |
|
"loss": 0.6069, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.49589603283173733, |
|
"grad_norm": 22.621519088745117, |
|
"learning_rate": 2.4658002735978114e-05, |
|
"loss": 0.6538, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.5129958960328317, |
|
"grad_norm": 31.690587997436523, |
|
"learning_rate": 2.5512995896032832e-05, |
|
"loss": 0.7562, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.5300957592339262, |
|
"grad_norm": 7.775012016296387, |
|
"learning_rate": 2.6367989056087556e-05, |
|
"loss": 0.692, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.5471956224350205, |
|
"grad_norm": 24.86013412475586, |
|
"learning_rate": 2.7222982216142274e-05, |
|
"loss": 0.7072, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.5642954856361149, |
|
"grad_norm": 6.716433525085449, |
|
"learning_rate": 2.807797537619699e-05, |
|
"loss": 0.6732, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.5813953488372093, |
|
"grad_norm": 14.250188827514648, |
|
"learning_rate": 2.893296853625171e-05, |
|
"loss": 0.8366, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.5984952120383037, |
|
"grad_norm": 17.7914981842041, |
|
"learning_rate": 2.9787961696306433e-05, |
|
"loss": 0.7953, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.615595075239398, |
|
"grad_norm": 13.2632417678833, |
|
"learning_rate": 3.064295485636115e-05, |
|
"loss": 0.7905, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.6326949384404925, |
|
"grad_norm": 34.731689453125, |
|
"learning_rate": 3.149794801641587e-05, |
|
"loss": 0.741, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.6497948016415869, |
|
"grad_norm": 23.923887252807617, |
|
"learning_rate": 3.235294117647059e-05, |
|
"loss": 0.7366, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.6668946648426812, |
|
"grad_norm": 12.461081504821777, |
|
"learning_rate": 3.3207934336525306e-05, |
|
"loss": 0.8837, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.6839945280437757, |
|
"grad_norm": 14.00641918182373, |
|
"learning_rate": 3.406292749658003e-05, |
|
"loss": 0.6269, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.70109439124487, |
|
"grad_norm": 23.20526123046875, |
|
"learning_rate": 3.491792065663475e-05, |
|
"loss": 0.8368, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.7181942544459644, |
|
"grad_norm": 29.71381187438965, |
|
"learning_rate": 3.577291381668947e-05, |
|
"loss": 0.5834, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.7352941176470589, |
|
"grad_norm": 13.521023750305176, |
|
"learning_rate": 3.662790697674418e-05, |
|
"loss": 0.682, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.7523939808481532, |
|
"grad_norm": 18.163925170898438, |
|
"learning_rate": 3.748290013679891e-05, |
|
"loss": 0.7037, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.7694938440492476, |
|
"grad_norm": 4.578676223754883, |
|
"learning_rate": 3.8337893296853625e-05, |
|
"loss": 0.6831, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.786593707250342, |
|
"grad_norm": 37.317405700683594, |
|
"learning_rate": 3.9192886456908346e-05, |
|
"loss": 0.6257, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.8036935704514364, |
|
"grad_norm": 19.161466598510742, |
|
"learning_rate": 4.004787961696307e-05, |
|
"loss": 0.6827, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.8207934336525308, |
|
"grad_norm": 12.673524856567383, |
|
"learning_rate": 4.090287277701779e-05, |
|
"loss": 0.7644, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.8378932968536251, |
|
"grad_norm": 19.195293426513672, |
|
"learning_rate": 4.17578659370725e-05, |
|
"loss": 0.8797, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.8549931600547196, |
|
"grad_norm": 12.568032264709473, |
|
"learning_rate": 4.261285909712722e-05, |
|
"loss": 0.8057, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.872093023255814, |
|
"grad_norm": 6.0890984535217285, |
|
"learning_rate": 4.3467852257181944e-05, |
|
"loss": 0.7327, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.8891928864569083, |
|
"grad_norm": 17.399168014526367, |
|
"learning_rate": 4.4322845417236665e-05, |
|
"loss": 0.6994, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.9062927496580028, |
|
"grad_norm": 22.242332458496094, |
|
"learning_rate": 4.517783857729138e-05, |
|
"loss": 0.7407, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.9233926128590971, |
|
"grad_norm": 34.255836486816406, |
|
"learning_rate": 4.6032831737346106e-05, |
|
"loss": 0.7177, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.9404924760601915, |
|
"grad_norm": 32.492916107177734, |
|
"learning_rate": 4.688782489740082e-05, |
|
"loss": 0.6823, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.957592339261286, |
|
"grad_norm": 8.38550853729248, |
|
"learning_rate": 4.774281805745554e-05, |
|
"loss": 0.6598, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.9746922024623803, |
|
"grad_norm": 14.937362670898438, |
|
"learning_rate": 4.859781121751026e-05, |
|
"loss": 0.6052, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.9917920656634747, |
|
"grad_norm": 12.976387023925781, |
|
"learning_rate": 4.945280437756498e-05, |
|
"loss": 0.6612, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_accuracy": 0.7494025264595425, |
|
"eval_f1_macro": 0.4161312340366561, |
|
"eval_f1_micro": 0.7494025264595425, |
|
"eval_f1_weighted": 0.7356756793477198, |
|
"eval_loss": 0.6534971594810486, |
|
"eval_precision_macro": 0.5527584385412905, |
|
"eval_precision_micro": 0.7494025264595425, |
|
"eval_precision_weighted": 0.8030313567462574, |
|
"eval_recall_macro": 0.42879797708152634, |
|
"eval_recall_micro": 0.7494025264595425, |
|
"eval_recall_weighted": 0.7494025264595425, |
|
"eval_runtime": 25.4169, |
|
"eval_samples_per_second": 115.238, |
|
"eval_steps_per_second": 7.239, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 1.008891928864569, |
|
"grad_norm": 9.59922981262207, |
|
"learning_rate": 4.996580027359781e-05, |
|
"loss": 0.5404, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 1.0259917920656634, |
|
"grad_norm": 8.981080055236816, |
|
"learning_rate": 4.9870801033591734e-05, |
|
"loss": 0.6632, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.043091655266758, |
|
"grad_norm": 19.810388565063477, |
|
"learning_rate": 4.977580179358565e-05, |
|
"loss": 0.7257, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 1.0601915184678523, |
|
"grad_norm": 24.40424346923828, |
|
"learning_rate": 4.9680802553579575e-05, |
|
"loss": 0.4992, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.0772913816689467, |
|
"grad_norm": 17.77897071838379, |
|
"learning_rate": 4.958580331357349e-05, |
|
"loss": 0.7102, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 1.094391244870041, |
|
"grad_norm": 9.230584144592285, |
|
"learning_rate": 4.9490804073567415e-05, |
|
"loss": 0.5452, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.1114911080711354, |
|
"grad_norm": 9.320358276367188, |
|
"learning_rate": 4.939580483356133e-05, |
|
"loss": 0.6861, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 1.1285909712722297, |
|
"grad_norm": 19.349388122558594, |
|
"learning_rate": 4.9300805593555256e-05, |
|
"loss": 0.6399, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.1456908344733243, |
|
"grad_norm": 8.893238067626953, |
|
"learning_rate": 4.920580635354917e-05, |
|
"loss": 0.541, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 1.1627906976744187, |
|
"grad_norm": 7.8030219078063965, |
|
"learning_rate": 4.9110807113543096e-05, |
|
"loss": 0.6742, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.179890560875513, |
|
"grad_norm": 30.97956657409668, |
|
"learning_rate": 4.901580787353701e-05, |
|
"loss": 0.5536, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 1.1969904240766074, |
|
"grad_norm": 28.708515167236328, |
|
"learning_rate": 4.892080863353094e-05, |
|
"loss": 0.5912, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.2140902872777017, |
|
"grad_norm": 8.513506889343262, |
|
"learning_rate": 4.8825809393524854e-05, |
|
"loss": 0.6994, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 1.231190150478796, |
|
"grad_norm": 3.460026741027832, |
|
"learning_rate": 4.873081015351878e-05, |
|
"loss": 0.5413, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.2482900136798905, |
|
"grad_norm": 11.066655158996582, |
|
"learning_rate": 4.8635810913512694e-05, |
|
"loss": 0.5687, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 1.265389876880985, |
|
"grad_norm": 11.196250915527344, |
|
"learning_rate": 4.854081167350661e-05, |
|
"loss": 0.6434, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.2824897400820794, |
|
"grad_norm": 6.403533935546875, |
|
"learning_rate": 4.8445812433500535e-05, |
|
"loss": 0.6681, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 1.2995896032831737, |
|
"grad_norm": 0.6531491875648499, |
|
"learning_rate": 4.835081319349445e-05, |
|
"loss": 0.6018, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.316689466484268, |
|
"grad_norm": 17.756439208984375, |
|
"learning_rate": 4.8255813953488375e-05, |
|
"loss": 0.6509, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 1.3337893296853625, |
|
"grad_norm": 6.791726589202881, |
|
"learning_rate": 4.816081471348229e-05, |
|
"loss": 0.7089, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.350889192886457, |
|
"grad_norm": 11.32545280456543, |
|
"learning_rate": 4.8065815473476216e-05, |
|
"loss": 0.6242, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 1.3679890560875512, |
|
"grad_norm": 23.671077728271484, |
|
"learning_rate": 4.797081623347013e-05, |
|
"loss": 0.7434, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.3850889192886457, |
|
"grad_norm": 2.791210174560547, |
|
"learning_rate": 4.7875816993464056e-05, |
|
"loss": 0.5504, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 1.40218878248974, |
|
"grad_norm": 8.67063045501709, |
|
"learning_rate": 4.778081775345797e-05, |
|
"loss": 0.756, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.4192886456908345, |
|
"grad_norm": 3.611431360244751, |
|
"learning_rate": 4.76858185134519e-05, |
|
"loss": 0.6977, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 1.4363885088919288, |
|
"grad_norm": 6.853910446166992, |
|
"learning_rate": 4.7590819273445814e-05, |
|
"loss": 0.4457, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.4534883720930232, |
|
"grad_norm": 5.6227264404296875, |
|
"learning_rate": 4.749582003343974e-05, |
|
"loss": 0.5169, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 1.4705882352941178, |
|
"grad_norm": 10.246155738830566, |
|
"learning_rate": 4.7400820793433654e-05, |
|
"loss": 0.7258, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 1.487688098495212, |
|
"grad_norm": 10.109542846679688, |
|
"learning_rate": 4.730582155342758e-05, |
|
"loss": 0.5037, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 1.5047879616963065, |
|
"grad_norm": 29.261133193969727, |
|
"learning_rate": 4.7210822313421495e-05, |
|
"loss": 0.5408, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 1.5218878248974008, |
|
"grad_norm": 22.082735061645508, |
|
"learning_rate": 4.711582307341542e-05, |
|
"loss": 0.6156, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 1.5389876880984952, |
|
"grad_norm": 4.202814102172852, |
|
"learning_rate": 4.7020823833409335e-05, |
|
"loss": 0.3967, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 1.5560875512995898, |
|
"grad_norm": 19.054508209228516, |
|
"learning_rate": 4.692582459340326e-05, |
|
"loss": 0.6536, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 1.573187414500684, |
|
"grad_norm": 10.150508880615234, |
|
"learning_rate": 4.6830825353397176e-05, |
|
"loss": 0.6868, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 1.5902872777017785, |
|
"grad_norm": 9.790059089660645, |
|
"learning_rate": 4.673582611339109e-05, |
|
"loss": 0.5342, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 1.6073871409028728, |
|
"grad_norm": 20.408973693847656, |
|
"learning_rate": 4.664082687338501e-05, |
|
"loss": 0.6302, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 1.6244870041039672, |
|
"grad_norm": 3.9431777000427246, |
|
"learning_rate": 4.6545827633378933e-05, |
|
"loss": 0.5242, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 1.6415868673050615, |
|
"grad_norm": 17.057270050048828, |
|
"learning_rate": 4.645082839337285e-05, |
|
"loss": 0.5606, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 1.658686730506156, |
|
"grad_norm": 15.276410102844238, |
|
"learning_rate": 4.6355829153366774e-05, |
|
"loss": 0.6188, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 1.6757865937072505, |
|
"grad_norm": 4.771947383880615, |
|
"learning_rate": 4.626082991336069e-05, |
|
"loss": 0.4203, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 1.6928864569083446, |
|
"grad_norm": 11.805036544799805, |
|
"learning_rate": 4.6165830673354615e-05, |
|
"loss": 0.6164, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 1.7099863201094392, |
|
"grad_norm": 15.42662239074707, |
|
"learning_rate": 4.607463140294878e-05, |
|
"loss": 0.561, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.7270861833105335, |
|
"grad_norm": 27.376279830932617, |
|
"learning_rate": 4.59796321629427e-05, |
|
"loss": 0.5232, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 1.744186046511628, |
|
"grad_norm": 14.586313247680664, |
|
"learning_rate": 4.5888432892536865e-05, |
|
"loss": 0.5013, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 1.7612859097127223, |
|
"grad_norm": 4.355943202972412, |
|
"learning_rate": 4.579343365253078e-05, |
|
"loss": 0.623, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 1.7783857729138166, |
|
"grad_norm": 14.60937213897705, |
|
"learning_rate": 4.5698434412524706e-05, |
|
"loss": 0.5106, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 1.7954856361149112, |
|
"grad_norm": 11.220321655273438, |
|
"learning_rate": 4.560343517251862e-05, |
|
"loss": 0.4515, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 1.8125854993160053, |
|
"grad_norm": 10.763425827026367, |
|
"learning_rate": 4.5508435932512546e-05, |
|
"loss": 0.5121, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 1.8296853625171, |
|
"grad_norm": 14.884260177612305, |
|
"learning_rate": 4.541343669250646e-05, |
|
"loss": 0.5982, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 1.8467852257181943, |
|
"grad_norm": 10.640288352966309, |
|
"learning_rate": 4.531843745250039e-05, |
|
"loss": 0.4412, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 1.8638850889192886, |
|
"grad_norm": 4.36832332611084, |
|
"learning_rate": 4.5223438212494304e-05, |
|
"loss": 0.57, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 1.8809849521203832, |
|
"grad_norm": 1.8027430772781372, |
|
"learning_rate": 4.512843897248822e-05, |
|
"loss": 0.5363, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 1.8980848153214773, |
|
"grad_norm": 3.5775504112243652, |
|
"learning_rate": 4.5033439732482144e-05, |
|
"loss": 0.433, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 1.915184678522572, |
|
"grad_norm": 6.087740421295166, |
|
"learning_rate": 4.493844049247606e-05, |
|
"loss": 0.505, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 1.9322845417236663, |
|
"grad_norm": 10.784486770629883, |
|
"learning_rate": 4.484344125246998e-05, |
|
"loss": 0.54, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 1.9493844049247606, |
|
"grad_norm": 16.345617294311523, |
|
"learning_rate": 4.47484420124639e-05, |
|
"loss": 0.5492, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 1.966484268125855, |
|
"grad_norm": 8.161157608032227, |
|
"learning_rate": 4.465344277245782e-05, |
|
"loss": 0.547, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 1.9835841313269493, |
|
"grad_norm": 0.8972734808921814, |
|
"learning_rate": 4.455844353245174e-05, |
|
"loss": 0.5136, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_accuracy": 0.8866507340389211, |
|
"eval_f1_macro": 0.6620993762486447, |
|
"eval_f1_micro": 0.8866507340389211, |
|
"eval_f1_weighted": 0.8825688205984242, |
|
"eval_loss": 0.3399985134601593, |
|
"eval_precision_macro": 0.7711949393609613, |
|
"eval_precision_micro": 0.8866507340389211, |
|
"eval_precision_weighted": 0.8986612163367862, |
|
"eval_recall_macro": 0.637716282980808, |
|
"eval_recall_micro": 0.8866507340389211, |
|
"eval_recall_weighted": 0.8866507340389211, |
|
"eval_runtime": 25.8144, |
|
"eval_samples_per_second": 113.464, |
|
"eval_steps_per_second": 7.128, |
|
"step": 2924 |
|
}, |
|
{ |
|
"epoch": 2.000683994528044, |
|
"grad_norm": 13.184772491455078, |
|
"learning_rate": 4.446344429244566e-05, |
|
"loss": 0.5396, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 2.017783857729138, |
|
"grad_norm": 25.030324935913086, |
|
"learning_rate": 4.436844505243958e-05, |
|
"loss": 0.4869, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 2.0348837209302326, |
|
"grad_norm": 4.769693851470947, |
|
"learning_rate": 4.42734458124335e-05, |
|
"loss": 0.6058, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 2.0519835841313268, |
|
"grad_norm": 7.207031726837158, |
|
"learning_rate": 4.417844657242742e-05, |
|
"loss": 0.4164, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 2.0690834473324213, |
|
"grad_norm": 8.032303810119629, |
|
"learning_rate": 4.408344733242134e-05, |
|
"loss": 0.6109, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 2.086183310533516, |
|
"grad_norm": 9.863656044006348, |
|
"learning_rate": 4.3988448092415264e-05, |
|
"loss": 0.5524, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 2.10328317373461, |
|
"grad_norm": 12.57875919342041, |
|
"learning_rate": 4.389344885240918e-05, |
|
"loss": 0.5071, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 2.1203830369357046, |
|
"grad_norm": 14.003454208374023, |
|
"learning_rate": 4.3798449612403104e-05, |
|
"loss": 0.5256, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 2.1374829001367988, |
|
"grad_norm": 18.200809478759766, |
|
"learning_rate": 4.370345037239703e-05, |
|
"loss": 0.5496, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 2.1545827633378933, |
|
"grad_norm": 4.887876033782959, |
|
"learning_rate": 4.3608451132390945e-05, |
|
"loss": 0.4347, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 2.1716826265389875, |
|
"grad_norm": 19.693723678588867, |
|
"learning_rate": 4.351345189238487e-05, |
|
"loss": 0.5804, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 2.188782489740082, |
|
"grad_norm": 4.4787726402282715, |
|
"learning_rate": 4.3418452652378786e-05, |
|
"loss": 0.7788, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 2.2058823529411766, |
|
"grad_norm": 8.199685096740723, |
|
"learning_rate": 4.33234534123727e-05, |
|
"loss": 0.5392, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 2.2229822161422708, |
|
"grad_norm": 3.600613594055176, |
|
"learning_rate": 4.322845417236662e-05, |
|
"loss": 0.4586, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 2.2400820793433653, |
|
"grad_norm": 1.0569565296173096, |
|
"learning_rate": 4.313345493236054e-05, |
|
"loss": 0.4908, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 2.2571819425444595, |
|
"grad_norm": 9.293862342834473, |
|
"learning_rate": 4.303845569235446e-05, |
|
"loss": 0.4662, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 2.274281805745554, |
|
"grad_norm": 20.558799743652344, |
|
"learning_rate": 4.2943456452348384e-05, |
|
"loss": 0.4093, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 2.2913816689466486, |
|
"grad_norm": 9.981971740722656, |
|
"learning_rate": 4.28484572123423e-05, |
|
"loss": 0.5925, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 2.3084815321477428, |
|
"grad_norm": 9.498967170715332, |
|
"learning_rate": 4.2753457972336224e-05, |
|
"loss": 0.398, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 2.3255813953488373, |
|
"grad_norm": 15.670341491699219, |
|
"learning_rate": 4.265845873233014e-05, |
|
"loss": 0.5653, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 2.3426812585499315, |
|
"grad_norm": 2.6220016479492188, |
|
"learning_rate": 4.2563459492324065e-05, |
|
"loss": 0.2821, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 2.359781121751026, |
|
"grad_norm": 4.382536888122559, |
|
"learning_rate": 4.246846025231798e-05, |
|
"loss": 0.5604, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 2.37688098495212, |
|
"grad_norm": 29.831298828125, |
|
"learning_rate": 4.2373461012311905e-05, |
|
"loss": 0.6324, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 2.3939808481532148, |
|
"grad_norm": 11.329654693603516, |
|
"learning_rate": 4.227846177230582e-05, |
|
"loss": 0.4776, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 2.4110807113543093, |
|
"grad_norm": 14.24655818939209, |
|
"learning_rate": 4.2183462532299746e-05, |
|
"loss": 0.3838, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 2.4281805745554035, |
|
"grad_norm": 10.425446510314941, |
|
"learning_rate": 4.208846329229366e-05, |
|
"loss": 0.4854, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 2.445280437756498, |
|
"grad_norm": 12.805408477783203, |
|
"learning_rate": 4.1993464052287586e-05, |
|
"loss": 0.4318, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 2.462380300957592, |
|
"grad_norm": 2.7314155101776123, |
|
"learning_rate": 4.18984648122815e-05, |
|
"loss": 0.4299, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 2.4794801641586868, |
|
"grad_norm": 18.400419235229492, |
|
"learning_rate": 4.180346557227543e-05, |
|
"loss": 0.51, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 2.496580027359781, |
|
"grad_norm": 7.5878214836120605, |
|
"learning_rate": 4.1708466332269344e-05, |
|
"loss": 0.4057, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 2.5136798905608755, |
|
"grad_norm": 6.642499923706055, |
|
"learning_rate": 4.161346709226327e-05, |
|
"loss": 0.5965, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 2.53077975376197, |
|
"grad_norm": 3.847749948501587, |
|
"learning_rate": 4.1518467852257184e-05, |
|
"loss": 0.5182, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 2.547879616963064, |
|
"grad_norm": 11.216913223266602, |
|
"learning_rate": 4.14234686122511e-05, |
|
"loss": 0.4942, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 2.5649794801641588, |
|
"grad_norm": 18.338762283325195, |
|
"learning_rate": 4.1328469372245025e-05, |
|
"loss": 0.5502, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 2.582079343365253, |
|
"grad_norm": 2.1774024963378906, |
|
"learning_rate": 4.123347013223894e-05, |
|
"loss": 0.557, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 2.5991792065663475, |
|
"grad_norm": 11.719930648803711, |
|
"learning_rate": 4.1138470892232865e-05, |
|
"loss": 0.5172, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 2.616279069767442, |
|
"grad_norm": 10.061673164367676, |
|
"learning_rate": 4.104347165222678e-05, |
|
"loss": 0.4343, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 2.633378932968536, |
|
"grad_norm": 8.708327293395996, |
|
"learning_rate": 4.0948472412220706e-05, |
|
"loss": 0.4235, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 2.650478796169631, |
|
"grad_norm": 13.297274589538574, |
|
"learning_rate": 4.085347317221462e-05, |
|
"loss": 0.4919, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 2.667578659370725, |
|
"grad_norm": 1.7467930316925049, |
|
"learning_rate": 4.0758473932208547e-05, |
|
"loss": 0.5269, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 2.6846785225718195, |
|
"grad_norm": 3.1984822750091553, |
|
"learning_rate": 4.066347469220246e-05, |
|
"loss": 0.5333, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 2.701778385772914, |
|
"grad_norm": 4.631045341491699, |
|
"learning_rate": 4.056847545219639e-05, |
|
"loss": 0.5227, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 2.718878248974008, |
|
"grad_norm": 34.19083786010742, |
|
"learning_rate": 4.0473476212190304e-05, |
|
"loss": 0.5034, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 2.7359781121751023, |
|
"grad_norm": 11.861445426940918, |
|
"learning_rate": 4.037847697218423e-05, |
|
"loss": 0.4182, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 2.753077975376197, |
|
"grad_norm": 2.626359224319458, |
|
"learning_rate": 4.0283477732178144e-05, |
|
"loss": 0.4524, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 2.7701778385772915, |
|
"grad_norm": 3.4258358478546143, |
|
"learning_rate": 4.018847849217207e-05, |
|
"loss": 0.6167, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 2.7872777017783856, |
|
"grad_norm": 8.724355697631836, |
|
"learning_rate": 4.0093479252165985e-05, |
|
"loss": 0.4887, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 2.80437756497948, |
|
"grad_norm": 4.958431720733643, |
|
"learning_rate": 3.999848001215991e-05, |
|
"loss": 0.5661, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 2.8214774281805743, |
|
"grad_norm": 22.518157958984375, |
|
"learning_rate": 3.9903480772153826e-05, |
|
"loss": 0.4875, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 2.838577291381669, |
|
"grad_norm": 2.556485176086426, |
|
"learning_rate": 3.980848153214774e-05, |
|
"loss": 0.4512, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 2.8556771545827635, |
|
"grad_norm": 22.274921417236328, |
|
"learning_rate": 3.971348229214166e-05, |
|
"loss": 0.6026, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 2.8727770177838576, |
|
"grad_norm": 11.015833854675293, |
|
"learning_rate": 3.961848305213558e-05, |
|
"loss": 0.4365, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 2.889876880984952, |
|
"grad_norm": 5.808104515075684, |
|
"learning_rate": 3.95234838121295e-05, |
|
"loss": 0.4042, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 2.9069767441860463, |
|
"grad_norm": 7.572930335998535, |
|
"learning_rate": 3.9428484572123424e-05, |
|
"loss": 0.4788, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 2.924076607387141, |
|
"grad_norm": 10.827695846557617, |
|
"learning_rate": 3.933348533211734e-05, |
|
"loss": 0.4966, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 2.9411764705882355, |
|
"grad_norm": 7.229436874389648, |
|
"learning_rate": 3.9238486092111264e-05, |
|
"loss": 0.4786, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 2.9582763337893296, |
|
"grad_norm": 1.738945484161377, |
|
"learning_rate": 3.914348685210518e-05, |
|
"loss": 0.3888, |
|
"step": 4325 |
|
}, |
|
{ |
|
"epoch": 2.975376196990424, |
|
"grad_norm": 7.930450439453125, |
|
"learning_rate": 3.9048487612099105e-05, |
|
"loss": 0.5177, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 2.9924760601915183, |
|
"grad_norm": 3.204556465148926, |
|
"learning_rate": 3.895348837209303e-05, |
|
"loss": 0.5221, |
|
"step": 4375 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_accuracy": 0.8921133492659611, |
|
"eval_f1_macro": 0.7248099411197927, |
|
"eval_f1_micro": 0.8921133492659611, |
|
"eval_f1_weighted": 0.8900566376282183, |
|
"eval_loss": 0.30895766615867615, |
|
"eval_precision_macro": 0.8455393630579494, |
|
"eval_precision_micro": 0.8921133492659611, |
|
"eval_precision_weighted": 0.9014059400435374, |
|
"eval_recall_macro": 0.6934276077306252, |
|
"eval_recall_micro": 0.8921133492659611, |
|
"eval_recall_weighted": 0.8921133492659611, |
|
"eval_runtime": 25.8185, |
|
"eval_samples_per_second": 113.446, |
|
"eval_steps_per_second": 7.127, |
|
"step": 4386 |
|
}, |
|
{ |
|
"epoch": 3.009575923392613, |
|
"grad_norm": 4.8183465003967285, |
|
"learning_rate": 3.8858489132086945e-05, |
|
"loss": 0.5111, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 3.026675786593707, |
|
"grad_norm": 11.976899147033691, |
|
"learning_rate": 3.876348989208087e-05, |
|
"loss": 0.5971, |
|
"step": 4425 |
|
}, |
|
{ |
|
"epoch": 3.0437756497948016, |
|
"grad_norm": 7.26112699508667, |
|
"learning_rate": 3.8668490652074786e-05, |
|
"loss": 0.5008, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 3.060875512995896, |
|
"grad_norm": 16.217424392700195, |
|
"learning_rate": 3.857349141206871e-05, |
|
"loss": 0.542, |
|
"step": 4475 |
|
}, |
|
{ |
|
"epoch": 3.0779753761969904, |
|
"grad_norm": 4.9412922859191895, |
|
"learning_rate": 3.8478492172062626e-05, |
|
"loss": 0.4046, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 3.095075239398085, |
|
"grad_norm": 14.565752983093262, |
|
"learning_rate": 3.838349293205655e-05, |
|
"loss": 0.2833, |
|
"step": 4525 |
|
}, |
|
{ |
|
"epoch": 3.112175102599179, |
|
"grad_norm": 12.100059509277344, |
|
"learning_rate": 3.828849369205047e-05, |
|
"loss": 0.6031, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 3.1292749658002736, |
|
"grad_norm": 7.3751325607299805, |
|
"learning_rate": 3.8193494452044384e-05, |
|
"loss": 0.3429, |
|
"step": 4575 |
|
}, |
|
{ |
|
"epoch": 3.146374829001368, |
|
"grad_norm": 1.8577054738998413, |
|
"learning_rate": 3.809849521203831e-05, |
|
"loss": 0.4626, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 3.1634746922024624, |
|
"grad_norm": 21.437606811523438, |
|
"learning_rate": 3.8003495972032224e-05, |
|
"loss": 0.5272, |
|
"step": 4625 |
|
}, |
|
{ |
|
"epoch": 3.180574555403557, |
|
"grad_norm": 0.7049314379692078, |
|
"learning_rate": 3.790849673202614e-05, |
|
"loss": 0.4756, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 3.197674418604651, |
|
"grad_norm": 26.833208084106445, |
|
"learning_rate": 3.7813497492020065e-05, |
|
"loss": 0.4575, |
|
"step": 4675 |
|
}, |
|
{ |
|
"epoch": 3.2147742818057456, |
|
"grad_norm": 3.7546021938323975, |
|
"learning_rate": 3.771849825201398e-05, |
|
"loss": 0.3827, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 3.23187414500684, |
|
"grad_norm": 0.08676711469888687, |
|
"learning_rate": 3.7623499012007905e-05, |
|
"loss": 0.5096, |
|
"step": 4725 |
|
}, |
|
{ |
|
"epoch": 3.2489740082079344, |
|
"grad_norm": 6.28751802444458, |
|
"learning_rate": 3.752849977200182e-05, |
|
"loss": 0.5496, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 3.266073871409029, |
|
"grad_norm": 4.246407985687256, |
|
"learning_rate": 3.7433500531995746e-05, |
|
"loss": 0.4028, |
|
"step": 4775 |
|
}, |
|
{ |
|
"epoch": 3.283173734610123, |
|
"grad_norm": 7.93133020401001, |
|
"learning_rate": 3.733850129198966e-05, |
|
"loss": 0.55, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 3.3002735978112177, |
|
"grad_norm": 4.031394004821777, |
|
"learning_rate": 3.7243502051983587e-05, |
|
"loss": 0.6104, |
|
"step": 4825 |
|
}, |
|
{ |
|
"epoch": 3.317373461012312, |
|
"grad_norm": 6.97119140625, |
|
"learning_rate": 3.7148502811977503e-05, |
|
"loss": 0.3561, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 3.3344733242134064, |
|
"grad_norm": 4.752348899841309, |
|
"learning_rate": 3.705350357197143e-05, |
|
"loss": 0.4267, |
|
"step": 4875 |
|
}, |
|
{ |
|
"epoch": 3.3515731874145005, |
|
"grad_norm": 5.945026397705078, |
|
"learning_rate": 3.6958504331965344e-05, |
|
"loss": 0.5058, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 3.368673050615595, |
|
"grad_norm": 3.1954736709594727, |
|
"learning_rate": 3.686350509195927e-05, |
|
"loss": 0.4159, |
|
"step": 4925 |
|
}, |
|
{ |
|
"epoch": 3.3857729138166897, |
|
"grad_norm": 20.407272338867188, |
|
"learning_rate": 3.676850585195319e-05, |
|
"loss": 0.5054, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 3.402872777017784, |
|
"grad_norm": 9.6301851272583, |
|
"learning_rate": 3.667350661194711e-05, |
|
"loss": 0.4432, |
|
"step": 4975 |
|
}, |
|
{ |
|
"epoch": 3.4199726402188784, |
|
"grad_norm": 7.080709934234619, |
|
"learning_rate": 3.657850737194103e-05, |
|
"loss": 0.3967, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 3.4370725034199725, |
|
"grad_norm": 6.897956848144531, |
|
"learning_rate": 3.648350813193495e-05, |
|
"loss": 0.3478, |
|
"step": 5025 |
|
}, |
|
{ |
|
"epoch": 3.454172366621067, |
|
"grad_norm": 2.1270782947540283, |
|
"learning_rate": 3.6388508891928866e-05, |
|
"loss": 0.4657, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 3.471272229822161, |
|
"grad_norm": 8.693300247192383, |
|
"learning_rate": 3.629350965192278e-05, |
|
"loss": 0.3595, |
|
"step": 5075 |
|
}, |
|
{ |
|
"epoch": 3.488372093023256, |
|
"grad_norm": 5.767752170562744, |
|
"learning_rate": 3.6198510411916706e-05, |
|
"loss": 0.6431, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 3.5054719562243504, |
|
"grad_norm": 2.4627044200897217, |
|
"learning_rate": 3.610351117191062e-05, |
|
"loss": 0.4672, |
|
"step": 5125 |
|
}, |
|
{ |
|
"epoch": 3.5225718194254445, |
|
"grad_norm": 11.726508140563965, |
|
"learning_rate": 3.600851193190455e-05, |
|
"loss": 0.5105, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 3.539671682626539, |
|
"grad_norm": 12.632149696350098, |
|
"learning_rate": 3.5913512691898464e-05, |
|
"loss": 0.43, |
|
"step": 5175 |
|
}, |
|
{ |
|
"epoch": 3.556771545827633, |
|
"grad_norm": 12.43307113647461, |
|
"learning_rate": 3.581851345189239e-05, |
|
"loss": 0.5033, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 3.573871409028728, |
|
"grad_norm": 5.589097499847412, |
|
"learning_rate": 3.5723514211886304e-05, |
|
"loss": 0.5718, |
|
"step": 5225 |
|
}, |
|
{ |
|
"epoch": 3.5909712722298224, |
|
"grad_norm": 7.462125778198242, |
|
"learning_rate": 3.562851497188023e-05, |
|
"loss": 0.5025, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 3.6080711354309165, |
|
"grad_norm": 2.099492073059082, |
|
"learning_rate": 3.5533515731874145e-05, |
|
"loss": 0.3758, |
|
"step": 5275 |
|
}, |
|
{ |
|
"epoch": 3.625170998632011, |
|
"grad_norm": 6.787677764892578, |
|
"learning_rate": 3.543851649186807e-05, |
|
"loss": 0.3685, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 3.6422708618331052, |
|
"grad_norm": 11.592167854309082, |
|
"learning_rate": 3.5343517251861985e-05, |
|
"loss": 0.4173, |
|
"step": 5325 |
|
}, |
|
{ |
|
"epoch": 3.6593707250342, |
|
"grad_norm": 5.881494522094727, |
|
"learning_rate": 3.524851801185591e-05, |
|
"loss": 0.3484, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 3.6764705882352944, |
|
"grad_norm": 5.682451248168945, |
|
"learning_rate": 3.5153518771849826e-05, |
|
"loss": 0.4324, |
|
"step": 5375 |
|
}, |
|
{ |
|
"epoch": 3.6935704514363885, |
|
"grad_norm": 0.20855078101158142, |
|
"learning_rate": 3.505851953184375e-05, |
|
"loss": 0.4147, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 3.7106703146374826, |
|
"grad_norm": 8.257678031921387, |
|
"learning_rate": 3.4963520291837666e-05, |
|
"loss": 0.494, |
|
"step": 5425 |
|
}, |
|
{ |
|
"epoch": 3.7277701778385772, |
|
"grad_norm": 9.025782585144043, |
|
"learning_rate": 3.486852105183159e-05, |
|
"loss": 0.4538, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 3.744870041039672, |
|
"grad_norm": 8.485616683959961, |
|
"learning_rate": 3.477352181182551e-05, |
|
"loss": 0.4405, |
|
"step": 5475 |
|
}, |
|
{ |
|
"epoch": 3.761969904240766, |
|
"grad_norm": 7.031386375427246, |
|
"learning_rate": 3.467852257181943e-05, |
|
"loss": 0.5225, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 3.7790697674418605, |
|
"grad_norm": 6.483025550842285, |
|
"learning_rate": 3.458352333181335e-05, |
|
"loss": 0.5224, |
|
"step": 5525 |
|
}, |
|
{ |
|
"epoch": 3.7961696306429547, |
|
"grad_norm": 13.42219066619873, |
|
"learning_rate": 3.4488524091807264e-05, |
|
"loss": 0.4022, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 3.8132694938440492, |
|
"grad_norm": 11.677425384521484, |
|
"learning_rate": 3.439352485180119e-05, |
|
"loss": 0.4221, |
|
"step": 5575 |
|
}, |
|
{ |
|
"epoch": 3.830369357045144, |
|
"grad_norm": 83.29227447509766, |
|
"learning_rate": 3.4298525611795105e-05, |
|
"loss": 0.4575, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 3.847469220246238, |
|
"grad_norm": 13.023981094360352, |
|
"learning_rate": 3.420352637178903e-05, |
|
"loss": 0.4159, |
|
"step": 5625 |
|
}, |
|
{ |
|
"epoch": 3.8645690834473325, |
|
"grad_norm": 3.78824520111084, |
|
"learning_rate": 3.4108527131782945e-05, |
|
"loss": 0.3825, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 3.8816689466484267, |
|
"grad_norm": 23.089439392089844, |
|
"learning_rate": 3.401352789177687e-05, |
|
"loss": 0.4796, |
|
"step": 5675 |
|
}, |
|
{ |
|
"epoch": 3.8987688098495212, |
|
"grad_norm": 8.51207160949707, |
|
"learning_rate": 3.3918528651770786e-05, |
|
"loss": 0.389, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 3.915868673050616, |
|
"grad_norm": 25.023662567138672, |
|
"learning_rate": 3.382352941176471e-05, |
|
"loss": 0.4779, |
|
"step": 5725 |
|
}, |
|
{ |
|
"epoch": 3.93296853625171, |
|
"grad_norm": 10.165434837341309, |
|
"learning_rate": 3.3728530171758627e-05, |
|
"loss": 0.4599, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 3.9500683994528045, |
|
"grad_norm": 18.18007469177246, |
|
"learning_rate": 3.363353093175255e-05, |
|
"loss": 0.4497, |
|
"step": 5775 |
|
}, |
|
{ |
|
"epoch": 3.9671682626538987, |
|
"grad_norm": 4.1317315101623535, |
|
"learning_rate": 3.353853169174647e-05, |
|
"loss": 0.3715, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 3.9842681258549932, |
|
"grad_norm": 10.207343101501465, |
|
"learning_rate": 3.344353245174039e-05, |
|
"loss": 0.3945, |
|
"step": 5825 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_accuracy": 0.899283031751451, |
|
"eval_f1_macro": 0.7167734728855646, |
|
"eval_f1_micro": 0.899283031751451, |
|
"eval_f1_weighted": 0.8949067494674995, |
|
"eval_loss": 0.2918279469013214, |
|
"eval_precision_macro": 0.8481396171573297, |
|
"eval_precision_micro": 0.899283031751451, |
|
"eval_precision_weighted": 0.9104794727753962, |
|
"eval_recall_macro": 0.677056197036938, |
|
"eval_recall_micro": 0.899283031751451, |
|
"eval_recall_weighted": 0.899283031751451, |
|
"eval_runtime": 26.2772, |
|
"eval_samples_per_second": 111.465, |
|
"eval_steps_per_second": 7.002, |
|
"step": 5848 |
|
}, |
|
{ |
|
"epoch": 4.001367989056088, |
|
"grad_norm": 9.241109848022461, |
|
"learning_rate": 3.334853321173431e-05, |
|
"loss": 0.3062, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 4.0184678522571815, |
|
"grad_norm": 2.8735108375549316, |
|
"learning_rate": 3.325353397172823e-05, |
|
"loss": 0.3876, |
|
"step": 5875 |
|
}, |
|
{ |
|
"epoch": 4.035567715458276, |
|
"grad_norm": 7.347777843475342, |
|
"learning_rate": 3.315853473172215e-05, |
|
"loss": 0.5103, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 4.052667578659371, |
|
"grad_norm": 17.826900482177734, |
|
"learning_rate": 3.306353549171607e-05, |
|
"loss": 0.4089, |
|
"step": 5925 |
|
}, |
|
{ |
|
"epoch": 4.069767441860465, |
|
"grad_norm": 4.9833292961120605, |
|
"learning_rate": 3.296853625170999e-05, |
|
"loss": 0.4656, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 4.08686730506156, |
|
"grad_norm": 9.229751586914062, |
|
"learning_rate": 3.2873537011703906e-05, |
|
"loss": 0.3591, |
|
"step": 5975 |
|
}, |
|
{ |
|
"epoch": 4.1039671682626535, |
|
"grad_norm": 0.23064318299293518, |
|
"learning_rate": 3.277853777169782e-05, |
|
"loss": 0.4878, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 4.121067031463748, |
|
"grad_norm": 23.045246124267578, |
|
"learning_rate": 3.2683538531691746e-05, |
|
"loss": 0.3487, |
|
"step": 6025 |
|
}, |
|
{ |
|
"epoch": 4.138166894664843, |
|
"grad_norm": 8.428695678710938, |
|
"learning_rate": 3.258853929168566e-05, |
|
"loss": 0.3685, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 4.155266757865937, |
|
"grad_norm": 23.741397857666016, |
|
"learning_rate": 3.249354005167959e-05, |
|
"loss": 0.4878, |
|
"step": 6075 |
|
}, |
|
{ |
|
"epoch": 4.172366621067032, |
|
"grad_norm": 4.824987888336182, |
|
"learning_rate": 3.2398540811673504e-05, |
|
"loss": 0.3667, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 4.1894664842681255, |
|
"grad_norm": 5.439081192016602, |
|
"learning_rate": 3.230354157166743e-05, |
|
"loss": 0.449, |
|
"step": 6125 |
|
}, |
|
{ |
|
"epoch": 4.20656634746922, |
|
"grad_norm": 4.361891269683838, |
|
"learning_rate": 3.2208542331661344e-05, |
|
"loss": 0.3391, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 4.223666210670315, |
|
"grad_norm": 68.6259994506836, |
|
"learning_rate": 3.211354309165527e-05, |
|
"loss": 0.4126, |
|
"step": 6175 |
|
}, |
|
{ |
|
"epoch": 4.240766073871409, |
|
"grad_norm": 5.1873860359191895, |
|
"learning_rate": 3.201854385164919e-05, |
|
"loss": 0.5427, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 4.257865937072504, |
|
"grad_norm": 4.0612359046936035, |
|
"learning_rate": 3.192354461164311e-05, |
|
"loss": 0.45, |
|
"step": 6225 |
|
}, |
|
{ |
|
"epoch": 4.2749658002735975, |
|
"grad_norm": 7.9056010246276855, |
|
"learning_rate": 3.182854537163703e-05, |
|
"loss": 0.3444, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 4.292065663474692, |
|
"grad_norm": 13.213556289672852, |
|
"learning_rate": 3.173354613163095e-05, |
|
"loss": 0.3836, |
|
"step": 6275 |
|
}, |
|
{ |
|
"epoch": 4.309165526675787, |
|
"grad_norm": 2.609199047088623, |
|
"learning_rate": 3.163854689162487e-05, |
|
"loss": 0.3189, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 4.326265389876881, |
|
"grad_norm": 5.747865676879883, |
|
"learning_rate": 3.154354765161879e-05, |
|
"loss": 0.4637, |
|
"step": 6325 |
|
}, |
|
{ |
|
"epoch": 4.343365253077975, |
|
"grad_norm": 11.459451675415039, |
|
"learning_rate": 3.144854841161271e-05, |
|
"loss": 0.4867, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 4.3604651162790695, |
|
"grad_norm": 38.839962005615234, |
|
"learning_rate": 3.135354917160663e-05, |
|
"loss": 0.3632, |
|
"step": 6375 |
|
}, |
|
{ |
|
"epoch": 4.377564979480164, |
|
"grad_norm": 16.965190887451172, |
|
"learning_rate": 3.125854993160055e-05, |
|
"loss": 0.4038, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 4.394664842681259, |
|
"grad_norm": 9.547988891601562, |
|
"learning_rate": 3.116355069159447e-05, |
|
"loss": 0.3246, |
|
"step": 6425 |
|
}, |
|
{ |
|
"epoch": 4.411764705882353, |
|
"grad_norm": 29.236087799072266, |
|
"learning_rate": 3.106855145158839e-05, |
|
"loss": 0.4722, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 4.428864569083447, |
|
"grad_norm": 9.634954452514648, |
|
"learning_rate": 3.0973552211582304e-05, |
|
"loss": 0.4658, |
|
"step": 6475 |
|
}, |
|
{ |
|
"epoch": 4.4459644322845415, |
|
"grad_norm": 14.585807800292969, |
|
"learning_rate": 3.087855297157623e-05, |
|
"loss": 0.3389, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 4.463064295485636, |
|
"grad_norm": 9.657065391540527, |
|
"learning_rate": 3.0783553731570145e-05, |
|
"loss": 0.3755, |
|
"step": 6525 |
|
}, |
|
{ |
|
"epoch": 4.480164158686731, |
|
"grad_norm": 10.945516586303711, |
|
"learning_rate": 3.068855449156407e-05, |
|
"loss": 0.4174, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 4.497264021887825, |
|
"grad_norm": 7.025084018707275, |
|
"learning_rate": 3.0593555251557986e-05, |
|
"loss": 0.5521, |
|
"step": 6575 |
|
}, |
|
{ |
|
"epoch": 4.514363885088919, |
|
"grad_norm": 4.908510684967041, |
|
"learning_rate": 3.049855601155191e-05, |
|
"loss": 0.3591, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 4.5314637482900135, |
|
"grad_norm": 3.1344048976898193, |
|
"learning_rate": 3.0403556771545826e-05, |
|
"loss": 0.3548, |
|
"step": 6625 |
|
}, |
|
{ |
|
"epoch": 4.548563611491108, |
|
"grad_norm": 4.515110015869141, |
|
"learning_rate": 3.030855753153975e-05, |
|
"loss": 0.4855, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 4.565663474692203, |
|
"grad_norm": 7.656958103179932, |
|
"learning_rate": 3.0213558291533667e-05, |
|
"loss": 0.4507, |
|
"step": 6675 |
|
}, |
|
{ |
|
"epoch": 4.582763337893297, |
|
"grad_norm": 16.05572509765625, |
|
"learning_rate": 3.011855905152759e-05, |
|
"loss": 0.481, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 4.599863201094391, |
|
"grad_norm": 3.04164457321167, |
|
"learning_rate": 3.0023559811521507e-05, |
|
"loss": 0.3844, |
|
"step": 6725 |
|
}, |
|
{ |
|
"epoch": 4.6169630642954855, |
|
"grad_norm": 11.163993835449219, |
|
"learning_rate": 2.992856057151543e-05, |
|
"loss": 0.4604, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 4.63406292749658, |
|
"grad_norm": 2.3913285732269287, |
|
"learning_rate": 2.983356133150935e-05, |
|
"loss": 0.4375, |
|
"step": 6775 |
|
}, |
|
{ |
|
"epoch": 4.651162790697675, |
|
"grad_norm": 12.693867683410645, |
|
"learning_rate": 2.9738562091503268e-05, |
|
"loss": 0.3717, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 4.668262653898768, |
|
"grad_norm": 5.616913795471191, |
|
"learning_rate": 2.964356285149719e-05, |
|
"loss": 0.2993, |
|
"step": 6825 |
|
}, |
|
{ |
|
"epoch": 4.685362517099863, |
|
"grad_norm": 10.53511905670166, |
|
"learning_rate": 2.954856361149111e-05, |
|
"loss": 0.4742, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 4.7024623803009575, |
|
"grad_norm": 4.093250751495361, |
|
"learning_rate": 2.9453564371485032e-05, |
|
"loss": 0.3256, |
|
"step": 6875 |
|
}, |
|
{ |
|
"epoch": 4.719562243502052, |
|
"grad_norm": 12.562928199768066, |
|
"learning_rate": 2.935856513147895e-05, |
|
"loss": 0.3647, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 4.736662106703147, |
|
"grad_norm": 28.220108032226562, |
|
"learning_rate": 2.9263565891472873e-05, |
|
"loss": 0.4505, |
|
"step": 6925 |
|
}, |
|
{ |
|
"epoch": 4.75376196990424, |
|
"grad_norm": 11.514904975891113, |
|
"learning_rate": 2.916856665146679e-05, |
|
"loss": 0.3662, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 4.770861833105335, |
|
"grad_norm": 3.687920093536377, |
|
"learning_rate": 2.907356741146071e-05, |
|
"loss": 0.3526, |
|
"step": 6975 |
|
}, |
|
{ |
|
"epoch": 4.7879616963064295, |
|
"grad_norm": 7.425134181976318, |
|
"learning_rate": 2.897856817145463e-05, |
|
"loss": 0.3395, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 4.805061559507524, |
|
"grad_norm": 2.660003185272217, |
|
"learning_rate": 2.888356893144855e-05, |
|
"loss": 0.4889, |
|
"step": 7025 |
|
}, |
|
{ |
|
"epoch": 4.822161422708619, |
|
"grad_norm": 7.089037895202637, |
|
"learning_rate": 2.8788569691442467e-05, |
|
"loss": 0.4015, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 4.839261285909712, |
|
"grad_norm": 11.357674598693848, |
|
"learning_rate": 2.869357045143639e-05, |
|
"loss": 0.4199, |
|
"step": 7075 |
|
}, |
|
{ |
|
"epoch": 4.856361149110807, |
|
"grad_norm": 18.09085464477539, |
|
"learning_rate": 2.8598571211430308e-05, |
|
"loss": 0.4895, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 4.8734610123119015, |
|
"grad_norm": 10.047117233276367, |
|
"learning_rate": 2.850357197142423e-05, |
|
"loss": 0.4722, |
|
"step": 7125 |
|
}, |
|
{ |
|
"epoch": 4.890560875512996, |
|
"grad_norm": 21.57419776916504, |
|
"learning_rate": 2.840857273141815e-05, |
|
"loss": 0.4878, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 4.907660738714091, |
|
"grad_norm": 7.3810577392578125, |
|
"learning_rate": 2.8313573491412072e-05, |
|
"loss": 0.4413, |
|
"step": 7175 |
|
}, |
|
{ |
|
"epoch": 4.924760601915184, |
|
"grad_norm": 1.9230318069458008, |
|
"learning_rate": 2.821857425140599e-05, |
|
"loss": 0.3885, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 4.941860465116279, |
|
"grad_norm": 6.324495792388916, |
|
"learning_rate": 2.812357501139991e-05, |
|
"loss": 0.4513, |
|
"step": 7225 |
|
}, |
|
{ |
|
"epoch": 4.9589603283173735, |
|
"grad_norm": 6.727700710296631, |
|
"learning_rate": 2.802857577139383e-05, |
|
"loss": 0.4085, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 4.976060191518468, |
|
"grad_norm": 7.250006675720215, |
|
"learning_rate": 2.793357653138775e-05, |
|
"loss": 0.3343, |
|
"step": 7275 |
|
}, |
|
{ |
|
"epoch": 4.993160054719562, |
|
"grad_norm": 2.118319034576416, |
|
"learning_rate": 2.7838577291381667e-05, |
|
"loss": 0.4999, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_accuracy": 0.8828951860703311, |
|
"eval_f1_macro": 0.7464038256987945, |
|
"eval_f1_micro": 0.8828951860703311, |
|
"eval_f1_weighted": 0.8814097485811152, |
|
"eval_loss": 0.3324004113674164, |
|
"eval_precision_macro": 0.8694718296335959, |
|
"eval_precision_micro": 0.8828951860703311, |
|
"eval_precision_weighted": 0.9022243801832591, |
|
"eval_recall_macro": 0.7003746302122184, |
|
"eval_recall_micro": 0.8828951860703311, |
|
"eval_recall_weighted": 0.8828951860703311, |
|
"eval_runtime": 25.9346, |
|
"eval_samples_per_second": 112.938, |
|
"eval_steps_per_second": 7.095, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 5.010259917920656, |
|
"grad_norm": 0.7182475328445435, |
|
"learning_rate": 2.774357805137559e-05, |
|
"loss": 0.4188, |
|
"step": 7325 |
|
}, |
|
{ |
|
"epoch": 5.027359781121751, |
|
"grad_norm": 1.1450774669647217, |
|
"learning_rate": 2.7648578811369507e-05, |
|
"loss": 0.394, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 5.0444596443228455, |
|
"grad_norm": 5.432520866394043, |
|
"learning_rate": 2.755357957136343e-05, |
|
"loss": 0.4667, |
|
"step": 7375 |
|
}, |
|
{ |
|
"epoch": 5.06155950752394, |
|
"grad_norm": 9.457201957702637, |
|
"learning_rate": 2.7458580331357355e-05, |
|
"loss": 0.3702, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 5.078659370725034, |
|
"grad_norm": 4.041426181793213, |
|
"learning_rate": 2.736358109135127e-05, |
|
"loss": 0.4232, |
|
"step": 7425 |
|
}, |
|
{ |
|
"epoch": 5.095759233926128, |
|
"grad_norm": 5.445071220397949, |
|
"learning_rate": 2.7268581851345192e-05, |
|
"loss": 0.5922, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 5.112859097127223, |
|
"grad_norm": 21.097936630249023, |
|
"learning_rate": 2.717358261133911e-05, |
|
"loss": 0.3139, |
|
"step": 7475 |
|
}, |
|
{ |
|
"epoch": 5.1299589603283176, |
|
"grad_norm": 7.353487968444824, |
|
"learning_rate": 2.7078583371333032e-05, |
|
"loss": 0.365, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 5.147058823529412, |
|
"grad_norm": 0.25362271070480347, |
|
"learning_rate": 2.698358413132695e-05, |
|
"loss": 0.4082, |
|
"step": 7525 |
|
}, |
|
{ |
|
"epoch": 5.164158686730506, |
|
"grad_norm": 13.88097858428955, |
|
"learning_rate": 2.6888584891320873e-05, |
|
"loss": 0.3095, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 5.1812585499316, |
|
"grad_norm": 0.10476674884557724, |
|
"learning_rate": 2.679358565131479e-05, |
|
"loss": 0.4199, |
|
"step": 7575 |
|
}, |
|
{ |
|
"epoch": 5.198358413132695, |
|
"grad_norm": 15.92738151550293, |
|
"learning_rate": 2.6698586411308713e-05, |
|
"loss": 0.3436, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 5.2154582763337896, |
|
"grad_norm": 3.1535122394561768, |
|
"learning_rate": 2.660358717130263e-05, |
|
"loss": 0.3798, |
|
"step": 7625 |
|
}, |
|
{ |
|
"epoch": 5.232558139534884, |
|
"grad_norm": 20.45937728881836, |
|
"learning_rate": 2.6508587931296554e-05, |
|
"loss": 0.4386, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 5.249658002735978, |
|
"grad_norm": 11.347773551940918, |
|
"learning_rate": 2.641358869129047e-05, |
|
"loss": 0.2972, |
|
"step": 7675 |
|
}, |
|
{ |
|
"epoch": 5.266757865937072, |
|
"grad_norm": 2.8144240379333496, |
|
"learning_rate": 2.631858945128439e-05, |
|
"loss": 0.3987, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 5.283857729138167, |
|
"grad_norm": 5.629673480987549, |
|
"learning_rate": 2.6223590211278308e-05, |
|
"loss": 0.3263, |
|
"step": 7725 |
|
}, |
|
{ |
|
"epoch": 5.300957592339262, |
|
"grad_norm": 2.6866259574890137, |
|
"learning_rate": 2.6128590971272232e-05, |
|
"loss": 0.3661, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 5.318057455540355, |
|
"grad_norm": 14.74667739868164, |
|
"learning_rate": 2.603359173126615e-05, |
|
"loss": 0.3379, |
|
"step": 7775 |
|
}, |
|
{ |
|
"epoch": 5.33515731874145, |
|
"grad_norm": 3.7586936950683594, |
|
"learning_rate": 2.5938592491260072e-05, |
|
"loss": 0.3503, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 5.352257181942544, |
|
"grad_norm": 4.488011360168457, |
|
"learning_rate": 2.584359325125399e-05, |
|
"loss": 0.4632, |
|
"step": 7825 |
|
}, |
|
{ |
|
"epoch": 5.369357045143639, |
|
"grad_norm": 2.083674430847168, |
|
"learning_rate": 2.5748594011247913e-05, |
|
"loss": 0.3565, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 5.386456908344734, |
|
"grad_norm": 7.482815265655518, |
|
"learning_rate": 2.565359477124183e-05, |
|
"loss": 0.3507, |
|
"step": 7875 |
|
}, |
|
{ |
|
"epoch": 5.403556771545827, |
|
"grad_norm": 15.02985954284668, |
|
"learning_rate": 2.5558595531235753e-05, |
|
"loss": 0.3787, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 5.420656634746922, |
|
"grad_norm": 8.766378402709961, |
|
"learning_rate": 2.546359629122967e-05, |
|
"loss": 0.4566, |
|
"step": 7925 |
|
}, |
|
{ |
|
"epoch": 5.437756497948016, |
|
"grad_norm": 13.31812858581543, |
|
"learning_rate": 2.536859705122359e-05, |
|
"loss": 0.4214, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 5.454856361149111, |
|
"grad_norm": 3.4309494495391846, |
|
"learning_rate": 2.5273597811217514e-05, |
|
"loss": 0.4198, |
|
"step": 7975 |
|
}, |
|
{ |
|
"epoch": 5.471956224350206, |
|
"grad_norm": 13.40803337097168, |
|
"learning_rate": 2.517859857121143e-05, |
|
"loss": 0.3288, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 5.489056087551299, |
|
"grad_norm": 2.447211265563965, |
|
"learning_rate": 2.5083599331205355e-05, |
|
"loss": 0.3237, |
|
"step": 8025 |
|
}, |
|
{ |
|
"epoch": 5.506155950752394, |
|
"grad_norm": 3.199465036392212, |
|
"learning_rate": 2.498860009119927e-05, |
|
"loss": 0.455, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 5.523255813953488, |
|
"grad_norm": 20.3509521484375, |
|
"learning_rate": 2.4893600851193192e-05, |
|
"loss": 0.5065, |
|
"step": 8075 |
|
}, |
|
{ |
|
"epoch": 5.540355677154583, |
|
"grad_norm": 51.09640884399414, |
|
"learning_rate": 2.4798601611187112e-05, |
|
"loss": 0.3341, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 5.557455540355678, |
|
"grad_norm": 2.58319091796875, |
|
"learning_rate": 2.4703602371181032e-05, |
|
"loss": 0.3664, |
|
"step": 8125 |
|
}, |
|
{ |
|
"epoch": 5.574555403556771, |
|
"grad_norm": 8.091569900512695, |
|
"learning_rate": 2.460860313117495e-05, |
|
"loss": 0.3487, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 5.591655266757866, |
|
"grad_norm": 3.2208356857299805, |
|
"learning_rate": 2.451360389116887e-05, |
|
"loss": 0.4357, |
|
"step": 8175 |
|
}, |
|
{ |
|
"epoch": 5.60875512995896, |
|
"grad_norm": 7.119288444519043, |
|
"learning_rate": 2.441860465116279e-05, |
|
"loss": 0.4647, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 5.625854993160055, |
|
"grad_norm": 2.9200692176818848, |
|
"learning_rate": 2.432360541115671e-05, |
|
"loss": 0.4414, |
|
"step": 8225 |
|
}, |
|
{ |
|
"epoch": 5.642954856361149, |
|
"grad_norm": 17.426734924316406, |
|
"learning_rate": 2.422860617115063e-05, |
|
"loss": 0.3779, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 5.660054719562243, |
|
"grad_norm": 6.002254009246826, |
|
"learning_rate": 2.413360693114455e-05, |
|
"loss": 0.4117, |
|
"step": 8275 |
|
}, |
|
{ |
|
"epoch": 5.677154582763338, |
|
"grad_norm": 10.85733413696289, |
|
"learning_rate": 2.4038607691138474e-05, |
|
"loss": 0.3918, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 5.694254445964432, |
|
"grad_norm": 22.411169052124023, |
|
"learning_rate": 2.3943608451132395e-05, |
|
"loss": 0.3103, |
|
"step": 8325 |
|
}, |
|
{ |
|
"epoch": 5.711354309165527, |
|
"grad_norm": 14.168360710144043, |
|
"learning_rate": 2.384860921112631e-05, |
|
"loss": 0.3941, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 5.728454172366621, |
|
"grad_norm": 3.775557279586792, |
|
"learning_rate": 2.3753609971120232e-05, |
|
"loss": 0.4347, |
|
"step": 8375 |
|
}, |
|
{ |
|
"epoch": 5.745554035567715, |
|
"grad_norm": 2.539484977722168, |
|
"learning_rate": 2.3658610731114152e-05, |
|
"loss": 0.273, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 5.76265389876881, |
|
"grad_norm": 2.8777148723602295, |
|
"learning_rate": 2.3563611491108072e-05, |
|
"loss": 0.3594, |
|
"step": 8425 |
|
}, |
|
{ |
|
"epoch": 5.779753761969904, |
|
"grad_norm": 3.811925172805786, |
|
"learning_rate": 2.3468612251101993e-05, |
|
"loss": 0.3306, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 5.796853625170999, |
|
"grad_norm": 10.664482116699219, |
|
"learning_rate": 2.3373613011095913e-05, |
|
"loss": 0.3625, |
|
"step": 8475 |
|
}, |
|
{ |
|
"epoch": 5.813953488372093, |
|
"grad_norm": 1.653448462486267, |
|
"learning_rate": 2.3278613771089833e-05, |
|
"loss": 0.298, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 5.831053351573187, |
|
"grad_norm": 3.4559919834136963, |
|
"learning_rate": 2.3183614531083753e-05, |
|
"loss": 0.355, |
|
"step": 8525 |
|
}, |
|
{ |
|
"epoch": 5.848153214774282, |
|
"grad_norm": 4.02964973449707, |
|
"learning_rate": 2.3088615291077674e-05, |
|
"loss": 0.4186, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 5.865253077975376, |
|
"grad_norm": 7.618357181549072, |
|
"learning_rate": 2.2993616051071594e-05, |
|
"loss": 0.5639, |
|
"step": 8575 |
|
}, |
|
{ |
|
"epoch": 5.882352941176471, |
|
"grad_norm": 4.395501136779785, |
|
"learning_rate": 2.289861681106551e-05, |
|
"loss": 0.3863, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 5.899452804377565, |
|
"grad_norm": 4.189037322998047, |
|
"learning_rate": 2.280361757105943e-05, |
|
"loss": 0.3274, |
|
"step": 8625 |
|
}, |
|
{ |
|
"epoch": 5.916552667578659, |
|
"grad_norm": 1.825620412826538, |
|
"learning_rate": 2.270861833105335e-05, |
|
"loss": 0.4074, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 5.933652530779754, |
|
"grad_norm": 2.863705635070801, |
|
"learning_rate": 2.2613619091047272e-05, |
|
"loss": 0.3973, |
|
"step": 8675 |
|
}, |
|
{ |
|
"epoch": 5.950752393980848, |
|
"grad_norm": 3.3748676776885986, |
|
"learning_rate": 2.2518619851041192e-05, |
|
"loss": 0.3318, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 5.967852257181942, |
|
"grad_norm": 18.595125198364258, |
|
"learning_rate": 2.2423620611035112e-05, |
|
"loss": 0.355, |
|
"step": 8725 |
|
}, |
|
{ |
|
"epoch": 5.984952120383037, |
|
"grad_norm": 8.303060531616211, |
|
"learning_rate": 2.2328621371029033e-05, |
|
"loss": 0.3325, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_accuracy": 0.9235233868214408, |
|
"eval_f1_macro": 0.8015916762918697, |
|
"eval_f1_micro": 0.9235233868214408, |
|
"eval_f1_weighted": 0.922485221004273, |
|
"eval_loss": 0.24165508151054382, |
|
"eval_precision_macro": 0.8743011626241778, |
|
"eval_precision_micro": 0.9235233868214408, |
|
"eval_precision_weighted": 0.9276221151549562, |
|
"eval_recall_macro": 0.7664921252239268, |
|
"eval_recall_micro": 0.9235233868214408, |
|
"eval_recall_weighted": 0.9235233868214408, |
|
"eval_runtime": 25.9565, |
|
"eval_samples_per_second": 112.842, |
|
"eval_steps_per_second": 7.089, |
|
"step": 8772 |
|
}, |
|
{ |
|
"epoch": 6.002051983584131, |
|
"grad_norm": 11.892972946166992, |
|
"learning_rate": 2.2233622131022953e-05, |
|
"loss": 0.3357, |
|
"step": 8775 |
|
}, |
|
{ |
|
"epoch": 6.019151846785226, |
|
"grad_norm": 7.985339164733887, |
|
"learning_rate": 2.2138622891016873e-05, |
|
"loss": 0.4488, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 6.03625170998632, |
|
"grad_norm": 6.227213382720947, |
|
"learning_rate": 2.2043623651010793e-05, |
|
"loss": 0.3902, |
|
"step": 8825 |
|
}, |
|
{ |
|
"epoch": 6.053351573187414, |
|
"grad_norm": 19.529422760009766, |
|
"learning_rate": 2.194862441100471e-05, |
|
"loss": 0.4248, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 6.070451436388509, |
|
"grad_norm": 19.405685424804688, |
|
"learning_rate": 2.185362517099863e-05, |
|
"loss": 0.2371, |
|
"step": 8875 |
|
}, |
|
{ |
|
"epoch": 6.087551299589603, |
|
"grad_norm": 31.696195602416992, |
|
"learning_rate": 2.1758625930992554e-05, |
|
"loss": 0.511, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 6.104651162790698, |
|
"grad_norm": 23.88582992553711, |
|
"learning_rate": 2.1663626690986475e-05, |
|
"loss": 0.2996, |
|
"step": 8925 |
|
}, |
|
{ |
|
"epoch": 6.121751025991792, |
|
"grad_norm": 7.8858561515808105, |
|
"learning_rate": 2.1568627450980395e-05, |
|
"loss": 0.2554, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 6.138850889192886, |
|
"grad_norm": 2.5783660411834717, |
|
"learning_rate": 2.1473628210974315e-05, |
|
"loss": 0.4588, |
|
"step": 8975 |
|
}, |
|
{ |
|
"epoch": 6.155950752393981, |
|
"grad_norm": 2.6995346546173096, |
|
"learning_rate": 2.1378628970968235e-05, |
|
"loss": 0.3833, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 6.173050615595075, |
|
"grad_norm": 6.289373874664307, |
|
"learning_rate": 2.1283629730962156e-05, |
|
"loss": 0.4185, |
|
"step": 9025 |
|
}, |
|
{ |
|
"epoch": 6.19015047879617, |
|
"grad_norm": 6.533155918121338, |
|
"learning_rate": 2.1188630490956073e-05, |
|
"loss": 0.2384, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 6.207250341997264, |
|
"grad_norm": 17.285852432250977, |
|
"learning_rate": 2.1093631250949993e-05, |
|
"loss": 0.2801, |
|
"step": 9075 |
|
}, |
|
{ |
|
"epoch": 6.224350205198358, |
|
"grad_norm": 9.399192810058594, |
|
"learning_rate": 2.0998632010943913e-05, |
|
"loss": 0.5708, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 6.241450068399453, |
|
"grad_norm": 12.317584991455078, |
|
"learning_rate": 2.0903632770937833e-05, |
|
"loss": 0.4415, |
|
"step": 9125 |
|
}, |
|
{ |
|
"epoch": 6.258549931600547, |
|
"grad_norm": 7.904637336730957, |
|
"learning_rate": 2.0808633530931754e-05, |
|
"loss": 0.3887, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 6.275649794801642, |
|
"grad_norm": 8.931986808776855, |
|
"learning_rate": 2.0713634290925674e-05, |
|
"loss": 0.4239, |
|
"step": 9175 |
|
}, |
|
{ |
|
"epoch": 6.292749658002736, |
|
"grad_norm": 7.259034633636475, |
|
"learning_rate": 2.0618635050919594e-05, |
|
"loss": 0.3405, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 6.30984952120383, |
|
"grad_norm": 2.570568084716797, |
|
"learning_rate": 2.0523635810913514e-05, |
|
"loss": 0.3136, |
|
"step": 9225 |
|
}, |
|
{ |
|
"epoch": 6.326949384404925, |
|
"grad_norm": 16.150344848632812, |
|
"learning_rate": 2.0428636570907435e-05, |
|
"loss": 0.4056, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 6.344049247606019, |
|
"grad_norm": 15.508628845214844, |
|
"learning_rate": 2.0333637330901355e-05, |
|
"loss": 0.4423, |
|
"step": 9275 |
|
}, |
|
{ |
|
"epoch": 6.361149110807114, |
|
"grad_norm": 3.140324354171753, |
|
"learning_rate": 2.0238638090895272e-05, |
|
"loss": 0.3972, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 6.378248974008208, |
|
"grad_norm": 2.326601505279541, |
|
"learning_rate": 2.0143638850889192e-05, |
|
"loss": 0.3066, |
|
"step": 9325 |
|
}, |
|
{ |
|
"epoch": 6.395348837209302, |
|
"grad_norm": 2.924823760986328, |
|
"learning_rate": 2.0048639610883112e-05, |
|
"loss": 0.3117, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 6.412448700410397, |
|
"grad_norm": 13.392170906066895, |
|
"learning_rate": 1.9953640370877033e-05, |
|
"loss": 0.2947, |
|
"step": 9375 |
|
}, |
|
{ |
|
"epoch": 6.429548563611491, |
|
"grad_norm": 2.736306667327881, |
|
"learning_rate": 1.9858641130870953e-05, |
|
"loss": 0.2432, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 6.446648426812586, |
|
"grad_norm": 2.8124167919158936, |
|
"learning_rate": 1.9763641890864873e-05, |
|
"loss": 0.4835, |
|
"step": 9425 |
|
}, |
|
{ |
|
"epoch": 6.46374829001368, |
|
"grad_norm": 8.63110637664795, |
|
"learning_rate": 1.9668642650858794e-05, |
|
"loss": 0.3444, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 6.480848153214774, |
|
"grad_norm": 12.762578010559082, |
|
"learning_rate": 1.9573643410852714e-05, |
|
"loss": 0.2832, |
|
"step": 9475 |
|
}, |
|
{ |
|
"epoch": 6.497948016415869, |
|
"grad_norm": 4.230663776397705, |
|
"learning_rate": 1.9478644170846634e-05, |
|
"loss": 0.3595, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 6.515047879616963, |
|
"grad_norm": 2.9571053981781006, |
|
"learning_rate": 1.9383644930840554e-05, |
|
"loss": 0.3388, |
|
"step": 9525 |
|
}, |
|
{ |
|
"epoch": 6.532147742818058, |
|
"grad_norm": 3.3192389011383057, |
|
"learning_rate": 1.9288645690834475e-05, |
|
"loss": 0.4937, |
|
"step": 9550 |
|
}, |
|
{ |
|
"epoch": 6.549247606019152, |
|
"grad_norm": 4.746058940887451, |
|
"learning_rate": 1.9193646450828395e-05, |
|
"loss": 0.4209, |
|
"step": 9575 |
|
}, |
|
{ |
|
"epoch": 6.566347469220246, |
|
"grad_norm": 1.2305563688278198, |
|
"learning_rate": 1.9098647210822315e-05, |
|
"loss": 0.361, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 6.583447332421341, |
|
"grad_norm": 0.1572732776403427, |
|
"learning_rate": 1.9003647970816235e-05, |
|
"loss": 0.3816, |
|
"step": 9625 |
|
}, |
|
{ |
|
"epoch": 6.600547195622435, |
|
"grad_norm": 8.715572357177734, |
|
"learning_rate": 1.8908648730810156e-05, |
|
"loss": 0.3678, |
|
"step": 9650 |
|
}, |
|
{ |
|
"epoch": 6.617647058823529, |
|
"grad_norm": 4.759888172149658, |
|
"learning_rate": 1.8813649490804076e-05, |
|
"loss": 0.3702, |
|
"step": 9675 |
|
}, |
|
{ |
|
"epoch": 6.634746922024624, |
|
"grad_norm": 3.071633815765381, |
|
"learning_rate": 1.8718650250797996e-05, |
|
"loss": 0.3771, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 6.651846785225718, |
|
"grad_norm": 13.45046615600586, |
|
"learning_rate": 1.8623651010791917e-05, |
|
"loss": 0.5068, |
|
"step": 9725 |
|
}, |
|
{ |
|
"epoch": 6.668946648426813, |
|
"grad_norm": 15.248006820678711, |
|
"learning_rate": 1.8528651770785833e-05, |
|
"loss": 0.3403, |
|
"step": 9750 |
|
}, |
|
{ |
|
"epoch": 6.686046511627907, |
|
"grad_norm": 2.1745738983154297, |
|
"learning_rate": 1.8433652530779754e-05, |
|
"loss": 0.3092, |
|
"step": 9775 |
|
}, |
|
{ |
|
"epoch": 6.703146374829001, |
|
"grad_norm": 10.927750587463379, |
|
"learning_rate": 1.8338653290773674e-05, |
|
"loss": 0.3776, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 6.720246238030096, |
|
"grad_norm": 24.628570556640625, |
|
"learning_rate": 1.8243654050767594e-05, |
|
"loss": 0.4011, |
|
"step": 9825 |
|
}, |
|
{ |
|
"epoch": 6.73734610123119, |
|
"grad_norm": 1.9271844625473022, |
|
"learning_rate": 1.8148654810761515e-05, |
|
"loss": 0.3472, |
|
"step": 9850 |
|
}, |
|
{ |
|
"epoch": 6.754445964432285, |
|
"grad_norm": 11.969741821289062, |
|
"learning_rate": 1.8053655570755435e-05, |
|
"loss": 0.3599, |
|
"step": 9875 |
|
}, |
|
{ |
|
"epoch": 6.771545827633379, |
|
"grad_norm": 10.276418685913086, |
|
"learning_rate": 1.7958656330749355e-05, |
|
"loss": 0.3054, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 6.788645690834473, |
|
"grad_norm": 1.4558168649673462, |
|
"learning_rate": 1.7863657090743275e-05, |
|
"loss": 0.363, |
|
"step": 9925 |
|
}, |
|
{ |
|
"epoch": 6.805745554035568, |
|
"grad_norm": 6.96836519241333, |
|
"learning_rate": 1.7768657850737196e-05, |
|
"loss": 0.4066, |
|
"step": 9950 |
|
}, |
|
{ |
|
"epoch": 6.822845417236662, |
|
"grad_norm": 11.515777587890625, |
|
"learning_rate": 1.7673658610731113e-05, |
|
"loss": 0.3551, |
|
"step": 9975 |
|
}, |
|
{ |
|
"epoch": 6.839945280437757, |
|
"grad_norm": 13.478057861328125, |
|
"learning_rate": 1.7578659370725033e-05, |
|
"loss": 0.3799, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 6.857045143638851, |
|
"grad_norm": 11.620177268981934, |
|
"learning_rate": 1.7483660130718953e-05, |
|
"loss": 0.3375, |
|
"step": 10025 |
|
}, |
|
{ |
|
"epoch": 6.874145006839945, |
|
"grad_norm": 3.876800537109375, |
|
"learning_rate": 1.7388660890712873e-05, |
|
"loss": 0.5015, |
|
"step": 10050 |
|
}, |
|
{ |
|
"epoch": 6.89124487004104, |
|
"grad_norm": 1.9693479537963867, |
|
"learning_rate": 1.7293661650706794e-05, |
|
"loss": 0.2689, |
|
"step": 10075 |
|
}, |
|
{ |
|
"epoch": 6.908344733242134, |
|
"grad_norm": 14.633630752563477, |
|
"learning_rate": 1.7198662410700717e-05, |
|
"loss": 0.3271, |
|
"step": 10100 |
|
}, |
|
{ |
|
"epoch": 6.925444596443229, |
|
"grad_norm": 4.947651386260986, |
|
"learning_rate": 1.7103663170694638e-05, |
|
"loss": 0.434, |
|
"step": 10125 |
|
}, |
|
{ |
|
"epoch": 6.942544459644322, |
|
"grad_norm": 2.0154216289520264, |
|
"learning_rate": 1.7008663930688558e-05, |
|
"loss": 0.4467, |
|
"step": 10150 |
|
}, |
|
{ |
|
"epoch": 6.959644322845417, |
|
"grad_norm": 14.244186401367188, |
|
"learning_rate": 1.6913664690682475e-05, |
|
"loss": 0.3106, |
|
"step": 10175 |
|
}, |
|
{ |
|
"epoch": 6.976744186046512, |
|
"grad_norm": 4.09621000289917, |
|
"learning_rate": 1.6818665450676395e-05, |
|
"loss": 0.3063, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 6.993844049247606, |
|
"grad_norm": 2.0784831047058105, |
|
"learning_rate": 1.6723666210670315e-05, |
|
"loss": 0.3515, |
|
"step": 10225 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_accuracy": 0.9238648002731308, |
|
"eval_f1_macro": 0.8426039748613977, |
|
"eval_f1_micro": 0.9238648002731308, |
|
"eval_f1_weighted": 0.9239964103657616, |
|
"eval_loss": 0.22421783208847046, |
|
"eval_precision_macro": 0.9018755264534066, |
|
"eval_precision_micro": 0.9238648002731308, |
|
"eval_precision_weighted": 0.9296443693001667, |
|
"eval_recall_macro": 0.8036953584993286, |
|
"eval_recall_micro": 0.9238648002731308, |
|
"eval_recall_weighted": 0.9238648002731308, |
|
"eval_runtime": 25.5062, |
|
"eval_samples_per_second": 114.835, |
|
"eval_steps_per_second": 7.214, |
|
"step": 10234 |
|
}, |
|
{ |
|
"epoch": 7.010943912448701, |
|
"grad_norm": 4.232228755950928, |
|
"learning_rate": 1.6628666970664236e-05, |
|
"loss": 0.4255, |
|
"step": 10250 |
|
}, |
|
{ |
|
"epoch": 7.028043775649794, |
|
"grad_norm": 12.369464874267578, |
|
"learning_rate": 1.6533667730658156e-05, |
|
"loss": 0.3828, |
|
"step": 10275 |
|
}, |
|
{ |
|
"epoch": 7.045143638850889, |
|
"grad_norm": 8.454262733459473, |
|
"learning_rate": 1.6438668490652076e-05, |
|
"loss": 0.287, |
|
"step": 10300 |
|
}, |
|
{ |
|
"epoch": 7.062243502051984, |
|
"grad_norm": 2.064643144607544, |
|
"learning_rate": 1.6343669250645996e-05, |
|
"loss": 0.3688, |
|
"step": 10325 |
|
}, |
|
{ |
|
"epoch": 7.079343365253078, |
|
"grad_norm": 22.918745040893555, |
|
"learning_rate": 1.6248670010639917e-05, |
|
"loss": 0.3487, |
|
"step": 10350 |
|
}, |
|
{ |
|
"epoch": 7.096443228454173, |
|
"grad_norm": 3.0556788444519043, |
|
"learning_rate": 1.6153670770633837e-05, |
|
"loss": 0.2822, |
|
"step": 10375 |
|
}, |
|
{ |
|
"epoch": 7.113543091655266, |
|
"grad_norm": 11.955765724182129, |
|
"learning_rate": 1.6058671530627757e-05, |
|
"loss": 0.4273, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 7.130642954856361, |
|
"grad_norm": 13.998736381530762, |
|
"learning_rate": 1.5963672290621674e-05, |
|
"loss": 0.3034, |
|
"step": 10425 |
|
}, |
|
{ |
|
"epoch": 7.147742818057456, |
|
"grad_norm": 3.8617422580718994, |
|
"learning_rate": 1.5868673050615594e-05, |
|
"loss": 0.3679, |
|
"step": 10450 |
|
}, |
|
{ |
|
"epoch": 7.16484268125855, |
|
"grad_norm": 1.1045358180999756, |
|
"learning_rate": 1.5773673810609515e-05, |
|
"loss": 0.4234, |
|
"step": 10475 |
|
}, |
|
{ |
|
"epoch": 7.181942544459645, |
|
"grad_norm": 5.257449626922607, |
|
"learning_rate": 1.5678674570603435e-05, |
|
"loss": 0.3665, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 7.199042407660738, |
|
"grad_norm": 7.530872821807861, |
|
"learning_rate": 1.5583675330597355e-05, |
|
"loss": 0.4602, |
|
"step": 10525 |
|
}, |
|
{ |
|
"epoch": 7.216142270861833, |
|
"grad_norm": 0.4516567885875702, |
|
"learning_rate": 1.5488676090591275e-05, |
|
"loss": 0.3864, |
|
"step": 10550 |
|
}, |
|
{ |
|
"epoch": 7.233242134062928, |
|
"grad_norm": 22.563528060913086, |
|
"learning_rate": 1.5393676850585196e-05, |
|
"loss": 0.3435, |
|
"step": 10575 |
|
}, |
|
{ |
|
"epoch": 7.250341997264022, |
|
"grad_norm": 13.331972122192383, |
|
"learning_rate": 1.5298677610579116e-05, |
|
"loss": 0.2879, |
|
"step": 10600 |
|
}, |
|
{ |
|
"epoch": 7.267441860465116, |
|
"grad_norm": 9.387733459472656, |
|
"learning_rate": 1.5203678370573035e-05, |
|
"loss": 0.3333, |
|
"step": 10625 |
|
}, |
|
{ |
|
"epoch": 7.2845417236662104, |
|
"grad_norm": 7.556676864624023, |
|
"learning_rate": 1.5108679130566955e-05, |
|
"loss": 0.4042, |
|
"step": 10650 |
|
}, |
|
{ |
|
"epoch": 7.301641586867305, |
|
"grad_norm": 6.794082164764404, |
|
"learning_rate": 1.5013679890560875e-05, |
|
"loss": 0.4062, |
|
"step": 10675 |
|
}, |
|
{ |
|
"epoch": 7.3187414500684, |
|
"grad_norm": 2.1838858127593994, |
|
"learning_rate": 1.4918680650554795e-05, |
|
"loss": 0.3238, |
|
"step": 10700 |
|
}, |
|
{ |
|
"epoch": 7.335841313269494, |
|
"grad_norm": 10.469732284545898, |
|
"learning_rate": 1.4823681410548717e-05, |
|
"loss": 0.4204, |
|
"step": 10725 |
|
}, |
|
{ |
|
"epoch": 7.352941176470588, |
|
"grad_norm": 2.8655450344085693, |
|
"learning_rate": 1.4728682170542638e-05, |
|
"loss": 0.3095, |
|
"step": 10750 |
|
}, |
|
{ |
|
"epoch": 7.3700410396716824, |
|
"grad_norm": 7.8528337478637695, |
|
"learning_rate": 1.4633682930536558e-05, |
|
"loss": 0.2917, |
|
"step": 10775 |
|
}, |
|
{ |
|
"epoch": 7.387140902872777, |
|
"grad_norm": 1.5195367336273193, |
|
"learning_rate": 1.4538683690530477e-05, |
|
"loss": 0.2933, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 7.404240766073872, |
|
"grad_norm": 12.097100257873535, |
|
"learning_rate": 1.4443684450524397e-05, |
|
"loss": 0.4429, |
|
"step": 10825 |
|
}, |
|
{ |
|
"epoch": 7.421340629274966, |
|
"grad_norm": 16.263513565063477, |
|
"learning_rate": 1.4348685210518317e-05, |
|
"loss": 0.3024, |
|
"step": 10850 |
|
}, |
|
{ |
|
"epoch": 7.43844049247606, |
|
"grad_norm": 1.629339337348938, |
|
"learning_rate": 1.4253685970512237e-05, |
|
"loss": 0.2379, |
|
"step": 10875 |
|
}, |
|
{ |
|
"epoch": 7.4555403556771545, |
|
"grad_norm": 0.13442982733249664, |
|
"learning_rate": 1.4158686730506158e-05, |
|
"loss": 0.3297, |
|
"step": 10900 |
|
}, |
|
{ |
|
"epoch": 7.472640218878249, |
|
"grad_norm": 22.14508628845215, |
|
"learning_rate": 1.4063687490500076e-05, |
|
"loss": 0.3236, |
|
"step": 10925 |
|
}, |
|
{ |
|
"epoch": 7.489740082079344, |
|
"grad_norm": 0.142426535487175, |
|
"learning_rate": 1.3968688250493997e-05, |
|
"loss": 0.3527, |
|
"step": 10950 |
|
}, |
|
{ |
|
"epoch": 7.506839945280438, |
|
"grad_norm": 0.38898319005966187, |
|
"learning_rate": 1.3873689010487917e-05, |
|
"loss": 0.3389, |
|
"step": 10975 |
|
}, |
|
{ |
|
"epoch": 7.523939808481532, |
|
"grad_norm": 0.7467881441116333, |
|
"learning_rate": 1.3778689770481837e-05, |
|
"loss": 0.3385, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 7.5410396716826265, |
|
"grad_norm": 12.254922866821289, |
|
"learning_rate": 1.3683690530475757e-05, |
|
"loss": 0.4477, |
|
"step": 11025 |
|
}, |
|
{ |
|
"epoch": 7.558139534883721, |
|
"grad_norm": 8.779296875, |
|
"learning_rate": 1.3588691290469676e-05, |
|
"loss": 0.3468, |
|
"step": 11050 |
|
}, |
|
{ |
|
"epoch": 7.575239398084816, |
|
"grad_norm": 39.5240478515625, |
|
"learning_rate": 1.3493692050463596e-05, |
|
"loss": 0.3673, |
|
"step": 11075 |
|
}, |
|
{ |
|
"epoch": 7.592339261285909, |
|
"grad_norm": 11.708680152893066, |
|
"learning_rate": 1.3398692810457516e-05, |
|
"loss": 0.3424, |
|
"step": 11100 |
|
}, |
|
{ |
|
"epoch": 7.609439124487004, |
|
"grad_norm": 4.1340131759643555, |
|
"learning_rate": 1.3303693570451437e-05, |
|
"loss": 0.3234, |
|
"step": 11125 |
|
}, |
|
{ |
|
"epoch": 7.6265389876880985, |
|
"grad_norm": 34.217979431152344, |
|
"learning_rate": 1.3208694330445357e-05, |
|
"loss": 0.3146, |
|
"step": 11150 |
|
}, |
|
{ |
|
"epoch": 7.643638850889193, |
|
"grad_norm": 2.088963508605957, |
|
"learning_rate": 1.3113695090439276e-05, |
|
"loss": 0.3924, |
|
"step": 11175 |
|
}, |
|
{ |
|
"epoch": 7.660738714090288, |
|
"grad_norm": 14.35055160522461, |
|
"learning_rate": 1.3018695850433196e-05, |
|
"loss": 0.3071, |
|
"step": 11200 |
|
}, |
|
{ |
|
"epoch": 7.677838577291381, |
|
"grad_norm": 7.640285491943359, |
|
"learning_rate": 1.2923696610427116e-05, |
|
"loss": 0.3105, |
|
"step": 11225 |
|
}, |
|
{ |
|
"epoch": 7.694938440492476, |
|
"grad_norm": 10.794476509094238, |
|
"learning_rate": 1.283249734002128e-05, |
|
"loss": 0.3436, |
|
"step": 11250 |
|
}, |
|
{ |
|
"epoch": 7.7120383036935705, |
|
"grad_norm": 21.634641647338867, |
|
"learning_rate": 1.27374981000152e-05, |
|
"loss": 0.2365, |
|
"step": 11275 |
|
}, |
|
{ |
|
"epoch": 7.729138166894665, |
|
"grad_norm": 1.7207705974578857, |
|
"learning_rate": 1.2642498860009118e-05, |
|
"loss": 0.3978, |
|
"step": 11300 |
|
}, |
|
{ |
|
"epoch": 7.74623803009576, |
|
"grad_norm": 2.8081226348876953, |
|
"learning_rate": 1.2547499620003039e-05, |
|
"loss": 0.2167, |
|
"step": 11325 |
|
}, |
|
{ |
|
"epoch": 7.763337893296853, |
|
"grad_norm": 3.3401083946228027, |
|
"learning_rate": 1.245250037999696e-05, |
|
"loss": 0.3313, |
|
"step": 11350 |
|
}, |
|
{ |
|
"epoch": 7.780437756497948, |
|
"grad_norm": 24.61179542541504, |
|
"learning_rate": 1.2357501139990881e-05, |
|
"loss": 0.4088, |
|
"step": 11375 |
|
}, |
|
{ |
|
"epoch": 7.7975376196990425, |
|
"grad_norm": 3.2282018661499023, |
|
"learning_rate": 1.22625018999848e-05, |
|
"loss": 0.3965, |
|
"step": 11400 |
|
}, |
|
{ |
|
"epoch": 7.814637482900137, |
|
"grad_norm": 5.257138252258301, |
|
"learning_rate": 1.216750265997872e-05, |
|
"loss": 0.275, |
|
"step": 11425 |
|
}, |
|
{ |
|
"epoch": 7.831737346101232, |
|
"grad_norm": 1.5772024393081665, |
|
"learning_rate": 1.207250341997264e-05, |
|
"loss": 0.1914, |
|
"step": 11450 |
|
}, |
|
{ |
|
"epoch": 7.848837209302325, |
|
"grad_norm": 7.5829033851623535, |
|
"learning_rate": 1.197750417996656e-05, |
|
"loss": 0.5063, |
|
"step": 11475 |
|
}, |
|
{ |
|
"epoch": 7.86593707250342, |
|
"grad_norm": 3.2422189712524414, |
|
"learning_rate": 1.188250493996048e-05, |
|
"loss": 0.3235, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 7.8830369357045145, |
|
"grad_norm": 2.9335296154022217, |
|
"learning_rate": 1.1787505699954401e-05, |
|
"loss": 0.2887, |
|
"step": 11525 |
|
}, |
|
{ |
|
"epoch": 7.900136798905609, |
|
"grad_norm": 0.015060103498399258, |
|
"learning_rate": 1.1692506459948321e-05, |
|
"loss": 0.4049, |
|
"step": 11550 |
|
}, |
|
{ |
|
"epoch": 7.917236662106703, |
|
"grad_norm": 9.77502155303955, |
|
"learning_rate": 1.1597507219942242e-05, |
|
"loss": 0.2858, |
|
"step": 11575 |
|
}, |
|
{ |
|
"epoch": 7.934336525307797, |
|
"grad_norm": 1.4075678586959839, |
|
"learning_rate": 1.1502507979936162e-05, |
|
"loss": 0.3153, |
|
"step": 11600 |
|
}, |
|
{ |
|
"epoch": 7.951436388508892, |
|
"grad_norm": 12.93342399597168, |
|
"learning_rate": 1.140750873993008e-05, |
|
"loss": 0.3199, |
|
"step": 11625 |
|
}, |
|
{ |
|
"epoch": 7.9685362517099865, |
|
"grad_norm": 19.707088470458984, |
|
"learning_rate": 1.1312509499924e-05, |
|
"loss": 0.5197, |
|
"step": 11650 |
|
}, |
|
{ |
|
"epoch": 7.985636114911081, |
|
"grad_norm": 2.147141695022583, |
|
"learning_rate": 1.1217510259917921e-05, |
|
"loss": 0.338, |
|
"step": 11675 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_accuracy": 0.9320587231136906, |
|
"eval_f1_macro": 0.8801923417646881, |
|
"eval_f1_micro": 0.9320587231136906, |
|
"eval_f1_weighted": 0.9322151264732859, |
|
"eval_loss": 0.21217399835586548, |
|
"eval_precision_macro": 0.9267115227700036, |
|
"eval_precision_micro": 0.9320587231136906, |
|
"eval_precision_weighted": 0.9357267323781668, |
|
"eval_recall_macro": 0.8522160392320227, |
|
"eval_recall_micro": 0.9320587231136906, |
|
"eval_recall_weighted": 0.9320587231136906, |
|
"eval_runtime": 25.9574, |
|
"eval_samples_per_second": 112.839, |
|
"eval_steps_per_second": 7.089, |
|
"step": 11696 |
|
} |
|
], |
|
"logging_steps": 25, |
|
"max_steps": 14620, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 10, |
|
"save_steps": 500, |
|
"total_flos": 9.590567882317627e+18, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|