{
"best_metric": 0.23535355925559998,
"best_model_checkpoint": "autotrain-vit-large-patch16-224/checkpoint-10234",
"epoch": 7.0,
"eval_steps": 500,
"global_step": 10234,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
|
{ |
|
"epoch": 0.01709986320109439, |
|
"grad_norm": 16.03294563293457, |
|
"learning_rate": 8.207934336525308e-07, |
|
"loss": 3.1283, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.03419972640218878, |
|
"grad_norm": 15.965583801269531, |
|
"learning_rate": 1.6073871409028727e-06, |
|
"loss": 2.7271, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.05129958960328317, |
|
"grad_norm": 10.72908878326416, |
|
"learning_rate": 2.4623803009575924e-06, |
|
"loss": 2.2885, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.06839945280437756, |
|
"grad_norm": 12.793851852416992, |
|
"learning_rate": 3.3173734610123124e-06, |
|
"loss": 1.9045, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.08549931600547196, |
|
"grad_norm": 11.025067329406738, |
|
"learning_rate": 4.172366621067032e-06, |
|
"loss": 1.4383, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.10259917920656635, |
|
"grad_norm": 13.093396186828613, |
|
"learning_rate": 5.027359781121752e-06, |
|
"loss": 1.3066, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.11969904240766074, |
|
"grad_norm": 8.261626243591309, |
|
"learning_rate": 5.882352941176471e-06, |
|
"loss": 1.1223, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.13679890560875513, |
|
"grad_norm": 9.678178787231445, |
|
"learning_rate": 6.73734610123119e-06, |
|
"loss": 1.1502, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.1538987688098495, |
|
"grad_norm": 13.990431785583496, |
|
"learning_rate": 7.592339261285911e-06, |
|
"loss": 0.9833, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.17099863201094392, |
|
"grad_norm": 11.807621002197266, |
|
"learning_rate": 8.44733242134063e-06, |
|
"loss": 0.8433, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.1880984952120383, |
|
"grad_norm": 21.150636672973633, |
|
"learning_rate": 9.302325581395349e-06, |
|
"loss": 0.8553, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.2051983584131327, |
|
"grad_norm": 16.56648826599121, |
|
"learning_rate": 1.0157318741450068e-05, |
|
"loss": 1.0502, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.22229822161422708, |
|
"grad_norm": 8.26397705078125, |
|
"learning_rate": 1.1012311901504789e-05, |
|
"loss": 0.8686, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.2393980848153215, |
|
"grad_norm": 7.628605842590332, |
|
"learning_rate": 1.1867305061559508e-05, |
|
"loss": 0.7838, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.25649794801641584, |
|
"grad_norm": 6.690649509429932, |
|
"learning_rate": 1.2722298221614229e-05, |
|
"loss": 0.7284, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.27359781121751026, |
|
"grad_norm": 9.397786140441895, |
|
"learning_rate": 1.3577291381668946e-05, |
|
"loss": 0.6408, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.29069767441860467, |
|
"grad_norm": 1.714457392692566, |
|
"learning_rate": 1.4432284541723667e-05, |
|
"loss": 0.7046, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.307797537619699, |
|
"grad_norm": 16.014860153198242, |
|
"learning_rate": 1.5287277701778386e-05, |
|
"loss": 0.7838, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.32489740082079344, |
|
"grad_norm": 10.126370429992676, |
|
"learning_rate": 1.6142270861833107e-05, |
|
"loss": 0.735, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.34199726402188785, |
|
"grad_norm": 10.995970726013184, |
|
"learning_rate": 1.6997264021887825e-05, |
|
"loss": 0.7523, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.3590971272229822, |
|
"grad_norm": 7.909482955932617, |
|
"learning_rate": 1.7852257181942546e-05, |
|
"loss": 0.7157, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.3761969904240766, |
|
"grad_norm": 10.751182556152344, |
|
"learning_rate": 1.8707250341997263e-05, |
|
"loss": 0.7966, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.393296853625171, |
|
"grad_norm": 8.872684478759766, |
|
"learning_rate": 1.9562243502051984e-05, |
|
"loss": 0.6923, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.4103967168262654, |
|
"grad_norm": 12.542756080627441, |
|
"learning_rate": 2.0417236662106705e-05, |
|
"loss": 0.8022, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.4274965800273598, |
|
"grad_norm": 4.314883708953857, |
|
"learning_rate": 2.1272229822161423e-05, |
|
"loss": 0.6959, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.44459644322845415, |
|
"grad_norm": 10.603111267089844, |
|
"learning_rate": 2.2127222982216144e-05, |
|
"loss": 0.7282, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.46169630642954856, |
|
"grad_norm": 12.411425590515137, |
|
"learning_rate": 2.298221614227086e-05, |
|
"loss": 0.8191, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.478796169630643, |
|
"grad_norm": 6.108178615570068, |
|
"learning_rate": 2.3837209302325582e-05, |
|
"loss": 0.507, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.49589603283173733, |
|
"grad_norm": 11.323492050170898, |
|
"learning_rate": 2.46922024623803e-05, |
|
"loss": 0.5333, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.5129958960328317, |
|
"grad_norm": 9.001029014587402, |
|
"learning_rate": 2.554719562243502e-05, |
|
"loss": 0.6816, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.5300957592339262, |
|
"grad_norm": 1.6873475313186646, |
|
"learning_rate": 2.6402188782489738e-05, |
|
"loss": 0.6621, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.5471956224350205, |
|
"grad_norm": 9.71667194366455, |
|
"learning_rate": 2.7257181942544462e-05, |
|
"loss": 0.5873, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.5642954856361149, |
|
"grad_norm": 5.7018866539001465, |
|
"learning_rate": 2.811217510259918e-05, |
|
"loss": 0.6119, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.5813953488372093, |
|
"grad_norm": 6.869658946990967, |
|
"learning_rate": 2.8967168262653897e-05, |
|
"loss": 0.8036, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.5984952120383037, |
|
"grad_norm": 10.419452667236328, |
|
"learning_rate": 2.982216142270862e-05, |
|
"loss": 0.6884, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.615595075239398, |
|
"grad_norm": 6.166721343994141, |
|
"learning_rate": 3.067715458276334e-05, |
|
"loss": 0.652, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.6326949384404925, |
|
"grad_norm": 10.317444801330566, |
|
"learning_rate": 3.1532147742818057e-05, |
|
"loss": 0.6249, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.6497948016415869, |
|
"grad_norm": 11.344230651855469, |
|
"learning_rate": 3.2387140902872784e-05, |
|
"loss": 0.6797, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.6668946648426812, |
|
"grad_norm": 8.010811805725098, |
|
"learning_rate": 3.32421340629275e-05, |
|
"loss": 0.8409, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.6839945280437757, |
|
"grad_norm": 8.49295425415039, |
|
"learning_rate": 3.409712722298222e-05, |
|
"loss": 0.5985, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.70109439124487, |
|
"grad_norm": 14.201081275939941, |
|
"learning_rate": 3.4952120383036933e-05, |
|
"loss": 0.7377, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.7181942544459644, |
|
"grad_norm": 12.270833015441895, |
|
"learning_rate": 3.580711354309166e-05, |
|
"loss": 0.5195, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.7352941176470589, |
|
"grad_norm": 2.879152297973633, |
|
"learning_rate": 3.6662106703146375e-05, |
|
"loss": 0.6526, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.7523939808481532, |
|
"grad_norm": 8.10263442993164, |
|
"learning_rate": 3.7517099863201096e-05, |
|
"loss": 0.7119, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.7694938440492476, |
|
"grad_norm": 4.584366321563721, |
|
"learning_rate": 3.837209302325582e-05, |
|
"loss": 0.6515, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.786593707250342, |
|
"grad_norm": 6.819866180419922, |
|
"learning_rate": 3.922708618331054e-05, |
|
"loss": 0.5304, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.8036935704514364, |
|
"grad_norm": 9.715897560119629, |
|
"learning_rate": 4.008207934336525e-05, |
|
"loss": 0.5718, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.8207934336525308, |
|
"grad_norm": 6.830766677856445, |
|
"learning_rate": 4.093707250341998e-05, |
|
"loss": 0.6228, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.8378932968536251, |
|
"grad_norm": 8.660406112670898, |
|
"learning_rate": 4.1792065663474694e-05, |
|
"loss": 0.7842, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.8549931600547196, |
|
"grad_norm": 8.981478691101074, |
|
"learning_rate": 4.2647058823529415e-05, |
|
"loss": 0.7702, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.872093023255814, |
|
"grad_norm": 5.491425037384033, |
|
"learning_rate": 4.3502051983584136e-05, |
|
"loss": 0.6483, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.8891928864569083, |
|
"grad_norm": 8.423704147338867, |
|
"learning_rate": 4.435704514363886e-05, |
|
"loss": 0.6889, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.9062927496580028, |
|
"grad_norm": 11.879225730895996, |
|
"learning_rate": 4.521203830369357e-05, |
|
"loss": 0.7592, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.9233926128590971, |
|
"grad_norm": 10.297099113464355, |
|
"learning_rate": 4.606703146374829e-05, |
|
"loss": 0.6994, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.9404924760601915, |
|
"grad_norm": 34.574771881103516, |
|
"learning_rate": 4.692202462380301e-05, |
|
"loss": 0.6254, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.957592339261286, |
|
"grad_norm": 7.829855442047119, |
|
"learning_rate": 4.7777017783857733e-05, |
|
"loss": 0.62, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.9746922024623803, |
|
"grad_norm": 10.330737113952637, |
|
"learning_rate": 4.863201094391245e-05, |
|
"loss": 0.5799, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.9917920656634747, |
|
"grad_norm": 7.56599235534668, |
|
"learning_rate": 4.948700410396717e-05, |
|
"loss": 0.6286, |
|
"step": 1450 |
|
}, |
|
{
"epoch": 1.0,
"eval_accuracy": 0.8296346876066917,
"eval_f1_macro": 0.5237337030166633,
"eval_f1_micro": 0.8296346876066917,
"eval_f1_weighted": 0.818531665770605,
"eval_loss": 0.5858680605888367,
"eval_precision_macro": 0.6349808627313414,
"eval_precision_micro": 0.8296346876066917,
"eval_precision_weighted": 0.8428198229026794,
"eval_recall_macro": 0.5158734772675513,
"eval_recall_micro": 0.8296346876066917,
"eval_recall_weighted": 0.8296346876066917,
"eval_runtime": 29.6775,
"eval_samples_per_second": 98.694,
"eval_steps_per_second": 6.2,
"step": 1462
},
|
{ |
|
"epoch": 1.008891928864569, |
|
"grad_norm": 7.980313301086426, |
|
"learning_rate": 4.996200030399757e-05, |
|
"loss": 0.6037, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 1.0259917920656634, |
|
"grad_norm": 6.049495220184326, |
|
"learning_rate": 4.9867001063991494e-05, |
|
"loss": 0.6332, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.043091655266758, |
|
"grad_norm": 11.885618209838867, |
|
"learning_rate": 4.977200182398541e-05, |
|
"loss": 0.761, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 1.0601915184678523, |
|
"grad_norm": 14.27984619140625, |
|
"learning_rate": 4.9677002583979335e-05, |
|
"loss": 0.4568, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.0772913816689467, |
|
"grad_norm": 6.687506198883057, |
|
"learning_rate": 4.958200334397325e-05, |
|
"loss": 0.6308, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 1.094391244870041, |
|
"grad_norm": 6.140110492706299, |
|
"learning_rate": 4.948700410396717e-05, |
|
"loss": 0.5896, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.1114911080711354, |
|
"grad_norm": 8.485834121704102, |
|
"learning_rate": 4.939200486396109e-05, |
|
"loss": 0.7167, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 1.1285909712722297, |
|
"grad_norm": 8.836180686950684, |
|
"learning_rate": 4.929700562395501e-05, |
|
"loss": 0.5643, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.1456908344733243, |
|
"grad_norm": 5.937542915344238, |
|
"learning_rate": 4.9202006383948926e-05, |
|
"loss": 0.5568, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 1.1627906976744187, |
|
"grad_norm": 4.0352582931518555, |
|
"learning_rate": 4.910700714394285e-05, |
|
"loss": 0.7228, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.179890560875513, |
|
"grad_norm": 6.907437801361084, |
|
"learning_rate": 4.9012007903936766e-05, |
|
"loss": 0.5816, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 1.1969904240766074, |
|
"grad_norm": 6.376392841339111, |
|
"learning_rate": 4.891700866393069e-05, |
|
"loss": 0.5199, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.2140902872777017, |
|
"grad_norm": 9.630695343017578, |
|
"learning_rate": 4.882200942392461e-05, |
|
"loss": 0.6383, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 1.231190150478796, |
|
"grad_norm": 6.556370258331299, |
|
"learning_rate": 4.872701018391853e-05, |
|
"loss": 0.5977, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.2482900136798905, |
|
"grad_norm": 8.746458053588867, |
|
"learning_rate": 4.863201094391245e-05, |
|
"loss": 0.5699, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 1.265389876880985, |
|
"grad_norm": 6.0708537101745605, |
|
"learning_rate": 4.853701170390637e-05, |
|
"loss": 0.6479, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.2824897400820794, |
|
"grad_norm": 7.421648979187012, |
|
"learning_rate": 4.8442012463900295e-05, |
|
"loss": 0.7258, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 1.2995896032831737, |
|
"grad_norm": 11.180326461791992, |
|
"learning_rate": 4.834701322389421e-05, |
|
"loss": 0.5853, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.316689466484268, |
|
"grad_norm": 12.808277130126953, |
|
"learning_rate": 4.8252013983888135e-05, |
|
"loss": 0.625, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 1.3337893296853625, |
|
"grad_norm": 5.873989105224609, |
|
"learning_rate": 4.815701474388205e-05, |
|
"loss": 0.6044, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.350889192886457, |
|
"grad_norm": 6.302676200866699, |
|
"learning_rate": 4.8062015503875976e-05, |
|
"loss": 0.7381, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 1.3679890560875512, |
|
"grad_norm": 10.768912315368652, |
|
"learning_rate": 4.796701626386989e-05, |
|
"loss": 0.7416, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.3850889192886457, |
|
"grad_norm": 3.4966821670532227, |
|
"learning_rate": 4.7872017023863817e-05, |
|
"loss": 0.5816, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 1.40218878248974, |
|
"grad_norm": 20.856693267822266, |
|
"learning_rate": 4.7777017783857733e-05, |
|
"loss": 0.674, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.4192886456908345, |
|
"grad_norm": 3.353882074356079, |
|
"learning_rate": 4.768201854385165e-05, |
|
"loss": 0.6317, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 1.4363885088919288, |
|
"grad_norm": 4.5955939292907715, |
|
"learning_rate": 4.758701930384557e-05, |
|
"loss": 0.5058, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.4534883720930232, |
|
"grad_norm": 3.8296780586242676, |
|
"learning_rate": 4.749202006383949e-05, |
|
"loss": 0.55, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 1.4705882352941178, |
|
"grad_norm": 8.511550903320312, |
|
"learning_rate": 4.739702082383341e-05, |
|
"loss": 0.7234, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 1.487688098495212, |
|
"grad_norm": 7.452313423156738, |
|
"learning_rate": 4.730202158382733e-05, |
|
"loss": 0.4657, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 1.5047879616963065, |
|
"grad_norm": 5.583883762359619, |
|
"learning_rate": 4.720702234382125e-05, |
|
"loss": 0.5554, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 1.5218878248974008, |
|
"grad_norm": 3.6462392807006836, |
|
"learning_rate": 4.711202310381517e-05, |
|
"loss": 0.6182, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 1.5389876880984952, |
|
"grad_norm": 5.7832207679748535, |
|
"learning_rate": 4.701702386380909e-05, |
|
"loss": 0.4694, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 1.5560875512995898, |
|
"grad_norm": 3.335848331451416, |
|
"learning_rate": 4.692202462380301e-05, |
|
"loss": 0.6423, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 1.573187414500684, |
|
"grad_norm": 15.582513809204102, |
|
"learning_rate": 4.682702538379693e-05, |
|
"loss": 0.6912, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 1.5902872777017785, |
|
"grad_norm": 8.966438293457031, |
|
"learning_rate": 4.673202614379085e-05, |
|
"loss": 0.4998, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 1.6073871409028728, |
|
"grad_norm": 6.383381366729736, |
|
"learning_rate": 4.663702690378477e-05, |
|
"loss": 0.5879, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 1.6244870041039672, |
|
"grad_norm": 3.314211845397949, |
|
"learning_rate": 4.6542027663778694e-05, |
|
"loss": 0.5865, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 1.6415868673050615, |
|
"grad_norm": 7.680863380432129, |
|
"learning_rate": 4.644702842377261e-05, |
|
"loss": 0.5731, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 1.658686730506156, |
|
"grad_norm": 15.084454536437988, |
|
"learning_rate": 4.6352029183766534e-05, |
|
"loss": 0.6018, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 1.6757865937072505, |
|
"grad_norm": 4.016242027282715, |
|
"learning_rate": 4.625702994376046e-05, |
|
"loss": 0.4291, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 1.6928864569083446, |
|
"grad_norm": 6.622522354125977, |
|
"learning_rate": 4.6162030703754375e-05, |
|
"loss": 0.5729, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 1.7099863201094392, |
|
"grad_norm": 7.149597644805908, |
|
"learning_rate": 4.606703146374829e-05, |
|
"loss": 0.6435, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.7270861833105335, |
|
"grad_norm": 4.473049163818359, |
|
"learning_rate": 4.597203222374221e-05, |
|
"loss": 0.5068, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 1.744186046511628, |
|
"grad_norm": 8.462128639221191, |
|
"learning_rate": 4.587703298373613e-05, |
|
"loss": 0.4735, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 1.7612859097127223, |
|
"grad_norm": 3.4091100692749023, |
|
"learning_rate": 4.578203374373005e-05, |
|
"loss": 0.591, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 1.7783857729138166, |
|
"grad_norm": 4.581203937530518, |
|
"learning_rate": 4.568703450372397e-05, |
|
"loss": 0.4642, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 1.7954856361149112, |
|
"grad_norm": 7.462313175201416, |
|
"learning_rate": 4.559203526371789e-05, |
|
"loss": 0.5036, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 1.8125854993160053, |
|
"grad_norm": 7.391847610473633, |
|
"learning_rate": 4.549703602371181e-05, |
|
"loss": 0.5039, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 1.8296853625171, |
|
"grad_norm": 16.374174118041992, |
|
"learning_rate": 4.540203678370573e-05, |
|
"loss": 0.6136, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 1.8467852257181943, |
|
"grad_norm": 7.047588348388672, |
|
"learning_rate": 4.5307037543699654e-05, |
|
"loss": 0.4051, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 1.8638850889192886, |
|
"grad_norm": 4.181390285491943, |
|
"learning_rate": 4.521203830369357e-05, |
|
"loss": 0.7065, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 1.8809849521203832, |
|
"grad_norm": 2.601778984069824, |
|
"learning_rate": 4.5117039063687494e-05, |
|
"loss": 0.5517, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 1.8980848153214773, |
|
"grad_norm": 2.9935567378997803, |
|
"learning_rate": 4.502203982368141e-05, |
|
"loss": 0.3861, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 1.915184678522572, |
|
"grad_norm": 7.431847095489502, |
|
"learning_rate": 4.4927040583675335e-05, |
|
"loss": 0.5537, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 1.9322845417236663, |
|
"grad_norm": 7.30590295791626, |
|
"learning_rate": 4.483204134366925e-05, |
|
"loss": 0.5082, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 1.9493844049247606, |
|
"grad_norm": 6.986799240112305, |
|
"learning_rate": 4.4737042103663176e-05, |
|
"loss": 0.5727, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 1.966484268125855, |
|
"grad_norm": 4.823264122009277, |
|
"learning_rate": 4.464204286365709e-05, |
|
"loss": 0.5773, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 1.9835841313269493, |
|
"grad_norm": 0.19885371625423431, |
|
"learning_rate": 4.4547043623651016e-05, |
|
"loss": 0.5056, |
|
"step": 2900 |
|
}, |
|
{
"epoch": 2.0,
"eval_accuracy": 0.9095254353021509,
"eval_f1_macro": 0.7148556374573719,
"eval_f1_micro": 0.9095254353021509,
"eval_f1_weighted": 0.906235846022486,
"eval_loss": 0.2884175181388855,
"eval_precision_macro": 0.779691628927798,
"eval_precision_micro": 0.9095254353021509,
"eval_precision_weighted": 0.9118121732094211,
"eval_recall_macro": 0.6904984139815875,
"eval_recall_micro": 0.9095254353021509,
"eval_recall_weighted": 0.9095254353021509,
"eval_runtime": 29.8929,
"eval_samples_per_second": 97.983,
"eval_steps_per_second": 6.155,
"step": 2924
},
|
{ |
|
"epoch": 2.000683994528044, |
|
"grad_norm": 8.045063972473145, |
|
"learning_rate": 4.445204438364493e-05, |
|
"loss": 0.5348, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 2.017783857729138, |
|
"grad_norm": 7.1167521476745605, |
|
"learning_rate": 4.435704514363886e-05, |
|
"loss": 0.4205, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 2.0348837209302326, |
|
"grad_norm": 7.508376121520996, |
|
"learning_rate": 4.4262045903632774e-05, |
|
"loss": 0.5367, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 2.0519835841313268, |
|
"grad_norm": 10.526673316955566, |
|
"learning_rate": 4.416704666362669e-05, |
|
"loss": 0.4326, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 2.0690834473324213, |
|
"grad_norm": 7.46666145324707, |
|
"learning_rate": 4.407204742362061e-05, |
|
"loss": 0.6013, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 2.086183310533516, |
|
"grad_norm": 3.7393107414245605, |
|
"learning_rate": 4.397704818361453e-05, |
|
"loss": 0.5544, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 2.10328317373461, |
|
"grad_norm": 6.285150051116943, |
|
"learning_rate": 4.3882048943608455e-05, |
|
"loss": 0.5717, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 2.1203830369357046, |
|
"grad_norm": 8.156254768371582, |
|
"learning_rate": 4.378704970360237e-05, |
|
"loss": 0.5008, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 2.1374829001367988, |
|
"grad_norm": 10.280779838562012, |
|
"learning_rate": 4.3692050463596295e-05, |
|
"loss": 0.3871, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 2.1545827633378933, |
|
"grad_norm": 11.541882514953613, |
|
"learning_rate": 4.359705122359021e-05, |
|
"loss": 0.4684, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 2.1716826265389875, |
|
"grad_norm": 12.31938648223877, |
|
"learning_rate": 4.3502051983584136e-05, |
|
"loss": 0.5055, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 2.188782489740082, |
|
"grad_norm": 6.534464359283447, |
|
"learning_rate": 4.340705274357805e-05, |
|
"loss": 0.682, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 2.2058823529411766, |
|
"grad_norm": 6.284212112426758, |
|
"learning_rate": 4.3312053503571976e-05, |
|
"loss": 0.5189, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 2.2229822161422708, |
|
"grad_norm": 4.796578884124756, |
|
"learning_rate": 4.321705426356589e-05, |
|
"loss": 0.5516, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 2.2400820793433653, |
|
"grad_norm": 0.08828147500753403, |
|
"learning_rate": 4.312205502355982e-05, |
|
"loss": 0.4325, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 2.2571819425444595, |
|
"grad_norm": 3.709010362625122, |
|
"learning_rate": 4.3027055783553734e-05, |
|
"loss": 0.524, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 2.274281805745554, |
|
"grad_norm": 6.48209810256958, |
|
"learning_rate": 4.293205654354766e-05, |
|
"loss": 0.4304, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 2.2913816689466486, |
|
"grad_norm": 7.461411952972412, |
|
"learning_rate": 4.2837057303541574e-05, |
|
"loss": 0.5894, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 2.3084815321477428, |
|
"grad_norm": 6.687474727630615, |
|
"learning_rate": 4.27420580635355e-05, |
|
"loss": 0.4504, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 2.3255813953488373, |
|
"grad_norm": 4.4408745765686035, |
|
"learning_rate": 4.2647058823529415e-05, |
|
"loss": 0.5091, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 2.3426812585499315, |
|
"grad_norm": 7.8160552978515625, |
|
"learning_rate": 4.255205958352333e-05, |
|
"loss": 0.3612, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 2.359781121751026, |
|
"grad_norm": 4.452754497528076, |
|
"learning_rate": 4.2457060343517255e-05, |
|
"loss": 0.6184, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 2.37688098495212, |
|
"grad_norm": 12.292354583740234, |
|
"learning_rate": 4.236206110351117e-05, |
|
"loss": 0.5868, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 2.3939808481532148, |
|
"grad_norm": 3.688055992126465, |
|
"learning_rate": 4.226706186350509e-05, |
|
"loss": 0.5325, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 2.4110807113543093, |
|
"grad_norm": 5.77033805847168, |
|
"learning_rate": 4.217206262349901e-05, |
|
"loss": 0.3577, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 2.4281805745554035, |
|
"grad_norm": 4.816909313201904, |
|
"learning_rate": 4.207706338349293e-05, |
|
"loss": 0.4681, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 2.445280437756498, |
|
"grad_norm": 8.695076942443848, |
|
"learning_rate": 4.198206414348685e-05, |
|
"loss": 0.4151, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 2.462380300957592, |
|
"grad_norm": 10.278604507446289, |
|
"learning_rate": 4.188706490348077e-05, |
|
"loss": 0.4165, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 2.4794801641586868, |
|
"grad_norm": 17.01686668395996, |
|
"learning_rate": 4.1792065663474694e-05, |
|
"loss": 0.5273, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 2.496580027359781, |
|
"grad_norm": 7.072406768798828, |
|
"learning_rate": 4.170086639306886e-05, |
|
"loss": 0.4383, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 2.5136798905608755, |
|
"grad_norm": 5.283012866973877, |
|
"learning_rate": 4.1605867153062774e-05, |
|
"loss": 0.578, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 2.53077975376197, |
|
"grad_norm": 3.6656084060668945, |
|
"learning_rate": 4.15108679130567e-05, |
|
"loss": 0.491, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 2.547879616963064, |
|
"grad_norm": 9.410102844238281, |
|
"learning_rate": 4.1415868673050615e-05, |
|
"loss": 0.4382, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 2.5649794801641588, |
|
"grad_norm": 8.41618824005127, |
|
"learning_rate": 4.132086943304454e-05, |
|
"loss": 0.5658, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 2.582079343365253, |
|
"grad_norm": 1.3863807916641235, |
|
"learning_rate": 4.1225870193038455e-05, |
|
"loss": 0.5228, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 2.5991792065663475, |
|
"grad_norm": 3.929469585418701, |
|
"learning_rate": 4.113087095303238e-05, |
|
"loss": 0.4993, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 2.616279069767442, |
|
"grad_norm": 4.706065654754639, |
|
"learning_rate": 4.1035871713026296e-05, |
|
"loss": 0.4045, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 2.633378932968536, |
|
"grad_norm": 2.805471658706665, |
|
"learning_rate": 4.094087247302022e-05, |
|
"loss": 0.4283, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 2.650478796169631, |
|
"grad_norm": 8.297080993652344, |
|
"learning_rate": 4.0845873233014136e-05, |
|
"loss": 0.4499, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 2.667578659370725, |
|
"grad_norm": 2.637622594833374, |
|
"learning_rate": 4.075087399300806e-05, |
|
"loss": 0.5087, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 2.6846785225718195, |
|
"grad_norm": 2.461876392364502, |
|
"learning_rate": 4.065587475300198e-05, |
|
"loss": 0.5104, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 2.701778385772914, |
|
"grad_norm": 6.3255486488342285, |
|
"learning_rate": 4.05608755129959e-05, |
|
"loss": 0.5861, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 2.718878248974008, |
|
"grad_norm": 11.981715202331543, |
|
"learning_rate": 4.046587627298982e-05, |
|
"loss": 0.5593, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 2.7359781121751023, |
|
"grad_norm": 7.206056594848633, |
|
"learning_rate": 4.037087703298374e-05, |
|
"loss": 0.4665, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 2.753077975376197, |
|
"grad_norm": 1.9749879837036133, |
|
"learning_rate": 4.027587779297766e-05, |
|
"loss": 0.4984, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 2.7701778385772915, |
|
"grad_norm": 4.363369464874268, |
|
"learning_rate": 4.018087855297158e-05, |
|
"loss": 0.5785, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 2.7872777017783856, |
|
"grad_norm": 6.443023681640625, |
|
"learning_rate": 4.00858793129655e-05, |
|
"loss": 0.5287, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 2.80437756497948, |
|
"grad_norm": 6.106603622436523, |
|
"learning_rate": 3.999088007295942e-05, |
|
"loss": 0.6016, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 2.8214774281805743, |
|
"grad_norm": 9.416887283325195, |
|
"learning_rate": 3.989588083295334e-05, |
|
"loss": 0.5532, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 2.838577291381669, |
|
"grad_norm": 2.5579991340637207, |
|
"learning_rate": 3.9800881592947256e-05, |
|
"loss": 0.4579, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 2.8556771545827635, |
|
"grad_norm": 10.23747444152832, |
|
"learning_rate": 3.970588235294117e-05, |
|
"loss": 0.5402, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 2.8727770177838576, |
|
"grad_norm": 9.435909271240234, |
|
"learning_rate": 3.96108831129351e-05, |
|
"loss": 0.4845, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 2.889876880984952, |
|
"grad_norm": 3.5227017402648926, |
|
"learning_rate": 3.9515883872929014e-05, |
|
"loss": 0.3969, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 2.9069767441860463, |
|
"grad_norm": 2.499969482421875, |
|
"learning_rate": 3.942088463292294e-05, |
|
"loss": 0.4392, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 2.924076607387141, |
|
"grad_norm": 8.345962524414062, |
|
"learning_rate": 3.9325885392916854e-05, |
|
"loss": 0.4584, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 2.9411764705882355, |
|
"grad_norm": 3.583355188369751, |
|
"learning_rate": 3.923088615291078e-05, |
|
"loss": 0.3816, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 2.9582763337893296, |
|
"grad_norm": 1.7532494068145752, |
|
"learning_rate": 3.91358869129047e-05, |
|
"loss": 0.3497, |
|
"step": 4325 |
|
}, |
|
{ |
|
"epoch": 2.975376196990424, |
|
"grad_norm": 9.220611572265625, |
|
"learning_rate": 3.904088767289862e-05, |
|
"loss": 0.5747, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 2.9924760601915183, |
|
"grad_norm": 1.715482234954834, |
|
"learning_rate": 3.894588843289254e-05, |
|
"loss": 0.4701, |
|
"step": 4375 |
|
}, |
|
{
"epoch": 3.0,
"eval_accuracy": 0.9071355411403209,
"eval_f1_macro": 0.7615092412094518,
"eval_f1_micro": 0.9071355411403209,
"eval_f1_weighted": 0.9053250303728207,
"eval_loss": 0.2766139805316925,
"eval_precision_macro": 0.8223788212097681,
"eval_precision_micro": 0.9071355411403209,
"eval_precision_weighted": 0.9095949572268466,
"eval_recall_macro": 0.7322063659665187,
"eval_recall_micro": 0.9071355411403209,
"eval_recall_weighted": 0.9071355411403209,
"eval_runtime": 29.8386,
"eval_samples_per_second": 98.161,
"eval_steps_per_second": 6.167,
"step": 4386
},
|
{ |
|
"epoch": 3.009575923392613, |
|
"grad_norm": 4.551536560058594, |
|
"learning_rate": 3.885088919288646e-05, |
|
"loss": 0.4776, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 3.026675786593707, |
|
"grad_norm": 10.111888885498047, |
|
"learning_rate": 3.875588995288038e-05, |
|
"loss": 0.5966, |
|
"step": 4425 |
|
}, |
|
{ |
|
"epoch": 3.0437756497948016, |
|
"grad_norm": 13.998083114624023, |
|
"learning_rate": 3.86608907128743e-05, |
|
"loss": 0.4699, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 3.060875512995896, |
|
"grad_norm": 10.541669845581055, |
|
"learning_rate": 3.856589147286822e-05, |
|
"loss": 0.5154, |
|
"step": 4475 |
|
}, |
|
{ |
|
"epoch": 3.0779753761969904, |
|
"grad_norm": 3.1978766918182373, |
|
"learning_rate": 3.847089223286214e-05, |
|
"loss": 0.4304, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 3.095075239398085, |
|
"grad_norm": 5.206043720245361, |
|
"learning_rate": 3.8375892992856064e-05, |
|
"loss": 0.3223, |
|
"step": 4525 |
|
}, |
|
{ |
|
"epoch": 3.112175102599179, |
|
"grad_norm": 9.216475486755371, |
|
"learning_rate": 3.828089375284998e-05, |
|
"loss": 0.6146, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 3.1292749658002736, |
|
"grad_norm": 4.771245002746582, |
|
"learning_rate": 3.81858945128439e-05, |
|
"loss": 0.3407, |
|
"step": 4575 |
|
}, |
|
{ |
|
"epoch": 3.146374829001368, |
|
"grad_norm": 8.766341209411621, |
|
"learning_rate": 3.8090895272837814e-05, |
|
"loss": 0.4866, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 3.1634746922024624, |
|
"grad_norm": 6.949918270111084, |
|
"learning_rate": 3.799589603283174e-05, |
|
"loss": 0.4899, |
|
"step": 4625 |
|
}, |
|
{ |
|
"epoch": 3.180574555403557, |
|
"grad_norm": 1.0002543926239014, |
|
"learning_rate": 3.7900896792825655e-05, |
|
"loss": 0.481, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 3.197674418604651, |
|
"grad_norm": 9.768813133239746, |
|
"learning_rate": 3.780589755281958e-05, |
|
"loss": 0.4338, |
|
"step": 4675 |
|
}, |
|
{ |
|
"epoch": 3.2147742818057456, |
|
"grad_norm": 4.305062770843506, |
|
"learning_rate": 3.7710898312813495e-05, |
|
"loss": 0.3783, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 3.23187414500684, |
|
"grad_norm": 0.7493396997451782, |
|
"learning_rate": 3.761589907280742e-05, |
|
"loss": 0.5001, |
|
"step": 4725 |
|
}, |
|
{ |
|
"epoch": 3.2489740082079344, |
|
"grad_norm": 5.481766700744629, |
|
"learning_rate": 3.7520899832801336e-05, |
|
"loss": 0.4894, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 3.266073871409029, |
|
"grad_norm": 3.8942747116088867, |
|
"learning_rate": 3.742590059279526e-05, |
|
"loss": 0.4761, |
|
"step": 4775 |
|
}, |
|
{ |
|
"epoch": 3.283173734610123, |
|
"grad_norm": 8.91220760345459, |
|
"learning_rate": 3.7330901352789176e-05, |
|
"loss": 0.5526, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 3.3002735978112177, |
|
"grad_norm": 1.765817642211914, |
|
"learning_rate": 3.72359021127831e-05, |
|
"loss": 0.5665, |
|
"step": 4825 |
|
}, |
|
{ |
|
"epoch": 3.317373461012312, |
|
"grad_norm": 6.553985118865967, |
|
"learning_rate": 3.714090287277702e-05, |
|
"loss": 0.287, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 3.3344733242134064, |
|
"grad_norm": 5.839998245239258, |
|
"learning_rate": 3.704590363277094e-05, |
|
"loss": 0.4927, |
|
"step": 4875 |
|
}, |
|
{ |
|
"epoch": 3.3515731874145005, |
|
"grad_norm": 8.003843307495117, |
|
"learning_rate": 3.6950904392764864e-05, |
|
"loss": 0.552, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 3.368673050615595, |
|
"grad_norm": 3.3592958450317383, |
|
"learning_rate": 3.685590515275878e-05, |
|
"loss": 0.5703, |
|
"step": 4925 |
|
}, |
|
{ |
|
"epoch": 3.3857729138166897, |
|
"grad_norm": 4.747976779937744, |
|
"learning_rate": 3.6760905912752705e-05, |
|
"loss": 0.5218, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 3.402872777017784, |
|
"grad_norm": 4.351706504821777, |
|
"learning_rate": 3.666590667274662e-05, |
|
"loss": 0.4671, |
|
"step": 4975 |
|
}, |
|
{ |
|
"epoch": 3.4199726402188784, |
|
"grad_norm": 5.302130222320557, |
|
"learning_rate": 3.6570907432740545e-05, |
|
"loss": 0.3913, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 3.4370725034199725, |
|
"grad_norm": 1.5275038480758667, |
|
"learning_rate": 3.647590819273446e-05, |
|
"loss": 0.3149, |
|
"step": 5025 |
|
}, |
|
{ |
|
"epoch": 3.454172366621067, |
|
"grad_norm": 0.8008119463920593, |
|
"learning_rate": 3.638090895272838e-05, |
|
"loss": 0.5232, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 3.471272229822161, |
|
"grad_norm": 6.361542701721191, |
|
"learning_rate": 3.6285909712722296e-05, |
|
"loss": 0.4149, |
|
"step": 5075 |
|
}, |
|
{ |
|
"epoch": 3.488372093023256, |
|
"grad_norm": 0.8227640390396118, |
|
"learning_rate": 3.619091047271622e-05, |
|
"loss": 0.5698, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 3.5054719562243504, |
|
"grad_norm": 4.734671115875244, |
|
"learning_rate": 3.609591123271014e-05, |
|
"loss": 0.4835, |
|
"step": 5125 |
|
}, |
|
{ |
|
"epoch": 3.5225718194254445, |
|
"grad_norm": 6.9522576332092285, |
|
"learning_rate": 3.600091199270406e-05, |
|
"loss": 0.5387, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 3.539671682626539, |
|
"grad_norm": 3.4246246814727783, |
|
"learning_rate": 3.590591275269798e-05, |
|
"loss": 0.4044, |
|
"step": 5175 |
|
}, |
|
{ |
|
"epoch": 3.556771545827633, |
|
"grad_norm": 6.183599472045898, |
|
"learning_rate": 3.58109135126919e-05, |
|
"loss": 0.4785, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 3.573871409028728, |
|
"grad_norm": 3.738954782485962, |
|
"learning_rate": 3.571591427268582e-05, |
|
"loss": 0.5516, |
|
"step": 5225 |
|
}, |
|
{ |
|
"epoch": 3.5909712722298224, |
|
"grad_norm": 7.439993381500244, |
|
"learning_rate": 3.562091503267974e-05, |
|
"loss": 0.523, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 3.6080711354309165, |
|
"grad_norm": 4.419105052947998, |
|
"learning_rate": 3.552591579267366e-05, |
|
"loss": 0.4118, |
|
"step": 5275 |
|
}, |
|
{ |
|
"epoch": 3.625170998632011, |
|
"grad_norm": 16.105833053588867, |
|
"learning_rate": 3.543091655266758e-05, |
|
"loss": 0.5083, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 3.6422708618331052, |
|
"grad_norm": 5.370678424835205, |
|
"learning_rate": 3.53359173126615e-05, |
|
"loss": 0.4519, |
|
"step": 5325 |
|
}, |
|
{ |
|
"epoch": 3.6593707250342, |
|
"grad_norm": 11.25657844543457, |
|
"learning_rate": 3.524091807265542e-05, |
|
"loss": 0.3801, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 3.6764705882352944, |
|
"grad_norm": 3.6341259479522705, |
|
"learning_rate": 3.514591883264934e-05, |
|
"loss": 0.3978, |
|
"step": 5375 |
|
}, |
|
{ |
|
"epoch": 3.6935704514363885, |
|
"grad_norm": 0.18588456511497498, |
|
"learning_rate": 3.505091959264326e-05, |
|
"loss": 0.4341, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 3.7106703146374826, |
|
"grad_norm": 4.884586811065674, |
|
"learning_rate": 3.495592035263718e-05, |
|
"loss": 0.5095, |
|
"step": 5425 |
|
}, |
|
{ |
|
"epoch": 3.7277701778385772, |
|
"grad_norm": 5.724035739898682, |
|
"learning_rate": 3.4860921112631104e-05, |
|
"loss": 0.4625, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 3.744870041039672, |
|
"grad_norm": 5.873854637145996, |
|
"learning_rate": 3.476592187262502e-05, |
|
"loss": 0.4019, |
|
"step": 5475 |
|
}, |
|
{ |
|
"epoch": 3.761969904240766, |
|
"grad_norm": 3.843010902404785, |
|
"learning_rate": 3.467092263261894e-05, |
|
"loss": 0.5835, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 3.7790697674418605, |
|
"grad_norm": 5.200079441070557, |
|
"learning_rate": 3.457592339261286e-05, |
|
"loss": 0.4441, |
|
"step": 5525 |
|
}, |
|
{ |
|
"epoch": 3.7961696306429547, |
|
"grad_norm": 7.437492847442627, |
|
"learning_rate": 3.448092415260678e-05, |
|
"loss": 0.3881, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 3.8132694938440492, |
|
"grad_norm": 5.660449504852295, |
|
"learning_rate": 3.43859249126007e-05, |
|
"loss": 0.446, |
|
"step": 5575 |
|
}, |
|
{ |
|
"epoch": 3.830369357045144, |
|
"grad_norm": 2.2128264904022217, |
|
"learning_rate": 3.429092567259462e-05, |
|
"loss": 0.4257, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 3.847469220246238, |
|
"grad_norm": 5.218535423278809, |
|
"learning_rate": 3.419592643258854e-05, |
|
"loss": 0.4946, |
|
"step": 5625 |
|
}, |
|
{ |
|
"epoch": 3.8645690834473325, |
|
"grad_norm": 1.4989601373672485, |
|
"learning_rate": 3.410092719258246e-05, |
|
"loss": 0.3866, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 3.8816689466484267, |
|
"grad_norm": 11.225085258483887, |
|
"learning_rate": 3.400592795257638e-05, |
|
"loss": 0.4764, |
|
"step": 5675 |
|
}, |
|
{ |
|
"epoch": 3.8987688098495212, |
|
"grad_norm": 5.653949737548828, |
|
"learning_rate": 3.39109287125703e-05, |
|
"loss": 0.4362, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 3.915868673050616, |
|
"grad_norm": 10.87972354888916, |
|
"learning_rate": 3.381592947256422e-05, |
|
"loss": 0.4708, |
|
"step": 5725 |
|
}, |
|
{ |
|
"epoch": 3.93296853625171, |
|
"grad_norm": 5.110008239746094, |
|
"learning_rate": 3.372093023255814e-05, |
|
"loss": 0.4373, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 3.9500683994528045, |
|
"grad_norm": 4.611896991729736, |
|
"learning_rate": 3.3625930992552064e-05, |
|
"loss": 0.4808, |
|
"step": 5775 |
|
}, |
|
{ |
|
"epoch": 3.9671682626538987, |
|
"grad_norm": 3.967752695083618, |
|
"learning_rate": 3.353093175254598e-05, |
|
"loss": 0.3827, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 3.9842681258549932, |
|
"grad_norm": 7.738473892211914, |
|
"learning_rate": 3.3435932512539904e-05, |
|
"loss": 0.4564, |
|
"step": 5825 |
|
}, |
|
{
"epoch": 4.0,
"eval_accuracy": 0.8968931375896211,
"eval_f1_macro": 0.6842607301838518,
"eval_f1_micro": 0.8968931375896211,
"eval_f1_weighted": 0.8922998232908407,
"eval_loss": 0.3079198896884918,
"eval_precision_macro": 0.7711077815931238,
"eval_precision_micro": 0.8968931375896211,
"eval_precision_weighted": 0.9026293299812516,
"eval_recall_macro": 0.6595431638738259,
"eval_recall_micro": 0.8968931375896211,
"eval_recall_weighted": 0.8968931375896211,
"eval_runtime": 29.6904,
"eval_samples_per_second": 98.651,
"eval_steps_per_second": 6.197,
"step": 5848
},
|
{ |
|
"epoch": 4.001367989056088, |
|
"grad_norm": 12.561144828796387, |
|
"learning_rate": 3.334093327253382e-05, |
|
"loss": 0.285, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 4.0184678522571815, |
|
"grad_norm": 4.646189212799072, |
|
"learning_rate": 3.3245934032527745e-05, |
|
"loss": 0.4118, |
|
"step": 5875 |
|
}, |
|
{ |
|
"epoch": 4.035567715458276, |
|
"grad_norm": 3.1821000576019287, |
|
"learning_rate": 3.315093479252166e-05, |
|
"loss": 0.5941, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 4.052667578659371, |
|
"grad_norm": 6.912432670593262, |
|
"learning_rate": 3.3055935552515586e-05, |
|
"loss": 0.3519, |
|
"step": 5925 |
|
}, |
|
{ |
|
"epoch": 4.069767441860465, |
|
"grad_norm": 2.416905641555786, |
|
"learning_rate": 3.29609363125095e-05, |
|
"loss": 0.4982, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 4.08686730506156, |
|
"grad_norm": 8.833620071411133, |
|
"learning_rate": 3.286593707250342e-05, |
|
"loss": 0.3608, |
|
"step": 5975 |
|
}, |
|
{ |
|
"epoch": 4.1039671682626535, |
|
"grad_norm": 0.06249883025884628, |
|
"learning_rate": 3.2770937832497336e-05, |
|
"loss": 0.4883, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 4.121067031463748, |
|
"grad_norm": 2.5257112979888916, |
|
"learning_rate": 3.267593859249126e-05, |
|
"loss": 0.3371, |
|
"step": 6025 |
|
}, |
|
{ |
|
"epoch": 4.138166894664843, |
|
"grad_norm": 5.706219673156738, |
|
"learning_rate": 3.258093935248518e-05, |
|
"loss": 0.3998, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 4.155266757865937, |
|
"grad_norm": 9.878058433532715, |
|
"learning_rate": 3.24859401124791e-05, |
|
"loss": 0.5082, |
|
"step": 6075 |
|
}, |
|
{ |
|
"epoch": 4.172366621067032, |
|
"grad_norm": 0.6408124566078186, |
|
"learning_rate": 3.239094087247302e-05, |
|
"loss": 0.2915, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 4.1894664842681255, |
|
"grad_norm": 4.155546188354492, |
|
"learning_rate": 3.229594163246694e-05, |
|
"loss": 0.421, |
|
"step": 6125 |
|
}, |
|
{ |
|
"epoch": 4.20656634746922, |
|
"grad_norm": 2.4020016193389893, |
|
"learning_rate": 3.2200942392460865e-05, |
|
"loss": 0.3445, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 4.223666210670315, |
|
"grad_norm": 9.44356632232666, |
|
"learning_rate": 3.210594315245478e-05, |
|
"loss": 0.478, |
|
"step": 6175 |
|
}, |
|
{ |
|
"epoch": 4.240766073871409, |
|
"grad_norm": 6.012132167816162, |
|
"learning_rate": 3.2010943912448705e-05, |
|
"loss": 0.4779, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 4.257865937072504, |
|
"grad_norm": 5.681918621063232, |
|
"learning_rate": 3.191594467244262e-05, |
|
"loss": 0.5116, |
|
"step": 6225 |
|
}, |
|
{ |
|
"epoch": 4.2749658002735975, |
|
"grad_norm": 7.404222011566162, |
|
"learning_rate": 3.1820945432436546e-05, |
|
"loss": 0.3494, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 4.292065663474692, |
|
"grad_norm": 14.846484184265137, |
|
"learning_rate": 3.172594619243046e-05, |
|
"loss": 0.5149, |
|
"step": 6275 |
|
}, |
|
{ |
|
"epoch": 4.309165526675787, |
|
"grad_norm": 1.3198221921920776, |
|
"learning_rate": 3.1630946952424386e-05, |
|
"loss": 0.4053, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 4.326265389876881, |
|
"grad_norm": 3.9458107948303223, |
|
"learning_rate": 3.15359477124183e-05, |
|
"loss": 0.4499, |
|
"step": 6325 |
|
}, |
|
{ |
|
"epoch": 4.343365253077975, |
|
"grad_norm": 4.272012233734131, |
|
"learning_rate": 3.144094847241223e-05, |
|
"loss": 0.4298, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 4.3604651162790695, |
|
"grad_norm": 8.574115753173828, |
|
"learning_rate": 3.1345949232406144e-05, |
|
"loss": 0.3282, |
|
"step": 6375 |
|
}, |
|
{ |
|
"epoch": 4.377564979480164, |
|
"grad_norm": 10.030281066894531, |
|
"learning_rate": 3.125094999240006e-05, |
|
"loss": 0.486, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 4.394664842681259, |
|
"grad_norm": 8.065329551696777, |
|
"learning_rate": 3.115595075239398e-05, |
|
"loss": 0.332, |
|
"step": 6425 |
|
}, |
|
{ |
|
"epoch": 4.411764705882353, |
|
"grad_norm": 10.601835250854492, |
|
"learning_rate": 3.10609515123879e-05, |
|
"loss": 0.4674, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 4.428864569083447, |
|
"grad_norm": 4.035892486572266, |
|
"learning_rate": 3.096595227238182e-05, |
|
"loss": 0.5078, |
|
"step": 6475 |
|
}, |
|
{ |
|
"epoch": 4.4459644322845415, |
|
"grad_norm": 6.8672685623168945, |
|
"learning_rate": 3.087095303237574e-05, |
|
"loss": 0.3255, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 4.463064295485636, |
|
"grad_norm": 4.926382541656494, |
|
"learning_rate": 3.077595379236966e-05, |
|
"loss": 0.4204, |
|
"step": 6525 |
|
}, |
|
{ |
|
"epoch": 4.480164158686731, |
|
"grad_norm": 4.504785060882568, |
|
"learning_rate": 3.068095455236358e-05, |
|
"loss": 0.4257, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 4.497264021887825, |
|
"grad_norm": 10.662145614624023, |
|
"learning_rate": 3.05859553123575e-05, |
|
"loss": 0.4991, |
|
"step": 6575 |
|
}, |
|
{ |
|
"epoch": 4.514363885088919, |
|
"grad_norm": 4.892814636230469, |
|
"learning_rate": 3.0490956072351423e-05, |
|
"loss": 0.3588, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 4.5314637482900135, |
|
"grad_norm": 2.461637258529663, |
|
"learning_rate": 3.039595683234534e-05, |
|
"loss": 0.3509, |
|
"step": 6625 |
|
}, |
|
{ |
|
"epoch": 4.548563611491108, |
|
"grad_norm": 0.9665390253067017, |
|
"learning_rate": 3.0300957592339263e-05, |
|
"loss": 0.4587, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 4.565663474692203, |
|
"grad_norm": 5.2920002937316895, |
|
"learning_rate": 3.020595835233318e-05, |
|
"loss": 0.3853, |
|
"step": 6675 |
|
}, |
|
{ |
|
"epoch": 4.582763337893297, |
|
"grad_norm": 6.542552471160889, |
|
"learning_rate": 3.0110959112327104e-05, |
|
"loss": 0.475, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 4.599863201094391, |
|
"grad_norm": 3.7444801330566406, |
|
"learning_rate": 3.0015959872321024e-05, |
|
"loss": 0.3638, |
|
"step": 6725 |
|
}, |
|
{ |
|
"epoch": 4.6169630642954855, |
|
"grad_norm": 6.542828559875488, |
|
"learning_rate": 2.992096063231494e-05, |
|
"loss": 0.4892, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 4.63406292749658, |
|
"grad_norm": 1.4131304025650024, |
|
"learning_rate": 2.9825961392308865e-05, |
|
"loss": 0.4617, |
|
"step": 6775 |
|
}, |
|
{ |
|
"epoch": 4.651162790697675, |
|
"grad_norm": 2.6974217891693115, |
|
"learning_rate": 2.973096215230278e-05, |
|
"loss": 0.3848, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 4.668262653898768, |
|
"grad_norm": 5.639090061187744, |
|
"learning_rate": 2.9635962912296705e-05, |
|
"loss": 0.3584, |
|
"step": 6825 |
|
}, |
|
{ |
|
"epoch": 4.685362517099863, |
|
"grad_norm": 11.76826000213623, |
|
"learning_rate": 2.9540963672290622e-05, |
|
"loss": 0.4498, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 4.7024623803009575, |
|
"grad_norm": 3.208690643310547, |
|
"learning_rate": 2.9445964432284546e-05, |
|
"loss": 0.3597, |
|
"step": 6875 |
|
}, |
|
{ |
|
"epoch": 4.719562243502052, |
|
"grad_norm": 5.359434604644775, |
|
"learning_rate": 2.9350965192278463e-05, |
|
"loss": 0.3621, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 4.736662106703147, |
|
"grad_norm": 6.385953426361084, |
|
"learning_rate": 2.9255965952272386e-05, |
|
"loss": 0.4097, |
|
"step": 6925 |
|
}, |
|
{ |
|
"epoch": 4.75376196990424, |
|
"grad_norm": 10.398621559143066, |
|
"learning_rate": 2.9160966712266303e-05, |
|
"loss": 0.3647, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 4.770861833105335, |
|
"grad_norm": 2.8369972705841064, |
|
"learning_rate": 2.9065967472260224e-05, |
|
"loss": 0.3396, |
|
"step": 6975 |
|
}, |
|
{ |
|
"epoch": 4.7879616963064295, |
|
"grad_norm": 6.925849437713623, |
|
"learning_rate": 2.897096823225414e-05, |
|
"loss": 0.3689, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 4.805061559507524, |
|
"grad_norm": 3.137173652648926, |
|
"learning_rate": 2.8875968992248064e-05, |
|
"loss": 0.5454, |
|
"step": 7025 |
|
}, |
|
{ |
|
"epoch": 4.822161422708619, |
|
"grad_norm": 12.329541206359863, |
|
"learning_rate": 2.878096975224198e-05, |
|
"loss": 0.417, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 4.839261285909712, |
|
"grad_norm": 1.9504649639129639, |
|
"learning_rate": 2.8685970512235905e-05, |
|
"loss": 0.3552, |
|
"step": 7075 |
|
}, |
|
{ |
|
"epoch": 4.856361149110807, |
|
"grad_norm": 6.352153778076172, |
|
"learning_rate": 2.859097127222982e-05, |
|
"loss": 0.4427, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 4.8734610123119015, |
|
"grad_norm": 5.652337074279785, |
|
"learning_rate": 2.8495972032223745e-05, |
|
"loss": 0.4454, |
|
"step": 7125 |
|
}, |
|
{ |
|
"epoch": 4.890560875512996, |
|
"grad_norm": 4.495980262756348, |
|
"learning_rate": 2.8400972792217662e-05, |
|
"loss": 0.4396, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 4.907660738714091, |
|
"grad_norm": 9.805567741394043, |
|
"learning_rate": 2.8305973552211586e-05, |
|
"loss": 0.4436, |
|
"step": 7175 |
|
}, |
|
{ |
|
"epoch": 4.924760601915184, |
|
"grad_norm": 2.151742696762085, |
|
"learning_rate": 2.8210974312205503e-05, |
|
"loss": 0.4238, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 4.941860465116279, |
|
"grad_norm": 4.97649621963501, |
|
"learning_rate": 2.8115975072199423e-05, |
|
"loss": 0.4693, |
|
"step": 7225 |
|
}, |
|
{ |
|
"epoch": 4.9589603283173735, |
|
"grad_norm": 2.5066914558410645, |
|
"learning_rate": 2.802097583219334e-05, |
|
"loss": 0.4568, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 4.976060191518468, |
|
"grad_norm": 13.676875114440918, |
|
"learning_rate": 2.7925976592187263e-05, |
|
"loss": 0.3527, |
|
"step": 7275 |
|
}, |
|
{ |
|
"epoch": 4.993160054719562, |
|
"grad_norm": 3.240598678588867, |
|
"learning_rate": 2.783097735218118e-05, |
|
"loss": 0.4515, |
|
"step": 7300 |
|
}, |
|
{
"epoch": 5.0,
"eval_accuracy": 0.9166951177876408,
"eval_f1_macro": 0.7653415866627981,
"eval_f1_micro": 0.9166951177876408,
"eval_f1_weighted": 0.9141778991500009,
"eval_loss": 0.24330385029315948,
"eval_precision_macro": 0.794330339771912,
"eval_precision_micro": 0.9166951177876408,
"eval_precision_weighted": 0.9162095802352392,
"eval_recall_macro": 0.7496536933673316,
"eval_recall_micro": 0.9166951177876408,
"eval_recall_weighted": 0.9166951177876408,
"eval_runtime": 29.505,
"eval_samples_per_second": 99.271,
"eval_steps_per_second": 6.236,
"step": 7310
},
|
{ |
|
"epoch": 5.010259917920656, |
|
"grad_norm": 3.2240583896636963, |
|
"learning_rate": 2.7735978112175104e-05, |
|
"loss": 0.406, |
|
"step": 7325 |
|
}, |
|
{ |
|
"epoch": 5.027359781121751, |
|
"grad_norm": 1.1182893514633179, |
|
"learning_rate": 2.7640978872169028e-05, |
|
"loss": 0.4151, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 5.0444596443228455, |
|
"grad_norm": 3.5205605030059814, |
|
"learning_rate": 2.7545979632162945e-05, |
|
"loss": 0.4349, |
|
"step": 7375 |
|
}, |
|
{ |
|
"epoch": 5.06155950752394, |
|
"grad_norm": 5.898115634918213, |
|
"learning_rate": 2.7450980392156865e-05, |
|
"loss": 0.4127, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 5.078659370725034, |
|
"grad_norm": 2.958627700805664, |
|
"learning_rate": 2.7355981152150785e-05, |
|
"loss": 0.4479, |
|
"step": 7425 |
|
}, |
|
{ |
|
"epoch": 5.095759233926128, |
|
"grad_norm": 4.672809600830078, |
|
"learning_rate": 2.7260981912144705e-05, |
|
"loss": 0.4976, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 5.112859097127223, |
|
"grad_norm": 8.304091453552246, |
|
"learning_rate": 2.7165982672138622e-05, |
|
"loss": 0.4065, |
|
"step": 7475 |
|
}, |
|
{ |
|
"epoch": 5.1299589603283176, |
|
"grad_norm": 2.276463270187378, |
|
"learning_rate": 2.7070983432132546e-05, |
|
"loss": 0.3508, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 5.147058823529412, |
|
"grad_norm": 0.10079041868448257, |
|
"learning_rate": 2.6975984192126463e-05, |
|
"loss": 0.4274, |
|
"step": 7525 |
|
}, |
|
{ |
|
"epoch": 5.164158686730506, |
|
"grad_norm": 5.3365936279296875, |
|
"learning_rate": 2.6880984952120387e-05, |
|
"loss": 0.2555, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 5.1812585499316, |
|
"grad_norm": 0.33459940552711487, |
|
"learning_rate": 2.6785985712114303e-05, |
|
"loss": 0.4309, |
|
"step": 7575 |
|
}, |
|
{ |
|
"epoch": 5.198358413132695, |
|
"grad_norm": 6.256367206573486, |
|
"learning_rate": 2.6690986472108227e-05, |
|
"loss": 0.3263, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 5.2154582763337896, |
|
"grad_norm": 2.592665672302246, |
|
"learning_rate": 2.6595987232102144e-05, |
|
"loss": 0.4089, |
|
"step": 7625 |
|
}, |
|
{ |
|
"epoch": 5.232558139534884, |
|
"grad_norm": 5.69423246383667, |
|
"learning_rate": 2.6500987992096064e-05, |
|
"loss": 0.4288, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 5.249658002735978, |
|
"grad_norm": 7.463606834411621, |
|
"learning_rate": 2.6405988752089985e-05, |
|
"loss": 0.35, |
|
"step": 7675 |
|
}, |
|
{ |
|
"epoch": 5.266757865937072, |
|
"grad_norm": 4.466919898986816, |
|
"learning_rate": 2.6310989512083905e-05, |
|
"loss": 0.4364, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 5.283857729138167, |
|
"grad_norm": 5.60076904296875, |
|
"learning_rate": 2.621599027207782e-05, |
|
"loss": 0.2817, |
|
"step": 7725 |
|
}, |
|
{ |
|
"epoch": 5.300957592339262, |
|
"grad_norm": 1.8708001375198364, |
|
"learning_rate": 2.6120991032071745e-05, |
|
"loss": 0.3567, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 5.318057455540355, |
|
"grad_norm": 4.970040798187256, |
|
"learning_rate": 2.6025991792065662e-05, |
|
"loss": 0.2724, |
|
"step": 7775 |
|
}, |
|
{ |
|
"epoch": 5.33515731874145, |
|
"grad_norm": 1.4727894067764282, |
|
"learning_rate": 2.5930992552059586e-05, |
|
"loss": 0.3978, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 5.352257181942544, |
|
"grad_norm": 4.697504043579102, |
|
"learning_rate": 2.5835993312053503e-05, |
|
"loss": 0.447, |
|
"step": 7825 |
|
}, |
|
{ |
|
"epoch": 5.369357045143639, |
|
"grad_norm": 2.3965351581573486, |
|
"learning_rate": 2.5740994072047426e-05, |
|
"loss": 0.3596, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 5.386456908344734, |
|
"grad_norm": 8.727534294128418, |
|
"learning_rate": 2.5645994832041343e-05, |
|
"loss": 0.3787, |
|
"step": 7875 |
|
}, |
|
{ |
|
"epoch": 5.403556771545827, |
|
"grad_norm": 5.903003692626953, |
|
"learning_rate": 2.5550995592035264e-05, |
|
"loss": 0.3899, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 5.420656634746922, |
|
"grad_norm": 4.570006847381592, |
|
"learning_rate": 2.5455996352029187e-05, |
|
"loss": 0.4558, |
|
"step": 7925 |
|
}, |
|
{ |
|
"epoch": 5.437756497948016, |
|
"grad_norm": 0.9773057699203491, |
|
"learning_rate": 2.5360997112023104e-05, |
|
"loss": 0.3345, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 5.454856361149111, |
|
"grad_norm": 3.641374349594116, |
|
"learning_rate": 2.5265997872017028e-05, |
|
"loss": 0.4155, |
|
"step": 7975 |
|
}, |
|
{ |
|
"epoch": 5.471956224350206, |
|
"grad_norm": 5.382412433624268, |
|
"learning_rate": 2.5170998632010945e-05, |
|
"loss": 0.4111, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 5.489056087551299, |
|
"grad_norm": 2.235764980316162, |
|
"learning_rate": 2.507599939200487e-05, |
|
"loss": 0.3047, |
|
"step": 8025 |
|
}, |
|
{ |
|
"epoch": 5.506155950752394, |
|
"grad_norm": 3.8063552379608154, |
|
"learning_rate": 2.4981000151998785e-05, |
|
"loss": 0.4702, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 5.523255813953488, |
|
"grad_norm": 3.573058843612671, |
|
"learning_rate": 2.4886000911992706e-05, |
|
"loss": 0.4619, |
|
"step": 8075 |
|
}, |
|
{ |
|
"epoch": 5.540355677154583, |
|
"grad_norm": 7.515729904174805, |
|
"learning_rate": 2.4791001671986626e-05, |
|
"loss": 0.3179, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 5.557455540355678, |
|
"grad_norm": 1.7556642293930054, |
|
"learning_rate": 2.4696002431980546e-05, |
|
"loss": 0.3382, |
|
"step": 8125 |
|
}, |
|
{ |
|
"epoch": 5.574555403556771, |
|
"grad_norm": 2.0541181564331055, |
|
"learning_rate": 2.4601003191974463e-05, |
|
"loss": 0.4592, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 5.591655266757866, |
|
"grad_norm": 2.7318685054779053, |
|
"learning_rate": 2.4506003951968383e-05, |
|
"loss": 0.4169, |
|
"step": 8175 |
|
}, |
|
{ |
|
"epoch": 5.60875512995896, |
|
"grad_norm": 8.095924377441406, |
|
"learning_rate": 2.4411004711962304e-05, |
|
"loss": 0.4333, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 5.625854993160055, |
|
"grad_norm": 2.2528128623962402, |
|
"learning_rate": 2.4316005471956224e-05, |
|
"loss": 0.402, |
|
"step": 8225 |
|
}, |
|
{ |
|
"epoch": 5.642954856361149, |
|
"grad_norm": 10.526260375976562, |
|
"learning_rate": 2.4221006231950147e-05, |
|
"loss": 0.3477, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 5.660054719562243, |
|
"grad_norm": 5.1542487144470215, |
|
"learning_rate": 2.4126006991944068e-05, |
|
"loss": 0.3982, |
|
"step": 8275 |
|
}, |
|
{ |
|
"epoch": 5.677154582763338, |
|
"grad_norm": 5.238847255706787, |
|
"learning_rate": 2.4031007751937988e-05, |
|
"loss": 0.416, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 5.694254445964432, |
|
"grad_norm": 8.261098861694336, |
|
"learning_rate": 2.3936008511931908e-05, |
|
"loss": 0.2776, |
|
"step": 8325 |
|
}, |
|
{ |
|
"epoch": 5.711354309165527, |
|
"grad_norm": 6.928776741027832, |
|
"learning_rate": 2.3841009271925825e-05, |
|
"loss": 0.3845, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 5.728454172366621, |
|
"grad_norm": 9.97043514251709, |
|
"learning_rate": 2.3746010031919745e-05, |
|
"loss": 0.4559, |
|
"step": 8375 |
|
}, |
|
{ |
|
"epoch": 5.745554035567715, |
|
"grad_norm": 1.063759446144104, |
|
"learning_rate": 2.3651010791913666e-05, |
|
"loss": 0.2716, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 5.76265389876881, |
|
"grad_norm": 2.098356246948242, |
|
"learning_rate": 2.3556011551907586e-05, |
|
"loss": 0.3414, |
|
"step": 8425 |
|
}, |
|
{ |
|
"epoch": 5.779753761969904, |
|
"grad_norm": 5.218485355377197, |
|
"learning_rate": 2.3461012311901506e-05, |
|
"loss": 0.3264, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 5.796853625170999, |
|
"grad_norm": 5.245093822479248, |
|
"learning_rate": 2.3366013071895427e-05, |
|
"loss": 0.3575, |
|
"step": 8475 |
|
}, |
|
{ |
|
"epoch": 5.813953488372093, |
|
"grad_norm": 2.249113082885742, |
|
"learning_rate": 2.3271013831889347e-05, |
|
"loss": 0.3042, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 5.831053351573187, |
|
"grad_norm": 8.331680297851562, |
|
"learning_rate": 2.3176014591883267e-05, |
|
"loss": 0.349, |
|
"step": 8525 |
|
}, |
|
{ |
|
"epoch": 5.848153214774282, |
|
"grad_norm": 5.0042805671691895, |
|
"learning_rate": 2.3081015351877187e-05, |
|
"loss": 0.4322, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 5.865253077975376, |
|
"grad_norm": 15.829472541809082, |
|
"learning_rate": 2.2986016111871104e-05, |
|
"loss": 0.5301, |
|
"step": 8575 |
|
}, |
|
{ |
|
"epoch": 5.882352941176471, |
|
"grad_norm": 5.099222660064697, |
|
"learning_rate": 2.2891016871865025e-05, |
|
"loss": 0.4318, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 5.899452804377565, |
|
"grad_norm": 3.355231285095215, |
|
"learning_rate": 2.2796017631858945e-05, |
|
"loss": 0.3114, |
|
"step": 8625 |
|
}, |
|
{ |
|
"epoch": 5.916552667578659, |
|
"grad_norm": 5.962894916534424, |
|
"learning_rate": 2.2701018391852865e-05, |
|
"loss": 0.389, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 5.933652530779754, |
|
"grad_norm": 0.5913941860198975, |
|
"learning_rate": 2.2606019151846785e-05, |
|
"loss": 0.3845, |
|
"step": 8675 |
|
}, |
|
{ |
|
"epoch": 5.950752393980848, |
|
"grad_norm": 2.7873880863189697, |
|
"learning_rate": 2.2511019911840706e-05, |
|
"loss": 0.3045, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 5.967852257181942, |
|
"grad_norm": 6.107980728149414, |
|
"learning_rate": 2.2416020671834626e-05, |
|
"loss": 0.4016, |
|
"step": 8725 |
|
}, |
|
{ |
|
"epoch": 5.984952120383037, |
|
"grad_norm": 2.5966835021972656, |
|
"learning_rate": 2.2321021431828546e-05, |
|
"loss": 0.3413, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_accuracy": 0.9095254353021509, |
|
"eval_f1_macro": 0.787176394659271, |
|
"eval_f1_micro": 0.9095254353021509, |
|
"eval_f1_weighted": 0.9072901390204282, |
|
"eval_loss": 0.29091742634773254, |
|
"eval_precision_macro": 0.8953297645243259, |
|
"eval_precision_micro": 0.9095254353021509, |
|
"eval_precision_weighted": 0.9174594106183492, |
|
"eval_recall_macro": 0.7531104129954299, |
|
"eval_recall_micro": 0.9095254353021509, |
|
"eval_recall_weighted": 0.9095254353021509, |
|
"eval_runtime": 29.8878, |
|
"eval_samples_per_second": 98.0, |
|
"eval_steps_per_second": 6.156, |
|
"step": 8772 |
|
}, |
|
{ |
|
"epoch": 6.002051983584131, |
|
"grad_norm": 6.595887660980225, |
|
"learning_rate": 2.2226022191822466e-05, |
|
"loss": 0.3606, |
|
"step": 8775 |
|
}, |
|
{ |
|
"epoch": 6.019151846785226, |
|
"grad_norm": 6.390087127685547, |
|
"learning_rate": 2.2131022951816387e-05, |
|
"loss": 0.4026, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 6.03625170998632, |
|
"grad_norm": 6.15175199508667, |
|
"learning_rate": 2.2036023711810304e-05, |
|
"loss": 0.3281, |
|
"step": 8825 |
|
}, |
|
{ |
|
"epoch": 6.053351573187414, |
|
"grad_norm": 18.778564453125, |
|
"learning_rate": 2.1941024471804227e-05, |
|
"loss": 0.4304, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 6.070451436388509, |
|
"grad_norm": 9.964593887329102, |
|
"learning_rate": 2.1846025231798148e-05, |
|
"loss": 0.2677, |
|
"step": 8875 |
|
}, |
|
{ |
|
"epoch": 6.087551299589603, |
|
"grad_norm": 10.952362060546875, |
|
"learning_rate": 2.1751025991792068e-05, |
|
"loss": 0.5143, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 6.104651162790698, |
|
"grad_norm": 2.978898048400879, |
|
"learning_rate": 2.1656026751785988e-05, |
|
"loss": 0.3024, |
|
"step": 8925 |
|
}, |
|
{ |
|
"epoch": 6.121751025991792, |
|
"grad_norm": 3.3607687950134277, |
|
"learning_rate": 2.156102751177991e-05, |
|
"loss": 0.2684, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 6.138850889192886, |
|
"grad_norm": 1.9624247550964355, |
|
"learning_rate": 2.146602827177383e-05, |
|
"loss": 0.3062, |
|
"step": 8975 |
|
}, |
|
{ |
|
"epoch": 6.155950752393981, |
|
"grad_norm": 1.7253330945968628, |
|
"learning_rate": 2.137102903176775e-05, |
|
"loss": 0.3242, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 6.173050615595075, |
|
"grad_norm": 2.6368486881256104, |
|
"learning_rate": 2.1276029791761666e-05, |
|
"loss": 0.3678, |
|
"step": 9025 |
|
}, |
|
{ |
|
"epoch": 6.19015047879617, |
|
"grad_norm": 7.951999664306641, |
|
"learning_rate": 2.1181030551755586e-05, |
|
"loss": 0.3242, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 6.207250341997264, |
|
"grad_norm": 13.932353019714355, |
|
"learning_rate": 2.1086031311749506e-05, |
|
"loss": 0.244, |
|
"step": 9075 |
|
}, |
|
{ |
|
"epoch": 6.224350205198358, |
|
"grad_norm": 0.4225591719150543, |
|
"learning_rate": 2.0991032071743427e-05, |
|
"loss": 0.503, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 6.241450068399453, |
|
"grad_norm": 5.91692590713501, |
|
"learning_rate": 2.0896032831737347e-05, |
|
"loss": 0.3721, |
|
"step": 9125 |
|
}, |
|
{ |
|
"epoch": 6.258549931600547, |
|
"grad_norm": 7.2567009925842285, |
|
"learning_rate": 2.0801033591731267e-05, |
|
"loss": 0.3032, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 6.275649794801642, |
|
"grad_norm": 12.250884056091309, |
|
"learning_rate": 2.0706034351725188e-05, |
|
"loss": 0.4468, |
|
"step": 9175 |
|
}, |
|
{ |
|
"epoch": 6.292749658002736, |
|
"grad_norm": 5.396862030029297, |
|
"learning_rate": 2.0611035111719108e-05, |
|
"loss": 0.357, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 6.30984952120383, |
|
"grad_norm": 0.8270015120506287, |
|
"learning_rate": 2.0516035871713028e-05, |
|
"loss": 0.2127, |
|
"step": 9225 |
|
}, |
|
{ |
|
"epoch": 6.326949384404925, |
|
"grad_norm": 6.996820449829102, |
|
"learning_rate": 2.042103663170695e-05, |
|
"loss": 0.4843, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 6.344049247606019, |
|
"grad_norm": 3.144995927810669, |
|
"learning_rate": 2.0326037391700865e-05, |
|
"loss": 0.4409, |
|
"step": 9275 |
|
}, |
|
{ |
|
"epoch": 6.361149110807114, |
|
"grad_norm": 10.23337459564209, |
|
"learning_rate": 2.0231038151694785e-05, |
|
"loss": 0.406, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 6.378248974008208, |
|
"grad_norm": 3.1300835609436035, |
|
"learning_rate": 2.0136038911688706e-05, |
|
"loss": 0.2881, |
|
"step": 9325 |
|
}, |
|
{ |
|
"epoch": 6.395348837209302, |
|
"grad_norm": 3.322756290435791, |
|
"learning_rate": 2.0041039671682626e-05, |
|
"loss": 0.2679, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 6.412448700410397, |
|
"grad_norm": 7.402932643890381, |
|
"learning_rate": 1.9946040431676546e-05, |
|
"loss": 0.3229, |
|
"step": 9375 |
|
}, |
|
{ |
|
"epoch": 6.429548563611491, |
|
"grad_norm": 2.389960289001465, |
|
"learning_rate": 1.9851041191670467e-05, |
|
"loss": 0.3176, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 6.446648426812586, |
|
"grad_norm": 4.060113906860352, |
|
"learning_rate": 1.9756041951664387e-05, |
|
"loss": 0.4683, |
|
"step": 9425 |
|
}, |
|
{ |
|
"epoch": 6.46374829001368, |
|
"grad_norm": 3.9129064083099365, |
|
"learning_rate": 1.966104271165831e-05, |
|
"loss": 0.3247, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 6.480848153214774, |
|
"grad_norm": 7.171699523925781, |
|
"learning_rate": 1.9566043471652227e-05, |
|
"loss": 0.2698, |
|
"step": 9475 |
|
}, |
|
{ |
|
"epoch": 6.497948016415869, |
|
"grad_norm": 5.123276233673096, |
|
"learning_rate": 1.9471044231646148e-05, |
|
"loss": 0.3883, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 6.515047879616963, |
|
"grad_norm": 1.7513083219528198, |
|
"learning_rate": 1.9376044991640068e-05, |
|
"loss": 0.3488, |
|
"step": 9525 |
|
}, |
|
{ |
|
"epoch": 6.532147742818058, |
|
"grad_norm": 3.334690809249878, |
|
"learning_rate": 1.9281045751633988e-05, |
|
"loss": 0.4843, |
|
"step": 9550 |
|
}, |
|
{ |
|
"epoch": 6.549247606019152, |
|
"grad_norm": 2.4918205738067627, |
|
"learning_rate": 1.918604651162791e-05, |
|
"loss": 0.4017, |
|
"step": 9575 |
|
}, |
|
{ |
|
"epoch": 6.566347469220246, |
|
"grad_norm": 3.277615785598755, |
|
"learning_rate": 1.909104727162183e-05, |
|
"loss": 0.3807, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 6.583447332421341, |
|
"grad_norm": 0.09262839704751968, |
|
"learning_rate": 1.899604803161575e-05, |
|
"loss": 0.3623, |
|
"step": 9625 |
|
}, |
|
{ |
|
"epoch": 6.600547195622435, |
|
"grad_norm": 9.153847694396973, |
|
"learning_rate": 1.890104879160967e-05, |
|
"loss": 0.3903, |
|
"step": 9650 |
|
}, |
|
{ |
|
"epoch": 6.617647058823529, |
|
"grad_norm": 4.24110221862793, |
|
"learning_rate": 1.880604955160359e-05, |
|
"loss": 0.3078, |
|
"step": 9675 |
|
}, |
|
{ |
|
"epoch": 6.634746922024624, |
|
"grad_norm": 5.625593185424805, |
|
"learning_rate": 1.871105031159751e-05, |
|
"loss": 0.3915, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 6.651846785225718, |
|
"grad_norm": 6.439937591552734, |
|
"learning_rate": 1.8616051071591427e-05, |
|
"loss": 0.4783, |
|
"step": 9725 |
|
}, |
|
{ |
|
"epoch": 6.668946648426813, |
|
"grad_norm": 6.7055559158325195, |
|
"learning_rate": 1.8521051831585347e-05, |
|
"loss": 0.3364, |
|
"step": 9750 |
|
}, |
|
{ |
|
"epoch": 6.686046511627907, |
|
"grad_norm": 2.9428536891937256, |
|
"learning_rate": 1.8426052591579267e-05, |
|
"loss": 0.3409, |
|
"step": 9775 |
|
}, |
|
{ |
|
"epoch": 6.703146374829001, |
|
"grad_norm": 3.669163942337036, |
|
"learning_rate": 1.8331053351573188e-05, |
|
"loss": 0.2637, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 6.720246238030096, |
|
"grad_norm": 2.797004461288452, |
|
"learning_rate": 1.8236054111567108e-05, |
|
"loss": 0.3721, |
|
"step": 9825 |
|
}, |
|
{ |
|
"epoch": 6.73734610123119, |
|
"grad_norm": 1.3925397396087646, |
|
"learning_rate": 1.8141054871561028e-05, |
|
"loss": 0.3227, |
|
"step": 9850 |
|
}, |
|
{ |
|
"epoch": 6.754445964432285, |
|
"grad_norm": 9.197000503540039, |
|
"learning_rate": 1.804605563155495e-05, |
|
"loss": 0.3789, |
|
"step": 9875 |
|
}, |
|
{ |
|
"epoch": 6.771545827633379, |
|
"grad_norm": 7.823335647583008, |
|
"learning_rate": 1.795105639154887e-05, |
|
"loss": 0.2718, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 6.788645690834473, |
|
"grad_norm": 1.3368386030197144, |
|
"learning_rate": 1.785605715154279e-05, |
|
"loss": 0.3345, |
|
"step": 9925 |
|
}, |
|
{ |
|
"epoch": 6.805745554035568, |
|
"grad_norm": 4.986422061920166, |
|
"learning_rate": 1.776105791153671e-05, |
|
"loss": 0.3514, |
|
"step": 9950 |
|
}, |
|
{ |
|
"epoch": 6.822845417236662, |
|
"grad_norm": 3.647663116455078, |
|
"learning_rate": 1.7666058671530626e-05, |
|
"loss": 0.3476, |
|
"step": 9975 |
|
}, |
|
{ |
|
"epoch": 6.839945280437757, |
|
"grad_norm": 10.99355697631836, |
|
"learning_rate": 1.7571059431524546e-05, |
|
"loss": 0.4088, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 6.857045143638851, |
|
"grad_norm": 6.52988862991333, |
|
"learning_rate": 1.7476060191518467e-05, |
|
"loss": 0.3857, |
|
"step": 10025 |
|
}, |
|
{ |
|
"epoch": 6.874145006839945, |
|
"grad_norm": 4.713931560516357, |
|
"learning_rate": 1.738106095151239e-05, |
|
"loss": 0.5173, |
|
"step": 10050 |
|
}, |
|
{ |
|
"epoch": 6.89124487004104, |
|
"grad_norm": 1.7083477973937988, |
|
"learning_rate": 1.728606171150631e-05, |
|
"loss": 0.2722, |
|
"step": 10075 |
|
}, |
|
{ |
|
"epoch": 6.908344733242134, |
|
"grad_norm": 2.890033483505249, |
|
"learning_rate": 1.719106247150023e-05, |
|
"loss": 0.346, |
|
"step": 10100 |
|
}, |
|
{ |
|
"epoch": 6.925444596443229, |
|
"grad_norm": 2.318939208984375, |
|
"learning_rate": 1.709606323149415e-05, |
|
"loss": 0.3563, |
|
"step": 10125 |
|
}, |
|
{ |
|
"epoch": 6.942544459644322, |
|
"grad_norm": 1.522857666015625, |
|
"learning_rate": 1.700106399148807e-05, |
|
"loss": 0.4441, |
|
"step": 10150 |
|
}, |
|
{ |
|
"epoch": 6.959644322845417, |
|
"grad_norm": 4.418458938598633, |
|
"learning_rate": 1.690986472108223e-05, |
|
"loss": 0.253, |
|
"step": 10175 |
|
}, |
|
{ |
|
"epoch": 6.976744186046512, |
|
"grad_norm": 7.665693759918213, |
|
"learning_rate": 1.6814865481076152e-05, |
|
"loss": 0.329, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 6.993844049247606, |
|
"grad_norm": 2.388054609298706, |
|
"learning_rate": 1.6719866241070072e-05, |
|
"loss": 0.3777, |
|
"step": 10225 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_accuracy": 0.9214749061113008, |
|
"eval_f1_macro": 0.833050479179196, |
|
"eval_f1_micro": 0.9214749061113008, |
|
"eval_f1_weighted": 0.9214057890123231, |
|
"eval_loss": 0.23535355925559998, |
|
"eval_precision_macro": 0.8898753259090769, |
|
"eval_precision_micro": 0.9214749061113008, |
|
"eval_precision_weighted": 0.9262844759199463, |
|
"eval_recall_macro": 0.7987766214501736, |
|
"eval_recall_micro": 0.9214749061113008, |
|
"eval_recall_weighted": 0.9214749061113008, |
|
"eval_runtime": 30.0564, |
|
"eval_samples_per_second": 97.45, |
|
"eval_steps_per_second": 6.122, |
|
"step": 10234 |
|
} |
|
], |
|
"logging_steps": 25, |
|
"max_steps": 14620, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 10, |
|
"save_steps": 500, |
|
"total_flos": 2.242292315610641e+19, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
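The file above is a standard Hugging Face Trainer state dump: `log_history` holds per-step training records (`loss`, `learning_rate`, `grad_norm`, `step`) interleaved with per-epoch `eval_*` summaries, and the trailing fields carry run-level settings (`logging_steps`, `max_steps`, `num_train_epochs`, `save_steps`, `train_batch_size`). A minimal sketch of how one might load and summarize it follows; the local path `trainer_state.json` and the use of Python's standard `json` module are illustrative assumptions, not part of the training run itself.

```python
import json

# Assumed local copy of the trainer state shown above; adjust the path as needed.
PATH = "trainer_state.json"

with open(PATH) as f:
    state = json.load(f)

# Split log_history into per-step training records and per-epoch eval records.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

# Last few training records: loss and learning-rate trajectory.
for entry in train_logs[-3:]:
    print(f"step {entry['step']:>5}  loss {entry['loss']:.4f}  "
          f"lr {entry['learning_rate']:.3e}")

# Per-epoch evaluation: the best epoch is the one with the lowest eval_loss.
best = min(eval_logs, key=lambda e: e["eval_loss"])
print(f"best eval_loss {best['eval_loss']:.4f} at step {best['step']} "
      f"(epoch {best['epoch']:.0f}, accuracy {best['eval_accuracy']:.4f})")

# Run-level settings recorded alongside the history.
print("max_steps:", state["max_steps"], "| num_train_epochs:", state["num_train_epochs"])
```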
|