Columns (one record per model × setting × method submission; ranges and class values observed over the records below):

  model name       string   length 5–56
  dataset          string   3 classes: Setting1, Setting2, Setting3
  method           string   3 classes: icl, crf, ft
  file name        string   1 class: answer.txt
  submitter        string   1 class: zhaorui
  MICRO precision  float64  0.03–0.96
  MICRO recall     float64  0–0.94
  MICRO f1         float64  0–0.94
  MACRO precision  float64  0.13–0.94
  MACRO recall     float64  0–0.93
  MACRO f1         float64  0–0.93
  detail result    string   length 1.16k–1.48k; JSON with per-category precision/recall/f1/support plus MICRO_AVERAGE and MACRO_AVERAGE entries
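How the detail result JSON relates to the scalar columns (a minimal sketch; the aggregation rules are inferred from the stored values, and `detail_json` below is a trimmed stand-in for one row's field, not a full record): MACRO precision and recall are unweighted means over every listed category, zero-support categories included, and MACRO f1 is the harmonic mean of those two means rather than a mean of per-category f1 scores. MICRO recall is total true positives over total support, recovering per-category true positives as recall × support. MICRO precision cannot always be re-derived from the JSON alone, since predicted counts are missing for categories with zero true positives (e.g. CONTACT or PROFESSION entries with precision 0.0).

```python
import json

# Trimmed stand-in for one row's "detail result" field (two categories only);
# real rows carry all categories plus MICRO_AVERAGE / MACRO_AVERAGE entries.
detail_json = """{
  "DATE": {"precision": 0.7972571428571429, "recall": 0.4552930426837227,
           "f1": 0.5795945496842805, "support": 7661},
  "AGE":  {"precision": 0.41732283464566927, "recall": 0.37857142857142856,
           "f1": 0.3970037453183521, "support": 140}
}"""

def f1(p, r):
    # Harmonic mean of precision and recall; 0 when both are 0.
    return 2 * p * r / (p + r) if p + r else 0.0

per_cat = {k: v for k, v in json.loads(detail_json).items()
           if k not in ("MICRO_AVERAGE", "MACRO_AVERAGE")}

# MACRO columns: unweighted mean over every listed category (zero-support
# categories included); MACRO f1 is f1(macro_p, macro_r), not a mean of f1s.
macro_p = sum(c["precision"] for c in per_cat.values()) / len(per_cat)
macro_r = sum(c["recall"] for c in per_cat.values()) / len(per_cat)
print("MACRO:", macro_p, macro_r, f1(macro_p, macro_r))

# MICRO recall: total true positives over total support, with TP per category
# recovered as recall * support (integral in the rows spot-checked).
tp = sum(round(c["recall"] * c["support"]) for c in per_cat.values())
support = sum(c["support"] for c in per_cat.values())
print("MICRO recall:", tp / support if support else 0.0)
```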
01-ai@Yi-1.5-6B-Chat | Setting1 | icl | answer.txt | zhaorui
MICRO: P=0.673741  R=0.336458  F1=0.448793 | MACRO: P=0.44781  R=0.248924  F1=0.319981
detail result:
{ "ID": { "precision": 0.7566633761105627, "recall": 0.34253156071947266, "f1": 0.4715834807352149, "support": 8951 }, "CONTACT": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 10 }, "DATE": { "precision": 0.7972571428571429, "recall": 0.4552930426837227, "f1": 0.5795945496842805, "support": 7661 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "AGE": { "precision": 0.41732283464566927, "recall": 0.37857142857142856, "f1": 0.3970037453183521, "support": 140 }, "LOCATION": { "precision": 0.6281911411739035, "recall": 0.2920470683708663, "f1": 0.3987258500103871, "support": 9858 }, "NAME": { "precision": 0.5352372178719484, "recall": 0.2740242895884919, "f1": 0.3624736801060594, "support": 8481 }, "MICRO_AVERAGE": { "precision": 0.673740658337612, "recall": 0.3364576507791801, "f1": 0.4487934638039141, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.4478102446656038, "recall": 0.2489239128477117, "f1": 0.319980518002156, "support": 35101 } }
01-ai@Yi-1.5-6B-Chat | Setting2 | icl | answer.txt | zhaorui
MICRO: P=0.396261  R=0.1807  F1=0.248212 | MACRO: P=0.317656  R=0.175866  F1=0.226393
detail result:
{ "ID": { "precision": 0.5772517159266004, "recall": 0.6081759149940968, "f1": 0.5923104563420769, "support": 6776 }, "CONTACT": { "precision": 0.23333333333333334, "recall": 0.006329113924050633, "f1": 0.01232394366197183, "support": 1106 }, "DATE": { "precision": 0.45361826129188926, "recall": 0.18899231080534196, "f1": 0.26681902585344947, "support": 29652 }, "PROFESSION": { "precision": 0.02564102564102564, "recall": 0.0003455425017277125, "f1": 0.0006818956699624957, "support": 2894 }, "AGE": { "precision": 0.42473118279569894, "recall": 0.21703296703296704, "f1": 0.2872727272727273, "support": 8008 }, "LOCATION": { "precision": 0.25829764453961457, "recall": 0.11024791500057124, "f1": 0.15453599167267196, "support": 17506 }, "NAME": { "precision": 0.25071613695266676, "recall": 0.09994018813550105, "f1": 0.14291268175103025, "support": 18391 }, "MICRO_AVERAGE": { "precision": 0.3962607587695348, "recall": 0.1807003189735928, "f1": 0.2482123951461846, "support": 84333 }, "MACRO_AVERAGE": { "precision": 0.31765561435440415, "recall": 0.17586627891346523, "f1": 0.22639283741829033, "support": 84333 } }
01-ai@Yi-1.5-6B-Chat | Setting3 | icl | answer.txt | zhaorui
MICRO: P=0.833257  R=0.600661  F1=0.698094 | MACRO: P=0.801466  R=0.576268  F1=0.670462
detail result:
{ "ID": { "precision": 0.8891784391715813, "recall": 0.8323276415457697, "f1": 0.859814323607427, "support": 7789 }, "CONTACT": { "precision": 0.8794117647058823, "recall": 0.5652173913043478, "f1": 0.6881472957422324, "support": 529 }, "DATE": { "precision": 0.8270029889917621, "recall": 0.5915419512958231, "f1": 0.6897306499665592, "support": 19177 }, "PROFESSION": { "precision": 0.566747572815534, "recall": 0.3286418015482055, "f1": 0.4160356347438752, "support": 1421 }, "AGE": { "precision": 0.7702618453865336, "recall": 0.6048959608323133, "f1": 0.6776360894007953, "support": 4085 }, "LOCATION": { "precision": 0.8608566193311956, "recall": 0.5441187115454217, "f1": 0.6667849381292412, "support": 13815 }, "NAME": { "precision": 0.8167994966442953, "recall": 0.5671326634629387, "f1": 0.669445638160722, "support": 13734 }, "MICRO_AVERAGE": { "precision": 0.8332569648093842, "recall": 0.6006606110652354, "f1": 0.698094013320793, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.8014655324352548, "recall": 0.5762680173621171, "f1": 0.670461938636052, "support": 60550 } }
CRF++ | Setting1 | crf | answer.txt | zhaorui
MICRO: P=0.150317  R=0.028375  F1=0.047739 | MACRO: P=0.246104  R=0.096254  F1=0.138384
detail result:
{ "ID": { "precision": 0.0006702412868632708, "recall": 0.00011171936096525528, "f1": 0.00019151584793641676, "support": 8951 }, "LOCATION": { "precision": 0.5439814814814815, "recall": 0.023838506796510447, "f1": 0.04567541302235179, "support": 9858 }, "CONTACT": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 10 }, "AGE": { "precision": 0.7096774193548387, "recall": 0.4714285714285714, "f1": 0.5665236051502146, "support": 140 }, "NAME": { "precision": 0.15868121442125238, "recall": 0.07888220728687655, "f1": 0.10537922343860753, "support": 8481 }, "DATE": { "precision": 0.06361323155216285, "recall": 0.0032632815559326457, "f1": 0.006208095356344674, "support": 7661 }, "MICRO_AVERAGE": { "precision": 0.15031693329308785, "recall": 0.028375259964103585, "f1": 0.0477388741102883, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.24610393134943312, "recall": 0.09625404773814272, "f1": 0.13838438712476084, "support": 35101 } }
CRF++ | Setting2 | crf | answer.txt | zhaorui
MICRO: P=0.151517  R=0.046411  F1=0.071057 | MACRO: P=0.317146  R=0.07985  F1=0.127578
detail result:
{ "ID": { "precision": 0.14167330677290838, "recall": 0.5247933884297521, "f1": 0.2231145689546995, "support": 6776 }, "LOCATION": { "precision": 0.581081081081081, "recall": 0.0024563006969039185, "f1": 0.0048919226393629115, "support": 17506 }, "CONTACT": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 1106 }, "AGE": { "precision": 0.9344978165938864, "recall": 0.026723276723276724, "f1": 0.0519606652907612, "support": 8008 }, "NAME": { "precision": 0.4691358024691358, "recall": 0.004132456092654015, "f1": 0.008192745108607772, "support": 18391 }, "DATE": { "precision": 0.09363295880149813, "recall": 0.0008431134493457439, "f1": 0.0016711788495604799, "support": 29652 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 2894 }, "MICRO_AVERAGE": { "precision": 0.15151749767729947, "recall": 0.046411250637354295, "f1": 0.07105705078745518, "support": 84333 }, "MACRO_AVERAGE": { "precision": 0.31714585224550135, "recall": 0.07984979077027607, "f1": 0.127578377198753, "support": 84333 } }
CRF++ | Setting3 | crf | answer.txt | zhaorui
MICRO: P=0.662311  R=0.230206  F1=0.341659 | MACRO: P=0.601092  R=0.196578  F1=0.296267
detail result:
{ "ID": { "precision": 0.860125260960334, "recall": 0.21158043394530748, "f1": 0.3396187532199897, "support": 7789 }, "LOCATION": { "precision": 0.5381040892193308, "recall": 0.16764386536373507, "f1": 0.2556432474198355, "support": 13815 }, "CONTACT": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 529 }, "AGE": { "precision": 0.8114254021075984, "recall": 0.3581395348837209, "f1": 0.49694293478260865, "support": 4085 }, "NAME": { "precision": 0.5187872505830526, "recall": 0.291539245667686, "f1": 0.3732985269438747, "support": 13734 }, "DATE": { "precision": 0.8627138877835256, "recall": 0.22610418730771237, "f1": 0.35830268974920465, "support": 19177 }, "PROFESSION": { "precision": 0.6164874551971327, "recall": 0.12104152005629838, "f1": 0.20235294117647057, "support": 1421 }, "MICRO_AVERAGE": { "precision": 0.6623111280053217, "recall": 0.23020644095788603, "f1": 0.3416589048482769, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.6010919065501391, "recall": 0.1965783981749229, "f1": 0.29626697507879357, "support": 60550 } }
EleutherAI@pythia-1b | Setting1 | icl | answer.txt | zhaorui
MICRO: P=0.538551  R=0.250135  F1=0.341608 | MACRO: P=0.36385  R=0.153981  F1=0.216387
detail result:
{ "ID": { "precision": 0.45235707121364094, "recall": 0.20154172718132052, "f1": 0.27884689697812814, "support": 8951 }, "CONTACT": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 10 }, "DATE": { "precision": 0.7463672391017173, "recall": 0.368750815820389, "f1": 0.49362222610518963, "support": 7661 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "AGE": { "precision": 0.25, "recall": 0.05, "f1": 0.08333333333333334, "support": 140 }, "LOCATION": { "precision": 0.7067866516685414, "recall": 0.19121525664434977, "f1": 0.30099800399201593, "support": 9858 }, "NAME": { "precision": 0.3914399584127534, "recall": 0.26636009904492397, "f1": 0.3170081392085321, "support": 8481 }, "MICRO_AVERAGE": { "precision": 0.5385511868981169, "recall": 0.2501353237799493, "f1": 0.3416076569916738, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.36385013148523615, "recall": 0.15398112838442618, "f1": 0.21638729891671737, "support": 35101 } }
EleutherAI@pythia-1b | Setting2 | icl | answer.txt | zhaorui
MICRO: P=0.229472  R=0.151139  F1=0.182245 | MACRO: P=0.246173  R=0.147191  F1=0.184229
detail result:
{ "ID": { "precision": 0.4749574105621806, "recall": 0.6171782762691853, "f1": 0.5368076503433669, "support": 6776 }, "CONTACT": { "precision": 0.11428571428571428, "recall": 0.003616636528028933, "f1": 0.007011393514460999, "support": 1106 }, "DATE": { "precision": 0.45404242237934217, "recall": 0.14943342776203966, "f1": 0.22486107939407782, "support": 29652 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 2894 }, "AGE": { "precision": 0.4815595363540569, "recall": 0.057067932067932065, "f1": 0.10204309478620074, "support": 8008 }, "LOCATION": { "precision": 0.08061344868265828, "recall": 0.07026162458585627, "f1": 0.07508240752044927, "support": 17506 }, "NAME": { "precision": 0.11774916823376248, "recall": 0.13278233918764612, "f1": 0.12481472016355737, "support": 18391 }, "MICRO_AVERAGE": { "precision": 0.22947159960392474, "recall": 0.1511389373080526, "f1": 0.18224452737385438, "support": 84333 }, "MACRO_AVERAGE": { "precision": 0.24617252864253067, "recall": 0.14719146234295546, "f1": 0.18422883288721825, "support": 84333 } }
EleutherAI@pythia-1b | Setting3 | icl | answer.txt | zhaorui
MICRO: P=0.861082  R=0.552898  F1=0.673405 | MACRO: P=0.814266  R=0.491727  F1=0.613168
detail result:
{ "ID": { "precision": 0.87248322147651, "recall": 0.7844395942996534, "f1": 0.8261222282314765, "support": 7789 }, "CONTACT": { "precision": 0.8852459016393442, "recall": 0.5103969754253308, "f1": 0.6474820143884891, "support": 529 }, "DATE": { "precision": 0.8811303704920727, "recall": 0.5593158471085155, "f1": 0.684274322169059, "support": 19177 }, "PROFESSION": { "precision": 0.5316091954022989, "recall": 0.1301900070372977, "f1": 0.2091577162238553, "support": 1421 }, "AGE": { "precision": 0.8147044212617983, "recall": 0.401468788249694, "f1": 0.5378812725483766, "support": 4085 }, "LOCATION": { "precision": 0.8922374429223744, "recall": 0.49504162142598623, "f1": 0.6367783985102421, "support": 13815 }, "NAME": { "precision": 0.8224498506188647, "recall": 0.5612348915101208, "f1": 0.6671860122911798, "support": 13734 }, "MICRO_AVERAGE": { "precision": 0.8610818179479925, "recall": 0.55289843104872, "f1": 0.673405143368635, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.8142657719733233, "recall": 0.49172681786522837, "f1": 0.613167823560949, "support": 60550 } }
EleutherAI@pythia-2.8b | Setting1 | icl | answer.txt | zhaorui
MICRO: P=0.597884  R=0.235008  F1=0.337396 | MACRO: P=0.391379  R=0.148751  F1=0.215571
detail result:
{ "ID": { "precision": 0.5385248149163696, "recall": 0.21941682493576137, "f1": 0.31179552309890457, "support": 8951 }, "CONTACT": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 10 }, "DATE": { "precision": 0.6285229202037351, "recall": 0.2416133664012531, "f1": 0.3490477088440505, "support": 7661 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "AGE": { "precision": 0.28888888888888886, "recall": 0.09285714285714286, "f1": 0.14054054054054055, "support": 140 }, "LOCATION": { "precision": 0.7332621082621082, "recall": 0.20886589571921282, "f1": 0.3251223748618348, "support": 9858 }, "NAME": { "precision": 0.5504544395245864, "recall": 0.2785048932908855, "f1": 0.36987159411212034, "support": 8481 }, "MICRO_AVERAGE": { "precision": 0.5978835978835979, "recall": 0.23500754964246032, "f1": 0.3373962125240296, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.39137902454224116, "recall": 0.1487511604577508, "f1": 0.21557056315777562, "support": 35101 } }
EleutherAI@pythia-2.8b | Setting2 | icl | answer.txt | zhaorui
MICRO: P=0.258988  R=0.136246  F1=0.178558 | MACRO: P=0.249814  R=0.141902  F1=0.180994
detail result:
{ "ID": { "precision": 0.5102875706894477, "recall": 0.6258854781582054, "f1": 0.5622058726055544, "support": 6776 }, "CONTACT": { "precision": 0.017543859649122806, "recall": 0.0009041591320072332, "f1": 0.0017196904557179706, "support": 1106 }, "DATE": { "precision": 0.4265042807186784, "recall": 0.11928369081343586, "f1": 0.18642772433785743, "support": 29652 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 2894 }, "AGE": { "precision": 0.5568075117370892, "recall": 0.07405094905094906, "f1": 0.13071751350159816, "support": 8008 }, "LOCATION": { "precision": 0.09340736575766095, "recall": 0.0759168285159374, "f1": 0.08375874456418983, "support": 17506 }, "NAME": { "precision": 0.14414632181129644, "recall": 0.09727584144418465, "f1": 0.11616128822803713, "support": 18391 }, "MICRO_AVERAGE": { "precision": 0.25898794094443817, "recall": 0.136245597808687, "f1": 0.17855755334193227, "support": 84333 }, "MACRO_AVERAGE": { "precision": 0.24981384433761367, "recall": 0.14190242101638853, "f1": 0.18099421673431115, "support": 84333 } }
EleutherAI@pythia-2.8b | Setting3 | icl | answer.txt | zhaorui
MICRO: P=0.883802  R=0.55635  F1=0.682849 | MACRO: P=0.818117  R=0.511118  F1=0.629165
detail result:
{ "ID": { "precision": 0.8936535162950258, "recall": 0.8026704326614457, "f1": 0.845722015556307, "support": 7789 }, "CONTACT": { "precision": 0.7795698924731183, "recall": 0.5482041587901701, "f1": 0.6437291897891232, "support": 529 }, "DATE": { "precision": 0.908964316797215, "recall": 0.5446107316055692, "f1": 0.6811230312713992, "support": 19177 }, "PROFESSION": { "precision": 0.5663157894736842, "recall": 0.1893033075299085, "f1": 0.28375527426160335, "support": 1421 }, "AGE": { "precision": 0.8117427772600186, "recall": 0.42643818849449205, "f1": 0.5591397849462366, "support": 4085 }, "LOCATION": { "precision": 0.8899406790357188, "recall": 0.5103872602243937, "f1": 0.6487257337381543, "support": 13815 }, "NAME": { "precision": 0.876635299518017, "recall": 0.5562108635503131, "f1": 0.6805951532430506, "support": 13734 }, "MICRO_AVERAGE": { "precision": 0.8838020778675622, "recall": 0.5563501238645747, "f1": 0.6828492084405976, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.8181174672646854, "recall": 0.5111178489794703, "f1": 0.6291654080669206, "support": 60550 } }
Qwen@Qwen1.5-7B-Chat | Setting1 | icl | answer.txt | zhaorui
MICRO: P=0.027778  R=0.000114  F1=0.000227 | MACRO: P=0.126225  R=0.00008  F1=0.00016
detail result:
{ "ID": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 8951 }, "CONTACT": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 10 }, "DATE": { "precision": 0.5, "recall": 0.0002610625244746117, "f1": 0.0005218525766470972, "support": 7661 }, "AGE": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 140 }, "LOCATION": { "precision": 0.007352941176470588, "recall": 0.00010144045445323595, "f1": 0.00020012007204322593, "support": 9858 }, "NAME": { "precision": 0.25, "recall": 0.00011791062374719962, "f1": 0.00023571007660577487, "support": 8481 }, "MICRO_AVERAGE": { "precision": 0.027777777777777776, "recall": 0.00011395686732571722, "f1": 0.0002269825507164137, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.12622549019607843, "recall": 8.006893377917454e-05, "f1": 0.00016003635129571264, "support": 35101 } }
Qwen@Qwen1.5-7B-Chat | Setting2 | icl | answer.txt | zhaorui
MICRO: P=0.476744  R=0.001945  F1=0.003874 | MACRO: P=0.273566  R=0.001119  F1=0.00223
detail result:
{ "ID": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 6776 }, "CONTACT": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 1106 }, "DATE": { "precision": 0.5463917525773195, "recall": 0.0035748010252259544, "f1": 0.007103129397574213, "support": 29652 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 2894 }, "AGE": { "precision": 0.45454545454545453, "recall": 0.0018731268731268732, "f1": 0.00373087924387514, "support": 8008 }, "LOCATION": { "precision": 0.5294117647058824, "recall": 0.0010282188963783845, "f1": 0.0020524515393386543, "support": 17506 }, "NAME": { "precision": 0.38461538461538464, "recall": 0.0013593605567940841, "f1": 0.00270914607715648, "support": 18391 }, "MICRO_AVERAGE": { "precision": 0.47674418604651164, "recall": 0.0019446717180700319, "f1": 0.003873542992784345, "support": 84333 }, "MACRO_AVERAGE": { "precision": 0.27356633663486296, "recall": 0.0011193581930750425, "f1": 0.002229593502883892, "support": 84333 } }
Qwen@Qwen1.5-7B-Chat | Setting3 | icl | answer.txt | zhaorui
MICRO: P=0.873993  R=0.025087  F1=0.048773 | MACRO: P=0.871603  R=0.018961  F1=0.037114
detail result:
{ "ID": { "precision": 1.0, "recall": 0.00115547567081782, "f1": 0.002308284175429597, "support": 7789 }, "CONTACT": { "precision": 1.0, "recall": 0.001890359168241966, "f1": 0.0037735849056603774, "support": 529 }, "DATE": { "precision": 0.8885400313971743, "recall": 0.029514522605204152, "f1": 0.057131321287978196, "support": 19177 }, "PROFESSION": { "precision": 0.7368421052631579, "recall": 0.019704433497536946, "f1": 0.03838245373543523, "support": 1421 }, "AGE": { "precision": 0.7192982456140351, "recall": 0.0200734394124847, "f1": 0.039056918313884255, "support": 4085 }, "LOCATION": { "precision": 0.8971014492753623, "recall": 0.044806369887803114, "f1": 0.08534987935194761, "support": 13815 }, "NAME": { "precision": 0.8594377510040161, "recall": 0.015581767875345856, "f1": 0.030608596152470857, "support": 13734 }, "MICRO_AVERAGE": { "precision": 0.8739930955120828, "recall": 0.02508670520231214, "f1": 0.04877343950680709, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.8716027975076779, "recall": 0.018960909731062077, "f1": 0.03711442950246772, "support": 60550 } }
meta-llama@Llama-2-7b-hf | Setting1 | icl | answer.txt | zhaorui
MICRO: P=0.519124  R=0.282271  F1=0.365697 | MACRO: P=0.335145  R=0.172249  F1=0.227549
detail result:
{ "ID": { "precision": 0.4086677367576244, "recall": 0.14221874650876998, "f1": 0.21100613293552128, "support": 8951 }, "CONTACT": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 10 }, "DATE": { "precision": 0.628837393543276, "recall": 0.414436757603446, "f1": 0.49960660896931547, "support": 7661 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "AGE": { "precision": 0.23333333333333334, "recall": 0.05, "f1": 0.0823529411764706, "support": 140 }, "LOCATION": { "precision": 0.6646706586826348, "recall": 0.2702373706634206, "f1": 0.38424924275205535, "support": 9858 }, "NAME": { "precision": 0.4105092728878422, "recall": 0.32885272963093976, "f1": 0.36517184942716857, "support": 8481 }, "MICRO_AVERAGE": { "precision": 0.5191239652101016, "recall": 0.2822711603658015, "f1": 0.36569656928783656, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.3351454850292444, "recall": 0.17224937205808236, "f1": 0.22754901252163548, "support": 35101 } }
meta-llama@Llama-2-7b-hf | Setting2 | icl | answer.txt | zhaorui
MICRO: P=0.229651  R=0.18019  F1=0.201936 | MACRO: P=0.282236  R=0.168357  F1=0.210906
detail result:
{ "ID": { "precision": 0.5350502512562814, "recall": 0.6285419126328218, "f1": 0.5780401737242128, "support": 6776 }, "CONTACT": { "precision": 0.1206896551724138, "recall": 0.006329113924050633, "f1": 0.012027491408934709, "support": 1106 }, "DATE": { "precision": 0.432104526364909, "recall": 0.18737353298259815, "f1": 0.26139731827805224, "support": 29652 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 2894 }, "AGE": { "precision": 0.683265306122449, "recall": 0.10452047952047952, "f1": 0.1813061843387848, "support": 8008 }, "LOCATION": { "precision": 0.08711149130310807, "recall": 0.10470695761453216, "f1": 0.09510221023139982, "support": 17506 }, "NAME": { "precision": 0.11742736787249751, "recall": 0.14702843782284813, "f1": 0.13057124921531701, "support": 18391 }, "MICRO_AVERAGE": { "precision": 0.22965089919903278, "recall": 0.18019043553531833, "f1": 0.20193617402975356, "support": 84333 }, "MACRO_AVERAGE": { "precision": 0.2822355140130941, "recall": 0.16835720492819006, "f1": 0.2109061255244442, "support": 84333 } }
meta-llama@Llama-2-7b-hf | Setting3 | icl | answer.txt | zhaorui
MICRO: P=0.852069  R=0.608621  F1=0.710058 | MACRO: P=0.799693  R=0.566826  F1=0.663419
detail result:
{ "ID": { "precision": 0.896279905960448, "recall": 0.8320708691744768, "f1": 0.8629826897470039, "support": 7789 }, "CONTACT": { "precision": 0.7916666666666666, "recall": 0.610586011342155, "f1": 0.6894343649946638, "support": 529 }, "DATE": { "precision": 0.8691779270778289, "recall": 0.6004067372373155, "f1": 0.7102146558105107, "support": 19177 }, "PROFESSION": { "precision": 0.5560747663551402, "recall": 0.2512315270935961, "f1": 0.3460979156568105, "support": 1421 }, "AGE": { "precision": 0.7964327258627375, "recall": 0.5028151774785802, "f1": 0.6164465786314526, "support": 4085 }, "LOCATION": { "precision": 0.8683197463768116, "recall": 0.5551212450235251, "f1": 0.6772640967898618, "support": 13815 }, "NAME": { "precision": 0.8199010765202212, "recall": 0.6155526430755789, "f1": 0.7031815346225826, "support": 13734 }, "MICRO_AVERAGE": { "precision": 0.8520693641618498, "recall": 0.6086209744013212, "f1": 0.7100578034682082, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.7996932592599791, "recall": 0.5668263157750325, "f1": 0.6634185008068373, "support": 60550 } }
meta-llama@Meta-Llama-3-8B-Instruct | Setting1 | icl | answer.txt | zhaorui
MICRO: P=0.507072  R=0.258397  F1=0.342342 | MACRO: P=0.368275  R=0.162881  F1=0.225865
detail result:
{ "ID": { "precision": 0.4260834345889024, "recall": 0.11752876773544856, "f1": 0.1842381786339755, "support": 8951 }, "CONTACT": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 10 }, "DATE": { "precision": 0.40783956244302644, "recall": 0.29199843362485317, "f1": 0.34033165982047775, "support": 7661 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "AGE": { "precision": 0.5652173913043478, "recall": 0.09285714285714286, "f1": 0.15950920245398773, "support": 140 }, "LOCATION": { "precision": 0.5767504488330341, "recall": 0.26070196794481637, "f1": 0.3590890037725304, "support": 9858 }, "NAME": { "precision": 0.6020331325301205, "recall": 0.3770781747435444, "f1": 0.46371347785108386, "support": 8481 }, "MICRO_AVERAGE": { "precision": 0.5070721753228602, "recall": 0.2583971966610638, "f1": 0.34234166226315393, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.36827485281420447, "recall": 0.16288064098654362, "f1": 0.22586547549898708, "support": 35101 } }
meta-llama@Meta-Llama-3-8B-Instruct | Setting2 | icl | answer.txt | zhaorui
MICRO: P=0.283601  R=0.223033  F1=0.249696 | MACRO: P=0.28171  R=0.1646  F1=0.20779
detail result:
{ "ID": { "precision": 0.362962962962963, "recall": 0.27479338842975204, "f1": 0.312783470519066, "support": 6776 }, "CONTACT": { "precision": 0.1320754716981132, "recall": 0.012658227848101266, "f1": 0.023102310231023097, "support": 1106 }, "DATE": { "precision": 0.43561872909698995, "recall": 0.22841629569674896, "f1": 0.29969026548672567, "support": 29652 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 2894 }, "AGE": { "precision": 0.6350958744915747, "recall": 0.1364885114885115, "f1": 0.2246890739027649, "support": 8008 }, "LOCATION": { "precision": 0.11882324802678786, "recall": 0.14189420770021707, "f1": 0.12933795006638724, "support": 17506 }, "NAME": { "precision": 0.2873919497075002, "recall": 0.3579468218150182, "f1": 0.31881250454028137, "support": 18391 }, "MICRO_AVERAGE": { "precision": 0.2836012182986038, "recall": 0.2230325021047514, "f1": 0.2496963260429458, "support": 84333 }, "MACRO_AVERAGE": { "precision": 0.2817097479977041, "recall": 0.16459963613976414, "f1": 0.20779003832536255, "support": 84333 } }
meta-llama@Meta-Llama-3-8B-Instruct | Setting3 | icl | answer.txt | zhaorui
MICRO: P=0.837661  R=0.611693  F1=0.707061 | MACRO: P=0.815803  R=0.581267  F1=0.678848
detail result:
{ "ID": { "precision": 0.9124541540424175, "recall": 0.7346257542688407, "f1": 0.8139402560455191, "support": 7789 }, "CONTACT": { "precision": 0.7980997624703088, "recall": 0.6351606805293005, "f1": 0.7073684210526315, "support": 529 }, "DATE": { "precision": 0.8020868622535402, "recall": 0.617301976325807, "f1": 0.6976661951909477, "support": 19177 }, "PROFESSION": { "precision": 0.7017310252996005, "recall": 0.37086558761435606, "f1": 0.48526703499079193, "support": 1421 }, "AGE": { "precision": 0.7890359168241966, "recall": 0.5108935128518972, "f1": 0.6202080237741455, "support": 4085 }, "LOCATION": { "precision": 0.8607975921745673, "recall": 0.5796597900832429, "f1": 0.6927934942469072, "support": 13815 }, "NAME": { "precision": 0.8464136697794556, "recall": 0.6203582350371342, "f1": 0.715966386554622, "support": 13734 }, "MICRO_AVERAGE": { "precision": 0.8376605753573367, "recall": 0.6116928158546656, "f1": 0.7070614512341792, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.8158027118348695, "recall": 0.5812665052443684, "f1": 0.6788479561070137, "support": 60550 } }
meta-llama@Meta-Llama-3-8B | Setting1 | icl | answer.txt | zhaorui
MICRO: P=0.540875  R=0.081052  F1=0.140978 | MACRO: P=0.292196  R=0.050096  F1=0.085529
detail result:
{ "ID": { "precision": 0.22379032258064516, "recall": 0.012400849067143336, "f1": 0.023499523658304225, "support": 8951 }, "CONTACT": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 10 }, "DATE": { "precision": 0.7064531780688986, "recall": 0.1900535178175173, "f1": 0.29952684632791604, "support": 7661 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "AGE": { "precision": 0.25, "recall": 0.014285714285714285, "f1": 0.027027027027027025, "support": 140 }, "LOCATION": { "precision": 0.5538376587520707, "recall": 0.10174477581659566, "f1": 0.1719084754477676, "support": 9858 }, "NAME": { "precision": 0.3112884834663626, "recall": 0.032189600282985495, "f1": 0.05834580038469758, "support": 8481 }, "MICRO_AVERAGE": { "precision": 0.5408745247148289, "recall": 0.08105182188541638, "f1": 0.1409776764698595, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.29219566326685387, "recall": 0.050096351038565155, "f1": 0.08552893966086868, "support": 35101 } }
meta-llama@Meta-Llama-3-8B | Setting2 | icl | answer.txt | zhaorui
MICRO: P=0.299782  R=0.075154  F1=0.12018 | MACRO: P=0.296362  R=0.051534  F1=0.087801
detail result:
{ "ID": { "precision": 0.38205499276411, "recall": 0.07792207792207792, "f1": 0.12944349105172837, "support": 6776 }, "CONTACT": { "precision": 0.1935483870967742, "recall": 0.0054249547920434, "f1": 0.010554089709762533, "support": 1106 }, "DATE": { "precision": 0.4890259386903682, "recall": 0.09092135437744503, "f1": 0.15333428124555668, "support": 29652 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 2894 }, "AGE": { "precision": 0.5902140672782875, "recall": 0.0241008991008991, "f1": 0.046310737852429514, "support": 8008 }, "LOCATION": { "precision": 0.20618405627198125, "recall": 0.08037244373357706, "f1": 0.11565967940813811, "support": 17506 }, "NAME": { "precision": 0.2135070083533909, "recall": 0.08199662878581915, "f1": 0.118488253319714, "support": 18391 }, "MICRO_AVERAGE": { "precision": 0.29978242361176805, "recall": 0.07515444725077965, "f1": 0.12018013747333492, "support": 84333 }, "MACRO_AVERAGE": { "precision": 0.2963620643507017, "recall": 0.05153405124455167, "f1": 0.08780056532139423, "support": 84333 } }
meta-llama@Meta-Llama-3-8B | Setting3 | icl | answer.txt | zhaorui
MICRO: P=0.676322  R=0.07057  F1=0.127804 | MACRO: P=0.683701  R=0.073847  F1=0.133297
detail result:
{ "ID": { "precision": 0.6742857142857143, "recall": 0.015149569906278084, "f1": 0.029633350075339022, "support": 7789 }, "CONTACT": { "precision": 0.7878787878787878, "recall": 0.04914933837429111, "f1": 0.09252669039145905, "support": 529 }, "DATE": { "precision": 0.6527711984841308, "recall": 0.07185691192574438, "f1": 0.12946260804208942, "support": 19177 }, "PROFESSION": { "precision": 0.5702127659574469, "recall": 0.09429978888106967, "f1": 0.16183574879227053, "support": 1421 }, "AGE": { "precision": 0.7622282608695652, "recall": 0.13733170134638922, "f1": 0.2327317983820784, "support": 4085 }, "LOCATION": { "precision": 0.7169500822819528, "recall": 0.09460731089395584, "f1": 0.16715692543803554, "support": 13815 }, "NAME": { "precision": 0.6215767634854772, "recall": 0.0545361875637105, "f1": 0.10027444942767254, "support": 13734 }, "MICRO_AVERAGE": { "precision": 0.6763216207660652, "recall": 0.07056977704376548, "f1": 0.12780403182389186, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.6837005104632965, "recall": 0.0738472584130627, "f1": 0.13329696250895143, "support": 60550 } }
microsoft@Phi-3-mini-4k-instruct | Setting1 | icl | answer.txt | zhaorui
MICRO: P=0.618346  R=0.257144  F1=0.363234 | MACRO: P=0.435172  R=0.184519  F1=0.259153
detail result:
{ "ID": { "precision": 0.4675850891410049, "recall": 0.12892414255390458, "f1": 0.20211927489272266, "support": 8951 }, "CONTACT": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 10 }, "DATE": { "precision": 0.7869982762866289, "recall": 0.4171779141104294, "f1": 0.5452994369561508, "support": 7661 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "AGE": { "precision": 0.6296296296296297, "recall": 0.24285714285714285, "f1": 0.35051546391752575, "support": 140 }, "LOCATION": { "precision": 0.6183310533515732, "recall": 0.2751065124771759, "f1": 0.38079191238416177, "support": 9858 }, "NAME": { "precision": 0.543661971830986, "recall": 0.22756750383209526, "f1": 0.3208378355913889, "support": 8481 }, "MICRO_AVERAGE": { "precision": 0.6183462355278482, "recall": 0.2571436711204809, "f1": 0.3632339329550485, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.43517228860568896, "recall": 0.184519030832964, "f1": 0.2591534411410576, "support": 35101 } }
microsoft@Phi-3-mini-4k-instruct | Setting2 | icl | answer.txt | zhaorui
MICRO: P=0.504577  R=0.207202  F1=0.29377 | MACRO: P=0.400923  R=0.189641  F1=0.257487
detail result:
{ "ID": { "precision": 0.6379661016949153, "recall": 0.5554899645808736, "f1": 0.5938781950142, "support": 6776 }, "CONTACT": { "precision": 0.11538461538461539, "recall": 0.0081374321880651, "f1": 0.015202702702702702, "support": 1106 }, "DATE": { "precision": 0.5213417190775681, "recall": 0.2096654525832996, "f1": 0.2990595762079996, "support": 29652 }, "PROFESSION": { "precision": 0.06051873198847262, "recall": 0.014512785072563926, "f1": 0.023411371237458196, "support": 2894 }, "AGE": { "precision": 0.6138115990613476, "recall": 0.22864635364635363, "f1": 0.33318169411336546, "support": 8008 }, "LOCATION": { "precision": 0.3353516534699581, "recall": 0.12338626756540615, "f1": 0.18039837975529294, "support": 17506 }, "NAME": { "precision": 0.5220877458396369, "recall": 0.18764613125985535, "f1": 0.2760689572417103, "support": 18391 }, "MICRO_AVERAGE": { "precision": 0.504576824232624, "recall": 0.2072024000094862, "f1": 0.2937695437275142, "support": 84333 }, "MACRO_AVERAGE": { "precision": 0.4009231666452163, "recall": 0.18964062669948817, "f1": 0.25748723994856776, "support": 84333 } }
microsoft@Phi-3-mini-4k-instruct | Setting3 | icl | answer.txt | zhaorui
MICRO: P=0.866033  R=0.590933  F1=0.702511 | MACRO: P=0.837363  R=0.554243  F1=0.667003
detail result:
{ "ID": { "precision": 0.8974398519432449, "recall": 0.7470792142765439, "f1": 0.8153856932670077, "support": 7789 }, "CONTACT": { "precision": 0.8721590909090909, "recall": 0.5803402646502835, "f1": 0.6969353007945518, "support": 529 }, "DATE": { "precision": 0.8668916935720575, "recall": 0.6160504771340668, "f1": 0.7202560585276635, "support": 19177 }, "PROFESSION": { "precision": 0.705511811023622, "recall": 0.31527093596059114, "f1": 0.43579766536964976, "support": 1421 }, "AGE": { "precision": 0.7685767097966728, "recall": 0.5089351285189718, "f1": 0.6123711340206186, "support": 4085 }, "LOCATION": { "precision": 0.8865888847070631, "recall": 0.5115454216431415, "f1": 0.6487652620949235, "support": 13815 }, "NAME": { "precision": 0.8643748034797191, "recall": 0.6004805591961555, "f1": 0.708657357679914, "support": 13734 }, "MICRO_AVERAGE": { "precision": 0.8660325297705489, "recall": 0.5909331131296449, "f1": 0.7025111420886262, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.8373632636330671, "recall": 0.5542431430542506, "f1": 0.6670030331621435, "support": 60550 } }
mistralai@Mistral-7B-Instruct-v0.3 | Setting1 | icl | answer.txt | zhaorui
MICRO: P=0.539066  R=0.340247  F1=0.417179 | MACRO: P=0.414726  R=0.258951  F1=0.318829
detail result:
{ "ID": { "precision": 0.40844444444444444, "recall": 0.2053401854541392, "f1": 0.2732882313582633, "support": 8951 }, "CONTACT": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 10 }, "DATE": { "precision": 0.7598395108903324, "recall": 0.5191228299177653, "f1": 0.616828227995347, "support": 7661 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "AGE": { "precision": 0.7283950617283951, "recall": 0.42142857142857143, "f1": 0.5339366515837104, "support": 140 }, "LOCATION": { "precision": 0.43686459407691175, "recall": 0.3007709474538446, "f1": 0.3562631420847101, "support": 9858 }, "NAME": { "precision": 0.5695412844036697, "recall": 0.36599457611130765, "f1": 0.4456248654080827, "support": 8481 }, "MICRO_AVERAGE": { "precision": 0.5390656736628301, "recall": 0.3402467166177602, "f1": 0.4171789856084952, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.4147264136491076, "recall": 0.25895101576651836, "f1": 0.3188286304108459, "support": 35101 } }
mistralai@Mistral-7B-Instruct-v0.3 | Setting2 | icl | answer.txt | zhaorui
MICRO: P=0.391887  R=0.290064  F1=0.333374 | MACRO: P=0.342535  R=0.250306  F1=0.289246
detail result:
{ "ID": { "precision": 0.4313184003272988, "recall": 0.6223435655253837, "f1": 0.5095148915604423, "support": 6776 }, "CONTACT": { "precision": 0.1592920353982301, "recall": 0.0162748643761302, "f1": 0.029532403609515995, "support": 1106 }, "DATE": { "precision": 0.52858261550509, "recall": 0.27316875758802106, "f1": 0.36019210245464245, "support": 29652 }, "PROFESSION": { "precision": 0.08888888888888889, "recall": 0.0055286800276434, "f1": 0.01040988939492518, "support": 2894 }, "AGE": { "precision": 0.6095802214782384, "recall": 0.2955794205794206, "f1": 0.3981162223530401, "support": 8008 }, "LOCATION": { "precision": 0.21896364449555938, "recall": 0.19576145321604022, "f1": 0.20671351449166087, "support": 17506 }, "NAME": { "precision": 0.36111587492139713, "recall": 0.3434832254907292, "f1": 0.35207892096756216, "support": 18391 }, "MICRO_AVERAGE": { "precision": 0.3918873456048445, "recall": 0.2900643876062751, "f1": 0.3333742180792347, "support": 84333 }, "MACRO_AVERAGE": { "precision": 0.34253452585924327, "recall": 0.25030570954333836, "f1": 0.2892460478161246, "support": 84333 } }
mistralai@Mistral-7B-Instruct-v0.3 | Setting3 | icl | answer.txt | zhaorui
MICRO: P=0.840219  R=0.657688  F1=0.737832 | MACRO: P=0.80437  R=0.634122  F1=0.709172
detail result:
{ "ID": { "precision": 0.8709163346613545, "recall": 0.8419566054692516, "f1": 0.8561916574188915, "support": 7789 }, "CONTACT": { "precision": 0.8289156626506025, "recall": 0.6502835538752363, "f1": 0.728813559322034, "support": 529 }, "DATE": { "precision": 0.8623968675709691, "recall": 0.6431662929551024, "f1": 0.7368200961796948, "support": 19177 }, "PROFESSION": { "precision": 0.6166306695464363, "recall": 0.40182969739619984, "f1": 0.4865786109927567, "support": 1421 }, "AGE": { "precision": 0.7862318840579711, "recall": 0.6374541003671971, "f1": 0.7040692172502365, "support": 4085 }, "LOCATION": { "precision": 0.8358999509563512, "recall": 0.6168657256605139, "f1": 0.7098708871303623, "support": 13815 }, "NAME": { "precision": 0.8296005972377752, "recall": 0.6472986748216106, "f1": 0.7271983640081799, "support": 13734 }, "MICRO_AVERAGE": { "precision": 0.8402185838467381, "recall": 0.6576878612716763, "f1": 0.7378318789024142, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.8043702809544943, "recall": 0.6341220929350159, "f1": 0.7091715956399902, "support": 60550 } }
zhaorui-nb@Llama-2-7b-hf._.lora_ft._.Setting1 | Setting1 | ft | answer.txt | zhaorui
MICRO: P=0.80789  R=0.725791  F1=0.764643 | MACRO: P=0.642001  R=0.642574  F1=0.642287
detail result:
{ "ID": { "precision": 0.89810681853163, "recall": 0.6518824712322645, "f1": 0.7554375970999482, "support": 8951 }, "CONTACT": { "precision": 0.3333333333333333, "recall": 0.6, "f1": 0.42857142857142855, "support": 10 }, "DATE": { "precision": 0.9257619830529911, "recall": 0.9554888395770788, "f1": 0.9403905447070915, "support": 7661 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "AGE": { "precision": 0.9347826086956522, "recall": 0.9214285714285714, "f1": 0.9280575539568345, "support": 140 }, "LOCATION": { "precision": 0.5418205804749341, "recall": 0.41661594643944005, "f1": 0.47104025691019613, "support": 9858 }, "NAME": { "precision": 0.8602001703577513, "recall": 0.9525999292536258, "f1": 0.9040452078554244, "support": 8481 }, "MICRO_AVERAGE": { "precision": 0.8078898966195218, "recall": 0.7257912879974929, "f1": 0.7646432055226232, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.6420007849208987, "recall": 0.6425736797044258, "f1": 0.6422871045627957, "support": 35101 } }
zhaorui-nb@Llama-2-7b-hf._.lora_ft._.Setting2 | Setting2 | ft | answer.txt | zhaorui
MICRO: P=0.639199  R=0.461741  F1=0.536168 | MACRO: P=0.595992  R=0.413827  F1=0.488479
detail result:
{ "ID": { "precision": 0.5254995836802664, "recall": 0.7451298701298701, "f1": 0.6163330078124999, "support": 6776 }, "CONTACT": { "precision": 0.9284064665127021, "recall": 0.36347197106690776, "f1": 0.5224171539961013, "support": 1106 }, "DATE": { "precision": 0.6319473415843879, "recall": 0.4597666261972211, "f1": 0.5322791605661299, "support": 29652 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 2894 }, "AGE": { "precision": 0.7388535031847133, "recall": 0.40559440559440557, "f1": 0.5237020316027088, "support": 8008 }, "LOCATION": { "precision": 0.5476136104133363, "recall": 0.4109448189192277, "f1": 0.4695362725581699, "support": 17506 }, "NAME": { "precision": 0.7996262634842436, "recall": 0.5118808112663803, "f1": 0.6241877735048401, "support": 18391 }, "MICRO_AVERAGE": { "precision": 0.639198949441891, "recall": 0.4617409554978478, "f1": 0.5361679276847983, "support": 84333 }, "MACRO_AVERAGE": { "precision": 0.5959923955513785, "recall": 0.41382692902485896, "f1": 0.488478872944317, "support": 84333 } }
zhaorui-nb@Llama-2-7b-hf._.lora_ft._.Setting3 | Setting3 | ft | answer.txt | zhaorui
MICRO: P=0.939451  R=0.91787  F1=0.928535 | MACRO: P=0.927537  R=0.89448  F1=0.910709
detail result:
{ "ID": { "precision": 0.9340963400830085, "recall": 0.9535242007959943, "f1": 0.943710292249047, "support": 7789 }, "CONTACT": { "precision": 0.9544468546637744, "recall": 0.831758034026465, "f1": 0.888888888888889, "support": 529 }, "DATE": { "precision": 0.9448827292110874, "recall": 0.9243364446993795, "f1": 0.934496665524422, "support": 19177 }, "PROFESSION": { "precision": 0.8394160583941606, "recall": 0.8092892329345531, "f1": 0.8240773916159082, "support": 1421 }, "AGE": { "precision": 0.9343083787803669, "recall": 0.9226438188494492, "f1": 0.9284394629880527, "support": 4085 }, "LOCATION": { "precision": 0.9257186081694403, "recall": 0.8858487151646761, "f1": 0.9053449232476419, "support": 13815 }, "NAME": { "precision": 0.9598892464267006, "recall": 0.9339595165283239, "f1": 0.9467468723474923, "support": 13734 }, "MICRO_AVERAGE": { "precision": 0.9394513091837252, "recall": 0.9178695293146161, "f1": 0.928535030782982, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.9275368879612198, "recall": 0.8944799947141202, "f1": 0.9107085653591485, "support": 60550 } }
zhaorui-nb@Meta-Llama-3-8B-Instruct._.lora_ft._.Setting1 | Setting1 | ft | answer.txt | zhaorui
MICRO: P=0.782024  R=0.747358  F1=0.764298 | MACRO: P=0.587606  R=0.663966  F1=0.623456
detail result:
{ "ID": { "precision": 0.9190300623936472, "recall": 0.7240531784158195, "f1": 0.8099731300381179, "support": 8951 }, "CONTACT": { "precision": 0.12280701754385964, "recall": 0.7, "f1": 0.208955223880597, "support": 10 }, "DATE": { "precision": 0.8604592480444108, "recall": 0.8902232084584258, "f1": 0.8750882145377558, "support": 7661 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "AGE": { "precision": 0.8037974683544303, "recall": 0.9071428571428571, "f1": 0.8523489932885906, "support": 140 }, "LOCATION": { "precision": 0.7283807312826466, "recall": 0.5092310813552444, "f1": 0.5994029850746267, "support": 9858 }, "NAME": { "precision": 0.6787677807836635, "recall": 0.9171088315057186, "f1": 0.7801404212637915, "support": 8481 }, "MICRO_AVERAGE": { "precision": 0.782024146668654, "recall": 0.7473576251388849, "f1": 0.7642979925997144, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.5876060440575225, "recall": 0.6639655938397235, "f1": 0.623456435369455, "support": 35101 } }
zhaorui-nb@Meta-Llama-3-8B-Instruct._.lora_ft._.Setting2 | Setting2 | ft | answer.txt | zhaorui
MICRO: P=0.604263  R=0.433603  F1=0.504902 | MACRO: P=0.5665  R=0.415873  F1=0.479639
detail result:
{ "ID": { "precision": 0.4935290918977705, "recall": 0.6697166469893743, "f1": 0.5682800075136184, "support": 6776 }, "CONTACT": { "precision": 0.8682926829268293, "recall": 0.48282097649186256, "f1": 0.620569436374201, "support": 1106 }, "DATE": { "precision": 0.6617442668519805, "recall": 0.5138270605692702, "f1": 0.5784797630799605, "support": 29652 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 2894 }, "AGE": { "precision": 0.7260686333534015, "recall": 0.6023976023976024, "f1": 0.6584766584766585, "support": 8008 }, "LOCATION": { "precision": 0.47709169693493914, "recall": 0.42768193762138695, "f1": 0.4510376818578873, "support": 17506 }, "NAME": { "precision": 0.7387724550898204, "recall": 0.21467021912892176, "f1": 0.3326732673267327, "support": 18391 }, "MICRO_AVERAGE": { "precision": 0.6042634057671652, "recall": 0.43360250435772474, "f1": 0.5049016900474981, "support": 84333 }, "MACRO_AVERAGE": { "precision": 0.5664998324363916, "recall": 0.4158734918854883, "f1": 0.4796389674576976, "support": 84333 } }
zhaorui-nb@Meta-Llama-3-8B-Instruct._.lora_ft._.Setting3 | Setting3 | ft | answer.txt | zhaorui
MICRO: P=0.950231  R=0.939356  F1=0.944762 | MACRO: P=0.938533  R=0.926599  F1=0.932528
detail result:
{ "ID": { "precision": 0.936260446551079, "recall": 0.9636667094620619, "f1": 0.9497659116791092, "support": 7789 }, "CONTACT": { "precision": 0.932806324110672, "recall": 0.8922495274102079, "f1": 0.9120772946859904, "support": 529 }, "DATE": { "precision": 0.9584013480069506, "recall": 0.9491056995359024, "f1": 0.9537308740306016, "support": 19177 }, "PROFESSION": { "precision": 0.8781183178902352, "recall": 0.8669950738916257, "f1": 0.8725212464589236, "support": 1421 }, "AGE": { "precision": 0.962426326129666, "recall": 0.9593635250917992, "f1": 0.9608924849822238, "support": 4085 }, "LOCATION": { "precision": 0.9400193726249907, "recall": 0.9132102786825914, "f1": 0.9264209134968424, "support": 13815 }, "NAME": { "precision": 0.9617014947571949, "recall": 0.9416047764671618, "f1": 0.9515470365328723, "support": 13734 }, "MICRO_AVERAGE": { "precision": 0.950231384800441, "recall": 0.9393559042113956, "f1": 0.9447623477040371, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.9385333757243984, "recall": 0.9265993700773356, "f1": 0.9325281931811916, "support": 60550 } }
zhaorui-nb@Meta-Llama-3-8B._.lora_ft._.Setting1 | Setting1 | ft | answer.txt | zhaorui
MICRO: P=0.731445  R=0.72015  F1=0.725754 | MACRO: P=0.603949  R=0.653613  F1=0.6278
detail result:
{ "ID": { "precision": 0.7390877192982456, "recall": 0.5883141548430343, "f1": 0.6551380940532471, "support": 8951 }, "CONTACT": { "precision": 0.5, "recall": 0.7, "f1": 0.5833333333333334, "support": 10 }, "DATE": { "precision": 0.8379189594797398, "recall": 0.8745594569899491, "f1": 0.8558472248834387, "support": 7661 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "AGE": { "precision": 0.7777777777777778, "recall": 0.95, "f1": 0.855305466237942, "support": 140 }, "LOCATION": { "precision": 0.6978836649347357, "recall": 0.5586325826739704, "f1": 0.6205420023663305, "support": 9858 }, "NAME": { "precision": 0.6749735822472702, "recall": 0.9037849310222851, "f1": 0.7727983061954933, "support": 8481 }, "MICRO_AVERAGE": { "precision": 0.7314447756011458, "recall": 0.72015042306487, "f1": 0.7257536606373816, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.6039488148196812, "recall": 0.6536130179327484, "f1": 0.6278002357422227, "support": 35101 } }
zhaorui-nb@Meta-Llama-3-8B._.lora_ft._.Setting2 | Setting2 | ft | answer.txt | zhaorui
MICRO: P=0.604846  R=0.476516  F1=0.533066 | MACRO: P=0.567618  R=0.450128  F1=0.502091
detail result:
{ "ID": { "precision": 0.5055170921678926, "recall": 0.6896399055489965, "f1": 0.5833957553058677, "support": 6776 }, "CONTACT": { "precision": 0.8285302593659942, "recall": 0.5198915009041591, "f1": 0.638888888888889, "support": 1106 }, "DATE": { "precision": 0.6539695945945946, "recall": 0.5222581950627276, "f1": 0.5807395184879622, "support": 29652 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 2894 }, "AGE": { "precision": 0.7456153965544001, "recall": 0.5999000999000998, "f1": 0.6648674832191543, "support": 8008 }, "LOCATION": { "precision": 0.4524513022543226, "recall": 0.47235233634182566, "f1": 0.4621876921357107, "support": 17506 }, "NAME": { "precision": 0.7872392940886092, "recall": 0.3468544396715785, "f1": 0.48154299086585634, "support": 18391 }, "MICRO_AVERAGE": { "precision": 0.604846478025286, "recall": 0.4765157174534287, "f1": 0.5330662651801052, "support": 84333 }, "MACRO_AVERAGE": { "precision": 0.5676175627179733, "recall": 0.45012806820419815, "f1": 0.502091267645157, "support": 84333 } }
zhaorui-nb@Meta-Llama-3-8B._.lora_ft._.Setting3 | Setting3 | ft | answer.txt | zhaorui
MICRO: P=0.949879  R=0.938348  F1=0.944078 | MACRO: P=0.941295  R=0.928518  F1=0.934863
detail result:
{ "ID": { "precision": 0.9289779757485771, "recall": 0.9639234818333547, "f1": 0.9461281582761011, "support": 7789 }, "CONTACT": { "precision": 0.944, "recall": 0.8922495274102079, "f1": 0.9173955296404275, "support": 529 }, "DATE": { "precision": 0.953081195450939, "recall": 0.9395630181988841, "f1": 0.9462738301559793, "support": 19177 }, "PROFESSION": { "precision": 0.886685552407932, "recall": 0.8810696692470091, "f1": 0.8838686904341686, "support": 1421 }, "AGE": { "precision": 0.9667651403249631, "recall": 0.9613219094247246, "f1": 0.9640358414140174, "support": 4085 }, "LOCATION": { "precision": 0.9442124105011933, "recall": 0.9163952225841476, "f1": 0.93009587481174, "support": 13815 }, "NAME": { "precision": 0.9653428528930537, "recall": 0.945099752439202, "f1": 0.9551140544518028, "support": 13734 }, "MICRO_AVERAGE": { "precision": 0.9498787929449135, "recall": 0.9383484723369117, "f1": 0.9440784281144852, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.9412950181895227, "recall": 0.9285175115910757, "f1": 0.9348626068571345, "support": 60550 } }
zhaorui-nb@Mistral-7B-Instruct-v0.3._.lora_ft._.Setting1 | Setting1 | ft | answer.txt | zhaorui
MICRO: P=0.856992  R=0.786872  F1=0.820437 | MACRO: P=0.670888  R=0.647933  F1=0.659211
detail result:
{ "ID": { "precision": 0.9347982191584088, "recall": 0.7271813205228466, "f1": 0.8180218675380168, "support": 8951 }, "CONTACT": { "precision": 0.3076923076923077, "recall": 0.4, "f1": 0.34782608695652173, "support": 10 }, "DATE": { "precision": 0.9304197102884735, "recall": 0.9809424357133534, "f1": 0.9550133434998094, "support": 7661 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "AGE": { "precision": 0.9558823529411765, "recall": 0.9285714285714286, "f1": 0.9420289855072465, "support": 140 }, "LOCATION": { "precision": 0.7250641112160885, "recall": 0.5449381213227835, "f1": 0.6222273701279898, "support": 9858 }, "NAME": { "precision": 0.8423573511037068, "recall": 0.953896946114845, "f1": 0.8946640862593309, "support": 8481 }, "MICRO_AVERAGE": { "precision": 0.8569921499270843, "recall": 0.7868721688840774, "f1": 0.8204366552799645, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.6708877217714517, "recall": 0.6479328931778939, "f1": 0.6592105365013551, "support": 35101 } }
zhaorui-nb@Mistral-7B-Instruct-v0.3._.lora_ft._.Setting2 | Setting2 | ft | answer.txt | zhaorui
MICRO: P=0.572191  R=0.362503  F1=0.443827 | MACRO: P=0.533084  R=0.358032  F1=0.428364
detail result:
{ "ID": { "precision": 0.4414388489208633, "recall": 0.6791617473435655, "f1": 0.5350851694668914, "support": 6776 }, "CONTACT": { "precision": 0.7743362831858407, "recall": 0.15822784810126583, "f1": 0.2627627627627628, "support": 1106 }, "DATE": { "precision": 0.4384304276740384, "recall": 0.17182652097666262, "f1": 0.24689264167857922, "support": 29652 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 2894 }, "AGE": { "precision": 0.7339017935901205, "recall": 0.6233766233766234, "f1": 0.674139095205942, "support": 8008 }, "LOCATION": { "precision": 0.504071372937761, "recall": 0.4066605735176511, "f1": 0.4501565019444181, "support": 17506 }, "NAME": { "precision": 0.8394096373766006, "recall": 0.46696753846990374, "f1": 0.6000978268464817, "support": 18391 }, "MICRO_AVERAGE": { "precision": 0.5721906116642959, "recall": 0.36250340910438383, "f1": 0.443826627274773, "support": 84333 }, "MACRO_AVERAGE": { "precision": 0.5330840519550322, "recall": 0.35803155025509603, "f1": 0.42836397222618183, "support": 84333 } }
zhaorui-nb@Mistral-7B-Instruct-v0.3._.lora_ft._.Setting3 | Setting3 | ft | answer.txt | zhaorui
MICRO: P=0.954996  R=0.934682  F1=0.94473 | MACRO: P=0.943861  R=0.922748  F1=0.933185
detail result:
{ "ID": { "precision": 0.9509366636931311, "recall": 0.9580177172936192, "f1": 0.9544640573036582, "support": 7789 }, "CONTACT": { "precision": 0.9387755102040817, "recall": 0.8695652173913043, "f1": 0.9028459273797841, "support": 529 }, "DATE": { "precision": 0.9560117302052786, "recall": 0.9349741878291704, "f1": 0.9453759358852685, "support": 19177 }, "PROFESSION": { "precision": 0.871866295264624, "recall": 0.8810696692470091, "f1": 0.8764438221911095, "support": 1421 }, "AGE": { "precision": 0.9751058003485188, "recall": 0.9588739290085679, "f1": 0.9669217477166132, "support": 4085 }, "LOCATION": { "precision": 0.9425995492111194, "recall": 0.9081433224755701, "f1": 0.9250506912442397, "support": 13815 }, "NAME": { "precision": 0.9717311852017603, "recall": 0.9485947284112421, "f1": 0.9600235805607752, "support": 13734 }, "MICRO_AVERAGE": { "precision": 0.9549964564138909, "recall": 0.9346820809248555, "f1": 0.9447300771208227, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.9438609620183591, "recall": 0.9227483959509261, "f1": 0.9331852805566724, "support": 60550 } }
zhaorui-nb@Phi-3-mini-4k-instruct._.lora_ft._.Setting1 | Setting1 | ft | answer.txt | zhaorui
MICRO: P=0.797989  R=0.723512  F1=0.758928 | MACRO: P=0.592968  R=0.653247  F1=0.62165
detail result:
{ "ID": { "precision": 0.8848518725544997, "recall": 0.7074069936319964, "f1": 0.7862420065809896, "support": 8951 }, "CONTACT": { "precision": 0.28, "recall": 0.7, "f1": 0.4, "support": 10 }, "DATE": { "precision": 0.8538339631842009, "recall": 0.9142409607100901, "f1": 0.8830055471507816, "support": 7661 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "AGE": { "precision": 0.6881720430107527, "recall": 0.9142857142857143, "f1": 0.7852760736196318, "support": 140 }, "LOCATION": { "precision": 0.6558067030397506, "recall": 0.42675999188476366, "f1": 0.5170527868248018, "support": 9858 }, "NAME": { "precision": 0.7881139589502706, "recall": 0.9100341940808867, "f1": 0.8446973842617927, "support": 8481 }, "MICRO_AVERAGE": { "precision": 0.7979890023566378, "recall": 0.7235121506509786, "f1": 0.758927770970923, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.5929683629627822, "recall": 0.6532468363704931, "f1": 0.6216497879025427, "support": 35101 } }
zhaorui-nb@Phi-3-mini-4k-instruct._.lora_ft._.Setting2 | Setting2 | ft | answer.txt | zhaorui
MICRO: P=0.602849  R=0.583662  F1=0.5931 | MACRO: P=0.561752  R=0.501562  F1=0.529954
detail result:
{ "ID": { "precision": 0.5282856528285653, "recall": 0.7249114521841794, "f1": 0.6111733233793704, "support": 6776 }, "CONTACT": { "precision": 0.851764705882353, "recall": 0.32730560578661844, "f1": 0.47289353363814507, "support": 1106 }, "DATE": { "precision": 0.6201363818692338, "recall": 0.5735194927829489, "f1": 0.5959176522120017, "support": 29652 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 2894 }, "AGE": { "precision": 0.6933780593862592, "recall": 0.6969280719280719, "f1": 0.6951485333499408, "support": 8008 }, "LOCATION": { "precision": 0.4475376699742742, "recall": 0.5564949160287901, "f1": 0.4961042929164332, "support": 17506 }, "NAME": { "precision": 0.7911616505515456, "recall": 0.6317764123756185, "f1": 0.7025425522266227, "support": 18391 }, "MICRO_AVERAGE": { "precision": 0.6028487795318987, "recall": 0.583662386017336, "f1": 0.5931004566760252, "support": 84333 }, "MACRO_AVERAGE": { "precision": 0.5617520172131759, "recall": 0.5015622787266039, "f1": 0.5299536043267189, "support": 84333 } }
zhaorui-nb@Phi-3-mini-4k-instruct._.lora_ft._.Setting3 | Setting3 | ft | answer.txt | zhaorui
MICRO: P=0.927589  R=0.910768  F1=0.919101 | MACRO: P=0.916656  R=0.891351  F1=0.903827
detail result:
{ "ID": { "precision": 0.9325942915392457, "recall": 0.9396584927461805, "f1": 0.9361130651659525, "support": 7789 }, "CONTACT": { "precision": 0.9388185654008439, "recall": 0.8412098298676749, "f1": 0.8873379860418744, "support": 529 }, "DATE": { "precision": 0.9372863247863248, "recall": 0.9149502007613287, "f1": 0.9259835870913267, "support": 19177 }, "PROFESSION": { "precision": 0.8317621464829587, "recall": 0.8071780436312456, "f1": 0.8192857142857142, "support": 1421 }, "AGE": { "precision": 0.9274193548387096, "recall": 0.9290085679314566, "f1": 0.9282132811544577, "support": 4085 }, "LOCATION": { "precision": 0.8967751435723752, "recall": 0.8816503800217155, "f1": 0.8891484469102456, "support": 13815 }, "NAME": { "precision": 0.9519353148162012, "recall": 0.9258045725935634, "f1": 0.9386881252076336, "support": 13734 }, "MICRO_AVERAGE": { "precision": 0.9275886429388415, "recall": 0.9107679603633361, "f1": 0.9191013483108614, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.9166558773480942, "recall": 0.8913514410790236, "f1": 0.9038265818067448, "support": 60550 } }
zhaorui-nb@Qwen1.5-7B-Chat._.lora_ft._.Setting1 | Setting1 | ft | answer.txt | zhaorui
MICRO: P=0.715421  R=0.71055  F1=0.712977 | MACRO: P=0.58507  R=0.647171  F1=0.614556
detail result:
{ "ID": { "precision": 0.862237080373311, "recall": 0.6915428443749302, "f1": 0.7675139491630503, "support": 8951 }, "CONTACT": { "precision": 0.3888888888888889, "recall": 0.7, "f1": 0.5, "support": 10 }, "DATE": { "precision": 0.8455487576476464, "recall": 0.8839577078710351, "f1": 0.8643267389917039, "support": 7661 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "AGE": { "precision": 0.7797619047619048, "recall": 0.9357142857142857, "f1": 0.8506493506493507, "support": 140 }, "LOCATION": { "precision": 0.6083852544132918, "recall": 0.4754514100223169, "f1": 0.5337660858672132, "support": 9858 }, "NAME": { "precision": 0.6106700810926163, "recall": 0.8435326022874661, "f1": 0.708457120221826, "support": 8481 }, "MICRO_AVERAGE": { "precision": 0.7154208020193907, "recall": 0.7105495569926783, "f1": 0.7129768591970042, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.5850702810253798, "recall": 0.6471712643242906, "f1": 0.6145559284520261, "support": 35101 } }
zhaorui-nb@Qwen1.5-7B-Chat._.lora_ft._.Setting2 | Setting2 | ft | answer.txt | zhaorui
MICRO: P=0.563489  R=0.460958  F1=0.507093 | MACRO: P=0.557387  R=0.374568  F1=0.448046
detail result:
{ "ID": { "precision": 0.3249097472924188, "recall": 0.6375442739079102, "f1": 0.43045037863690716, "support": 6776 }, "CONTACT": { "precision": 1.0, "recall": 0.0009041591320072332, "f1": 0.001806684733514002, "support": 1106 }, "DATE": { "precision": 0.5966897037460932, "recall": 0.4571361122352624, "f1": 0.5176726689453685, "support": 29652 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 2894 }, "AGE": { "precision": 0.709350150808884, "recall": 0.6461038961038961, "f1": 0.6762514703960266, "support": 8008 }, "LOCATION": { "precision": 0.495135653603727, "recall": 0.41282988689592137, "f1": 0.4502523207276805, "support": 17506 }, "NAME": { "precision": 0.7756225189462288, "recall": 0.46745690827034964, "f1": 0.5833418150975402, "support": 18391 }, "MICRO_AVERAGE": { "precision": 0.5634893024873892, "recall": 0.4609583437088684, "f1": 0.5070929618251904, "support": 84333 }, "MACRO_AVERAGE": { "precision": 0.5573868249139075, "recall": 0.37456789093504955, "f1": 0.4480458200220603, "support": 84333 } }
zhaorui-nb@Qwen1.5-7B-Chat._.lora_ft._.Setting3 | Setting3 | ft | answer.txt | zhaorui
MICRO: P=0.946103  R=0.932634  F1=0.93932 | MACRO: P=0.930536  R=0.914584  F1=0.922491
detail result:
{ "ID": { "precision": 0.9327973872629066, "recall": 0.953395814610348, "f1": 0.942984126984127, "support": 7789 }, "CONTACT": { "precision": 0.9458333333333333, "recall": 0.8582230623818525, "f1": 0.8999008919722498, "support": 529 }, "DATE": { "precision": 0.9644054269752593, "recall": 0.9451947645617146, "f1": 0.9547034657115769, "support": 19177 }, "PROFESSION": { "precision": 0.8327574291637871, "recall": 0.8479943701618579, "f1": 0.8403068340306833, "support": 1421 }, "AGE": { "precision": 0.9534032690900219, "recall": 0.956670746634027, "f1": 0.9550342130987292, "support": 4085 }, "LOCATION": { "precision": 0.9241783515097559, "recall": 0.9017010495837857, "f1": 0.9128013482816736, "support": 13815 }, "NAME": { "precision": 0.9603783421464214, "recall": 0.9389107324887142, "f1": 0.9495232134310224, "support": 13734 }, "MICRO_AVERAGE": { "precision": 0.9461030692936604, "recall": 0.932634186622626, "f1": 0.9393203479765133, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.9305362199259264, "recall": 0.9145843629174715, "f1": 0.9224913361066956, "support": 60550 } }
zhaorui-nb@Yi-1.5-6B-Chat._.lora_ft._.Setting1 | Setting1 | ft | answer.txt | zhaorui
MICRO: P=0.814126  R=0.779266  F1=0.796314 | MACRO: P=0.59846  R=0.636761  F1=0.617016
detail result:
{ "ID": { "precision": 0.9259150374834632, "recall": 0.703720254720143, "f1": 0.7996699250983876, "support": 8951 }, "CONTACT": { "precision": 0.0547945205479452, "recall": 0.4, "f1": 0.09638554216867469, "support": 10 }, "DATE": { "precision": 0.9139444173573866, "recall": 0.9787234042553191, "f1": 0.945225338796092, "support": 7661 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "AGE": { "precision": 0.8482758620689655, "recall": 0.8785714285714286, "f1": 0.863157894736842, "support": 140 }, "LOCATION": { "precision": 0.6641969107120432, "recall": 0.536518563603165, "f1": 0.5935693844340946, "support": 9858 }, "NAME": { "precision": 0.7820906994619523, "recall": 0.959792477302205, "f1": 0.8618772830748055, "support": 8481 }, "MICRO_AVERAGE": { "precision": 0.8141258408238585, "recall": 0.7792655479900857, "f1": 0.7963143568319772, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.5984596353759651, "recall": 0.6367608754931801, "f1": 0.617016440410612, "support": 35101 } }
zhaorui-nb@Yi-1.5-6B-Chat._.lora_ft._.Setting2
Setting2
ft
answer.txt
zhaorui
0.604842
0.466614
0.526812
0.56391
0.383105
0.456248
{ "ID": { "precision": 0.4680517853414074, "recall": 0.6615997638724912, "f1": 0.5482450776568424, "support": 6776 }, "CONTACT": { "precision": 0.8809523809523809, "recall": 0.06690777576853527, "f1": 0.12436974789915968, "support": 1106 }, "DATE": { "precision": 0.6502504173622704, "recall": 0.49915688655065427, "f1": 0.5647727706338002, "support": 29652 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 2894 }, "AGE": { "precision": 0.7712037765538946, "recall": 0.612012987012987, "f1": 0.6824479565550372, "support": 8008 }, "LOCATION": { "precision": 0.5061656282450675, "recall": 0.4455043984919456, "f1": 0.4739016831743331, "support": 17506 }, "NAME": { "precision": 0.6707440448818174, "recall": 0.3965526616279702, "f1": 0.498428102788409, "support": 18391 }, "MICRO_AVERAGE": { "precision": 0.6048416845988318, "recall": 0.4666144925474014, "f1": 0.5268118318796731, "support": 84333 }, "MACRO_AVERAGE": { "precision": 0.5639097190481197, "recall": 0.3831049247606548, "f1": 0.4562476238358041, "support": 84333 } }
zhaorui-nb@Yi-1.5-6B-Chat._.lora_ft._.Setting3
Setting3
ft
answer.txt
zhaorui
0.93232
0.915706
0.923938
0.925996
0.905732
0.915752
{ "ID": { "precision": 0.9366515837104072, "recall": 0.956733855437155, "f1": 0.9465862178469355, "support": 7789 }, "CONTACT": { "precision": 0.9475890985324947, "recall": 0.8544423440453687, "f1": 0.8986083499005963, "support": 529 }, "DATE": { "precision": 0.9129951281508155, "recall": 0.8990457318662982, "f1": 0.905966737605423, "support": 19177 }, "PROFESSION": { "precision": 0.8453970484891076, "recall": 0.8465869106263195, "f1": 0.8459915611814347, "support": 1421 }, "AGE": { "precision": 0.9451589420043679, "recall": 0.9534883720930233, "f1": 0.9493053863027053, "support": 4085 }, "LOCATION": { "precision": 0.9262214861100655, "recall": 0.8905537459283388, "f1": 0.9080374935419588, "support": 13815 }, "NAME": { "precision": 0.9679597808959256, "recall": 0.9392747924858017, "f1": 0.953401574221204, "support": 13734 }, "MICRO_AVERAGE": { "precision": 0.9323199542634225, "recall": 0.9157060280759702, "f1": 0.9239383107956108, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.9259961525561691, "recall": 0.905732250354615, "f1": 0.9157521144964899, "support": 60550 } }
zhaorui-nb@pythia-1b._.lora_ft._.Setting1
Setting1
ft
answer.txt
zhaorui
0.680503
0.588473
0.631151
0.445592
0.492792
0.468005
{ "ID": { "precision": 0.5646608797999118, "recall": 0.42877890738464974, "f1": 0.4874269748539497, "support": 8951 }, "CONTACT": { "precision": 0.017857142857142856, "recall": 0.1, "f1": 0.030303030303030304, "support": 10 }, "DATE": { "precision": 0.8248572911261027, "recall": 0.8299177653047904, "f1": 0.8273797904873447, "support": 7661 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "AGE": { "precision": 0.4740740740740741, "recall": 0.9142857142857143, "f1": 0.624390243902439, "support": 140 }, "LOCATION": { "precision": 0.4418767507002801, "recall": 0.25603570703996753, "f1": 0.3242132305716121, "support": 9858 }, "NAME": { "precision": 0.7958205912334353, "recall": 0.9205282395943875, "f1": 0.8536438685692417, "support": 8481 }, "MICRO_AVERAGE": { "precision": 0.680503393292482, "recall": 0.5884732628700037, "f1": 0.6311511725613017, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.4455923899701352, "recall": 0.4927923333727871, "f1": 0.46800530342030106, "support": 35101 } }
zhaorui-nb@pythia-1b._.lora_ft._.Setting2
Setting2
ft
answer.txt
zhaorui
0.49663
0.344266
0.406645
0.379872
0.257412
0.306876
{ "ID": { "precision": 0.2760147141342976, "recall": 0.6533353010625738, "f1": 0.3880780188472496, "support": 6776 }, "CONTACT": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 1106 }, "DATE": { "precision": 0.6032955897255143, "recall": 0.4617900984756509, "f1": 0.5231427534432368, "support": 29652 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 2894 }, "AGE": { "precision": 0.7150668286755771, "recall": 0.14697802197802198, "f1": 0.2438367516055521, "support": 8008 }, "LOCATION": { "precision": 0.49008425145819834, "recall": 0.2159830915114818, "f1": 0.2998295071567345, "support": 17506 }, "NAME": { "precision": 0.5746405481038309, "recall": 0.32379968462835085, "f1": 0.4142032412881686, "support": 18391 }, "MICRO_AVERAGE": { "precision": 0.49663017447827573, "recall": 0.34426618287028804, "f1": 0.40664458341795473, "support": 84333 }, "MACRO_AVERAGE": { "precision": 0.3798717045853454, "recall": 0.25741231395086844, "f1": 0.30687621731476983, "support": 84333 } }
zhaorui-nb@pythia-1b._.lora_ft._.Setting3
Setting3
ft
answer.txt
zhaorui
0.887324
0.834319
0.860005
0.86639
0.796946
0.830219
{ "ID": { "precision": 0.8819513406156901, "recall": 0.9121838490178457, "f1": 0.8968128747238876, "support": 7789 }, "CONTACT": { "precision": 0.8789808917197452, "recall": 0.782608695652174, "f1": 0.828, "support": 529 }, "DATE": { "precision": 0.8795610425240055, "recall": 0.8358971684830787, "f1": 0.8571734131864607, "support": 19177 }, "PROFESSION": { "precision": 0.7373107747105966, "recall": 0.5826882477128783, "f1": 0.6509433962264152, "support": 1421 }, "AGE": { "precision": 0.8835836909871244, "recall": 0.8063647490820074, "f1": 0.8432100345577882, "support": 4085 }, "LOCATION": { "precision": 0.8689831048772713, "recall": 0.7892870068765834, "f1": 0.827219967378523, "support": 13815 }, "NAME": { "precision": 0.9343608199029886, "recall": 0.8695937090432503, "f1": 0.9008146025041484, "support": 13734 }, "MICRO_AVERAGE": { "precision": 0.8873236962745683, "recall": 0.8343187448389761, "f1": 0.8600052773592775, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.866390237905346, "recall": 0.7969462036954024, "f1": 0.8302185820602073, "support": 60550 } }
zhaorui-nb@pythia-2.8b._.lora_ft._.Setting1
Setting1
ft
answer.txt
zhaorui
0.67938
0.608985
0.642259
0.557119
0.515451
0.535476
{ "ID": { "precision": 0.5445768592547896, "recall": 0.40330689308457157, "f1": 0.4634146341463415, "support": 8951 }, "CONTACT": { "precision": 0.26666666666666666, "recall": 0.4, "f1": 0.32, "support": 10 }, "DATE": { "precision": 0.934979544126242, "recall": 0.83526954705652, "f1": 0.882316442605998, "support": 7661 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "AGE": { "precision": 0.9333333333333333, "recall": 0.7, "f1": 0.8, "support": 140 }, "LOCATION": { "precision": 0.5129570976101353, "recall": 0.3614323392168797, "f1": 0.42406569864318017, "support": 9858 }, "NAME": { "precision": 0.707319313068234, "recall": 0.9081476241009315, "f1": 0.795250387196696, "support": 8481 }, "MICRO_AVERAGE": { "precision": 0.6793796084413933, "recall": 0.6089854989886329, "f1": 0.6422594456546233, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.5571189734370573, "recall": 0.5154509147798432, "f1": 0.535475566961406, "support": 35101 } }
zhaorui-nb@pythia-2.8b._.lora_ft._.Setting2
Setting2
ft
answer.txt
zhaorui
0.468348
0.399867
0.431407
0.317785
0.303954
0.310716
{ "ID": { "precision": 0.3203775680177679, "recall": 0.6812278630460449, "f1": 0.43580060422960726, "support": 6776 }, "CONTACT": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 1106 }, "DATE": { "precision": 0.6085518966980922, "recall": 0.4593282072035613, "f1": 0.5235139238560146, "support": 29652 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 2894 }, "AGE": { "precision": 0.3564082278481013, "recall": 0.22502497502497504, "f1": 0.2758726270667483, "support": 8008 }, "LOCATION": { "precision": 0.3736051013436575, "recall": 0.37484291100194217, "f1": 0.3742229826062161, "support": 17506 }, "NAME": { "precision": 0.5655522909552926, "recall": 0.38725463541949867, "f1": 0.45972114639814093, "support": 18391 }, "MICRO_AVERAGE": { "precision": 0.46834810144162664, "recall": 0.3998671931509611, "f1": 0.4314069146384367, "support": 84333 }, "MACRO_AVERAGE": { "precision": 0.3177850121232731, "recall": 0.3039540845280032, "f1": 0.3107157100362528, "support": 84333 } }
zhaorui-nb@pythia-2.8b._.lora_ft._.Setting3
Setting3
ft
answer.txt
zhaorui
0.910426
0.873212
0.891431
0.907407
0.84512
0.875156
{ "ID": { "precision": 0.9455119768360095, "recall": 0.9223263576839132, "f1": 0.9337752648339507, "support": 7789 }, "CONTACT": { "precision": 0.9186295503211992, "recall": 0.8109640831758034, "f1": 0.8614457831325301, "support": 529 }, "DATE": { "precision": 0.8768380379950628, "recall": 0.8520102205767326, "f1": 0.8642458543810003, "support": 19177 }, "PROFESSION": { "precision": 0.8442906574394463, "recall": 0.6868402533427164, "f1": 0.7574699262708575, "support": 1421 }, "AGE": { "precision": 0.9146810146041506, "recall": 0.8739290085679314, "f1": 0.8938407611417125, "support": 4085 }, "LOCATION": { "precision": 0.9062764728532923, "recall": 0.8518277234889613, "f1": 0.8782089552238806, "support": 13815 }, "NAME": { "precision": 0.9456195619561956, "recall": 0.917940876656473, "f1": 0.9315746693268306, "support": 13734 }, "MICRO_AVERAGE": { "precision": 0.9104261730520878, "recall": 0.8732122213047069, "f1": 0.8914309799789253, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.9074067531436223, "recall": 0.8451197890703614, "f1": 0.8751563931796076, "support": 60550 } }