Column schema (as shown by the dataset viewer):

| column | dtype | values |
| --- | --- | --- |
| model name | string | lengths 20–56 |
| dataset | string | 3 classes (Setting1, Setting2, Setting3) |
| method | string | 2 classes (icl = in-context learning, ft = fine-tuned) |
| file name | string | 1 class (answer.txt) |
| submitter | string | 1 class (zhaorui) |
| MICRO precision | float64 | 0.03 to 0.96 |
| MICRO recall | float64 | 0 to 0.93 |
| MICRO f1 | float64 | 0 to 0.94 |
| MACRO precision | float64 | 0.13 to 0.94 |
| MACRO recall | float64 | 0 to 0.92 |
| MACRO f1 | float64 | 0 to 0.93 |
| detail result | string | per-entity JSON, lengths 1.16k–1.47k |
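
For programmatic use, the table can be loaded with the Hugging Face `datasets` library. A minimal sketch, assuming the leaderboard is published as a dataset repo on the Hub; the repo id below is a hypothetical placeholder, not the real one:

```python
from datasets import load_dataset

# Hypothetical repo id -- substitute the actual dataset id from this page.
ds = load_dataset("zhaorui-nb/deid-leaderboard", split="train")

print(ds.column_names)  # the twelve columns listed in the schema above
for row in ds:
    print(row["model name"], row["dataset"], row["method"], row["MICRO f1"])
```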
Submissions (each entry: model · dataset · method · file name · submitter, followed by MICRO and MACRO precision/recall/F1; the JSON line under each entry is that row's `detail result`):

**01-ai@Yi-1.5-6B-Chat** · Setting1 · icl · answer.txt · zhaorui
MICRO P/R/F1: 0.673741 / 0.336458 / 0.448793 · MACRO P/R/F1: 0.44781 / 0.248924 / 0.319981
{ "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "NAME": { "precision": 0.5352372178719484, "recall": 0.2740242895884919, "f1": 0.3624736801060594, "support": 8481 }, "LOCATION": { "precision": 0.6281911411739035, "recall": 0.2920470683708663, "f1": 0.3987258500103871, "support": 9858 }, "ID": { "precision": 0.7566633761105627, "recall": 0.34253156071947266, "f1": 0.4715834807352149, "support": 8951 }, "DATE": { "precision": 0.7972571428571429, "recall": 0.4552930426837227, "f1": 0.5795945496842805, "support": 7661 }, "AGE": { "precision": 0.41732283464566927, "recall": 0.37857142857142856, "f1": 0.3970037453183521, "support": 140 }, "CONTACT": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 10 }, "MICRO_AVERAGE": { "precision": 0.673740658337612, "recall": 0.3364576507791801, "f1": 0.4487934638039141, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.4478102446656038, "recall": 0.2489239128477117, "f1": 0.319980518002156, "support": 35101 } }

**01-ai@Yi-1.5-6B-Chat** · Setting3 · icl · answer.txt · zhaorui
MICRO P/R/F1: 0.833257 / 0.600661 / 0.698094 · MACRO P/R/F1: 0.801466 / 0.576268 / 0.670462
{ "PROFESSION": { "precision": 0.566747572815534, "recall": 0.3286418015482055, "f1": 0.4160356347438752, "support": 1421 }, "NAME": { "precision": 0.8167994966442953, "recall": 0.5671326634629387, "f1": 0.669445638160722, "support": 13734 }, "LOCATION": { "precision": 0.8608566193311956, "recall": 0.5441187115454217, "f1": 0.6667849381292412, "support": 13815 }, "ID": { "precision": 0.8891784391715813, "recall": 0.8323276415457697, "f1": 0.859814323607427, "support": 7789 }, "DATE": { "precision": 0.8270029889917621, "recall": 0.5915419512958231, "f1": 0.6897306499665592, "support": 19177 }, "AGE": { "precision": 0.7702618453865336, "recall": 0.6048959608323133, "f1": 0.6776360894007953, "support": 4085 }, "CONTACT": { "precision": 0.8794117647058823, "recall": 0.5652173913043478, "f1": 0.6881472957422324, "support": 529 }, "MICRO_AVERAGE": { "precision": 0.8332569648093842, "recall": 0.6006606110652354, "f1": 0.698094013320793, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.8014655324352548, "recall": 0.5762680173621171, "f1": 0.670461938636052, "support": 60550 } }

**EleutherAI@pythia-1b** · Setting1 · icl · answer.txt · zhaorui
MICRO P/R/F1: 0.538551 / 0.250135 / 0.341608 · MACRO P/R/F1: 0.36385 / 0.153981 / 0.216387
{ "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "NAME": { "precision": 0.3914399584127534, "recall": 0.26636009904492397, "f1": 0.3170081392085321, "support": 8481 }, "LOCATION": { "precision": 0.7067866516685414, "recall": 0.19121525664434977, "f1": 0.30099800399201593, "support": 9858 }, "ID": { "precision": 0.45235707121364094, "recall": 0.20154172718132052, "f1": 0.27884689697812814, "support": 8951 }, "DATE": { "precision": 0.7463672391017173, "recall": 0.368750815820389, "f1": 0.49362222610518963, "support": 7661 }, "AGE": { "precision": 0.25, "recall": 0.05, "f1": 0.08333333333333334, "support": 140 }, "CONTACT": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 10 }, "MICRO_AVERAGE": { "precision": 0.5385511868981169, "recall": 0.2501353237799493, "f1": 0.3416076569916738, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.36385013148523615, "recall": 0.15398112838442618, "f1": 0.21638729891671737, "support": 35101 } }

**EleutherAI@pythia-1b** · Setting2 · icl · answer.txt · zhaorui
MICRO P/R/F1: 0.229472 / 0.151139 / 0.182245 · MACRO P/R/F1: 0.246173 / 0.147191 / 0.184229
{ "CONTACT": { "precision": 0.11428571428571428, "recall": 0.003616636528028933, "f1": 0.007011393514460999, "support": 1106 }, "AGE": { "precision": 0.4815595363540569, "recall": 0.057067932067932065, "f1": 0.10204309478620074, "support": 8008 }, "ID": { "precision": 0.4749574105621806, "recall": 0.6171782762691853, "f1": 0.5368076503433669, "support": 6776 }, "LOCATION": { "precision": 0.08061344868265828, "recall": 0.07026162458585627, "f1": 0.07508240752044927, "support": 17506 }, "DATE": { "precision": 0.45404242237934217, "recall": 0.14943342776203966, "f1": 0.22486107939407782, "support": 29652 }, "NAME": { "precision": 0.11774916823376248, "recall": 0.13278233918764612, "f1": 0.12481472016355737, "support": 18391 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 2894 }, "MICRO_AVERAGE": { "precision": 0.22947159960392474, "recall": 0.1511389373080526, "f1": 0.18224452737385438, "support": 84333 }, "MACRO_AVERAGE": { "precision": 0.2461725286425307, "recall": 0.1471914623429555, "f1": 0.1842288328872183, "support": 84333 } }

**EleutherAI@pythia-1b** · Setting3 · icl · answer.txt · zhaorui
MICRO P/R/F1: 0.861082 / 0.552898 / 0.673405 · MACRO P/R/F1: 0.814266 / 0.491727 / 0.613168
{ "PROFESSION": { "precision": 0.5316091954022989, "recall": 0.1301900070372977, "f1": 0.2091577162238553, "support": 1421 }, "NAME": { "precision": 0.8224498506188647, "recall": 0.5612348915101208, "f1": 0.6671860122911798, "support": 13734 }, "LOCATION": { "precision": 0.8922374429223744, "recall": 0.49504162142598623, "f1": 0.6367783985102421, "support": 13815 }, "ID": { "precision": 0.87248322147651, "recall": 0.7844395942996534, "f1": 0.8261222282314765, "support": 7789 }, "DATE": { "precision": 0.8811303704920727, "recall": 0.5593158471085155, "f1": 0.684274322169059, "support": 19177 }, "AGE": { "precision": 0.8147044212617983, "recall": 0.401468788249694, "f1": 0.5378812725483766, "support": 4085 }, "CONTACT": { "precision": 0.8852459016393442, "recall": 0.5103969754253308, "f1": 0.6474820143884891, "support": 529 }, "MICRO_AVERAGE": { "precision": 0.8610818179479925, "recall": 0.55289843104872, "f1": 0.673405143368635, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.8142657719733233, "recall": 0.4917268178652283, "f1": 0.613167823560949, "support": 60550 } }

**EleutherAI@pythia-2.8b** · Setting1 · icl · answer.txt · zhaorui
MICRO P/R/F1: 0.597884 / 0.235008 / 0.337396 · MACRO P/R/F1: 0.391379 / 0.148751 / 0.215571
{ "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "NAME": { "precision": 0.5504544395245864, "recall": 0.2785048932908855, "f1": 0.36987159411212034, "support": 8481 }, "LOCATION": { "precision": 0.7332621082621082, "recall": 0.20886589571921282, "f1": 0.3251223748618348, "support": 9858 }, "ID": { "precision": 0.5385248149163696, "recall": 0.21941682493576137, "f1": 0.31179552309890457, "support": 8951 }, "DATE": { "precision": 0.6285229202037351, "recall": 0.2416133664012531, "f1": 0.3490477088440505, "support": 7661 }, "AGE": { "precision": 0.28888888888888886, "recall": 0.09285714285714286, "f1": 0.14054054054054055, "support": 140 }, "CONTACT": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 10 }, "MICRO_AVERAGE": { "precision": 0.5978835978835979, "recall": 0.23500754964246032, "f1": 0.3373962125240296, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.3913790245422412, "recall": 0.1487511604577508, "f1": 0.21557056315777565, "support": 35101 } }

**EleutherAI@pythia-2.8b** · Setting2 · icl · answer.txt · zhaorui
MICRO P/R/F1: 0.258988 / 0.136246 / 0.178558 · MACRO P/R/F1: 0.249814 / 0.141902 / 0.180994
{ "NAME": { "precision": 0.14414632181129644, "recall": 0.09727584144418465, "f1": 0.11616128822803713, "support": 18391 }, "CONTACT": { "precision": 0.017543859649122806, "recall": 0.0009041591320072332, "f1": 0.0017196904557179706, "support": 1106 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 2894 }, "ID": { "precision": 0.5102875706894477, "recall": 0.6258854781582054, "f1": 0.5622058726055544, "support": 6776 }, "LOCATION": { "precision": 0.09340736575766095, "recall": 0.0759168285159374, "f1": 0.08375874456418983, "support": 17506 }, "DATE": { "precision": 0.4265042807186784, "recall": 0.11928369081343586, "f1": 0.18642772433785743, "support": 29652 }, "AGE": { "precision": 0.5568075117370892, "recall": 0.07405094905094906, "f1": 0.13071751350159816, "support": 8008 }, "MICRO_AVERAGE": { "precision": 0.25898794094443817, "recall": 0.136245597808687, "f1": 0.17855755334193227, "support": 84333 }, "MACRO_AVERAGE": { "precision": 0.24981384433761367, "recall": 0.14190242101638853, "f1": 0.18099421673431115, "support": 84333 } }

**EleutherAI@pythia-2.8b** · Setting3 · icl · answer.txt · zhaorui
MICRO P/R/F1: 0.883802 / 0.55635 / 0.682849 · MACRO P/R/F1: 0.818117 / 0.511118 / 0.629165
{ "PROFESSION": { "precision": 0.5663157894736842, "recall": 0.1893033075299085, "f1": 0.28375527426160335, "support": 1421 }, "NAME": { "precision": 0.876635299518017, "recall": 0.5562108635503131, "f1": 0.6805951532430506, "support": 13734 }, "LOCATION": { "precision": 0.8899406790357188, "recall": 0.5103872602243937, "f1": 0.6487257337381543, "support": 13815 }, "ID": { "precision": 0.8936535162950258, "recall": 0.8026704326614457, "f1": 0.845722015556307, "support": 7789 }, "DATE": { "precision": 0.908964316797215, "recall": 0.5446107316055692, "f1": 0.6811230312713992, "support": 19177 }, "AGE": { "precision": 0.8117427772600186, "recall": 0.42643818849449205, "f1": 0.5591397849462366, "support": 4085 }, "CONTACT": { "precision": 0.7795698924731183, "recall": 0.5482041587901701, "f1": 0.6437291897891232, "support": 529 }, "MICRO_AVERAGE": { "precision": 0.8838020778675622, "recall": 0.5563501238645747, "f1": 0.6828492084405976, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.8181174672646854, "recall": 0.5111178489794703, "f1": 0.6291654080669206, "support": 60550 } }

**Qwen@Qwen1.5-7B-Chat** · Setting1 · icl · answer.txt · zhaorui
MICRO P/R/F1: 0.027778 / 0.000114 / 0.000227 · MACRO P/R/F1: 0.126225 / 0.00008 / 0.00016
{ "NAME": { "precision": 0.25, "recall": 0.00011791062374719962, "f1": 0.00023571007660577487, "support": 8481 }, "LOCATION": { "precision": 0.007352941176470588, "recall": 0.00010144045445323595, "f1": 0.00020012007204322593, "support": 9858 }, "ID": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 8951 }, "DATE": { "precision": 0.5, "recall": 0.0002610625244746117, "f1": 0.0005218525766470972, "support": 7661 }, "AGE": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 140 }, "CONTACT": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 10 }, "MICRO_AVERAGE": { "precision": 0.027777777777777776, "recall": 0.00011395686732571722, "f1": 0.0002269825507164137, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.12622549019607843, "recall": 8.006893377917454e-05, "f1": 0.00016003635129571264, "support": 35101 } }

**Qwen@Qwen1.5-7B-Chat** · Setting3 · icl · answer.txt · zhaorui
MICRO P/R/F1: 0.873993 / 0.025087 / 0.048773 · MACRO P/R/F1: 0.871603 / 0.018961 / 0.037114
{ "PROFESSION": { "precision": 0.7368421052631579, "recall": 0.019704433497536946, "f1": 0.03838245373543523, "support": 1421 }, "NAME": { "precision": 0.8594377510040161, "recall": 0.015581767875345856, "f1": 0.030608596152470857, "support": 13734 }, "LOCATION": { "precision": 0.8971014492753623, "recall": 0.044806369887803114, "f1": 0.08534987935194761, "support": 13815 }, "ID": { "precision": 1.0, "recall": 0.00115547567081782, "f1": 0.002308284175429597, "support": 7789 }, "DATE": { "precision": 0.8885400313971743, "recall": 0.029514522605204152, "f1": 0.057131321287978196, "support": 19177 }, "AGE": { "precision": 0.7192982456140351, "recall": 0.0200734394124847, "f1": 0.039056918313884255, "support": 4085 }, "CONTACT": { "precision": 1.0, "recall": 0.001890359168241966, "f1": 0.0037735849056603774, "support": 529 }, "MICRO_AVERAGE": { "precision": 0.8739930955120828, "recall": 0.02508670520231214, "f1": 0.04877343950680709, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.871602797507678, "recall": 0.018960909731062077, "f1": 0.03711442950246772, "support": 60550 } }

**meta-llama@Llama-2-7b-hf** · Setting1 · icl · answer.txt · zhaorui
MICRO P/R/F1: 0.519124 / 0.282271 / 0.365697 · MACRO P/R/F1: 0.335145 / 0.172249 / 0.227549
{ "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "NAME": { "precision": 0.4105092728878422, "recall": 0.32885272963093976, "f1": 0.36517184942716857, "support": 8481 }, "LOCATION": { "precision": 0.6646706586826348, "recall": 0.2702373706634206, "f1": 0.38424924275205535, "support": 9858 }, "ID": { "precision": 0.4086677367576244, "recall": 0.14221874650876998, "f1": 0.21100613293552128, "support": 8951 }, "DATE": { "precision": 0.628837393543276, "recall": 0.414436757603446, "f1": 0.49960660896931547, "support": 7661 }, "AGE": { "precision": 0.23333333333333334, "recall": 0.05, "f1": 0.0823529411764706, "support": 140 }, "CONTACT": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 10 }, "MICRO_AVERAGE": { "precision": 0.5191239652101016, "recall": 0.2822711603658015, "f1": 0.36569656928783656, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.3351454850292444, "recall": 0.17224937205808236, "f1": 0.22754901252163548, "support": 35101 } }

**meta-llama@Llama-2-7b-hf** · Setting3 · icl · answer.txt · zhaorui
MICRO P/R/F1: 0.852069 / 0.608621 / 0.710058 · MACRO P/R/F1: 0.799693 / 0.566826 / 0.663419
{ "PROFESSION": { "precision": 0.5560747663551402, "recall": 0.2512315270935961, "f1": 0.3460979156568105, "support": 1421 }, "NAME": { "precision": 0.8199010765202212, "recall": 0.6155526430755789, "f1": 0.7031815346225826, "support": 13734 }, "LOCATION": { "precision": 0.8683197463768116, "recall": 0.5551212450235251, "f1": 0.6772640967898618, "support": 13815 }, "ID": { "precision": 0.896279905960448, "recall": 0.8320708691744768, "f1": 0.8629826897470039, "support": 7789 }, "DATE": { "precision": 0.8691779270778289, "recall": 0.6004067372373155, "f1": 0.7102146558105107, "support": 19177 }, "AGE": { "precision": 0.7964327258627375, "recall": 0.5028151774785802, "f1": 0.6164465786314526, "support": 4085 }, "CONTACT": { "precision": 0.7916666666666666, "recall": 0.610586011342155, "f1": 0.6894343649946638, "support": 529 }, "MICRO_AVERAGE": { "precision": 0.8520693641618498, "recall": 0.6086209744013212, "f1": 0.7100578034682082, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.7996932592599792, "recall": 0.5668263157750325, "f1": 0.6634185008068372, "support": 60550 } }

**meta-llama@Meta-Llama-3-8B-Instruct** · Setting1 · icl · answer.txt · zhaorui
MICRO P/R/F1: 0.507072 / 0.258397 / 0.342342 · MACRO P/R/F1: 0.368275 / 0.162881 / 0.225865
{ "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "NAME": { "precision": 0.6020331325301205, "recall": 0.3770781747435444, "f1": 0.46371347785108386, "support": 8481 }, "LOCATION": { "precision": 0.5767504488330341, "recall": 0.26070196794481637, "f1": 0.3590890037725304, "support": 9858 }, "ID": { "precision": 0.4260834345889024, "recall": 0.11752876773544856, "f1": 0.1842381786339755, "support": 8951 }, "DATE": { "precision": 0.40783956244302644, "recall": 0.29199843362485317, "f1": 0.34033165982047775, "support": 7661 }, "AGE": { "precision": 0.5652173913043478, "recall": 0.09285714285714286, "f1": 0.15950920245398773, "support": 140 }, "CONTACT": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 10 }, "MICRO_AVERAGE": { "precision": 0.5070721753228602, "recall": 0.2583971966610638, "f1": 0.34234166226315393, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.36827485281420447, "recall": 0.16288064098654362, "f1": 0.22586547549898708, "support": 35101 } }

**meta-llama@Meta-Llama-3-8B-Instruct** · Setting2 · icl · answer.txt · zhaorui
MICRO P/R/F1: 0.283601 / 0.223033 / 0.249696 · MACRO P/R/F1: 0.28171 / 0.1646 / 0.20779
{ "AGE": { "precision": 0.6350958744915747, "recall": 0.1364885114885115, "f1": 0.2246890739027649, "support": 8008 }, "LOCATION": { "precision": 0.11882324802678786, "recall": 0.14189420770021707, "f1": 0.12933795006638724, "support": 17506 }, "ID": { "precision": 0.362962962962963, "recall": 0.27479338842975204, "f1": 0.312783470519066, "support": 6776 }, "DATE": { "precision": 0.43561872909698995, "recall": 0.22841629569674896, "f1": 0.29969026548672567, "support": 29652 }, "CONTACT": { "precision": 0.1320754716981132, "recall": 0.012658227848101266, "f1": 0.023102310231023097, "support": 1106 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 2894 }, "NAME": { "precision": 0.2873919497075002, "recall": 0.3579468218150182, "f1": 0.31881250454028137, "support": 18391 }, "MICRO_AVERAGE": { "precision": 0.2836012182986038, "recall": 0.2230325021047514, "f1": 0.2496963260429458, "support": 84333 }, "MACRO_AVERAGE": { "precision": 0.2817097479977041, "recall": 0.16459963613976414, "f1": 0.20779003832536255, "support": 84333 } }

**meta-llama@Meta-Llama-3-8B-Instruct** · Setting3 · icl · answer.txt · zhaorui
MICRO P/R/F1: 0.837661 / 0.611693 / 0.707061 · MACRO P/R/F1: 0.815803 / 0.581267 / 0.678848
{ "PROFESSION": { "precision": 0.7017310252996005, "recall": 0.37086558761435606, "f1": 0.48526703499079193, "support": 1421 }, "NAME": { "precision": 0.8464136697794556, "recall": 0.6203582350371342, "f1": 0.715966386554622, "support": 13734 }, "LOCATION": { "precision": 0.8607975921745673, "recall": 0.5796597900832429, "f1": 0.6927934942469072, "support": 13815 }, "ID": { "precision": 0.9124541540424175, "recall": 0.7346257542688407, "f1": 0.8139402560455191, "support": 7789 }, "DATE": { "precision": 0.8020868622535402, "recall": 0.617301976325807, "f1": 0.6976661951909477, "support": 19177 }, "AGE": { "precision": 0.7890359168241966, "recall": 0.5108935128518972, "f1": 0.6202080237741455, "support": 4085 }, "CONTACT": { "precision": 0.7980997624703088, "recall": 0.6351606805293005, "f1": 0.7073684210526315, "support": 529 }, "MICRO_AVERAGE": { "precision": 0.8376605753573367, "recall": 0.6116928158546656, "f1": 0.7070614512341792, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.8158027118348695, "recall": 0.5812665052443683, "f1": 0.6788479561070134, "support": 60550 } }

**meta-llama@Meta-Llama-3-8B** · Setting1 · icl · answer.txt · zhaorui
MICRO P/R/F1: 0.540875 / 0.081052 / 0.140978 · MACRO P/R/F1: 0.292196 / 0.050096 / 0.085529
{ "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "NAME": { "precision": 0.3112884834663626, "recall": 0.032189600282985495, "f1": 0.05834580038469758, "support": 8481 }, "LOCATION": { "precision": 0.5538376587520707, "recall": 0.10174477581659566, "f1": 0.1719084754477676, "support": 9858 }, "ID": { "precision": 0.22379032258064516, "recall": 0.012400849067143336, "f1": 0.023499523658304225, "support": 8951 }, "DATE": { "precision": 0.7064531780688986, "recall": 0.1900535178175173, "f1": 0.29952684632791604, "support": 7661 }, "AGE": { "precision": 0.25, "recall": 0.014285714285714285, "f1": 0.027027027027027025, "support": 140 }, "CONTACT": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 10 }, "MICRO_AVERAGE": { "precision": 0.5408745247148289, "recall": 0.08105182188541638, "f1": 0.1409776764698595, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.29219566326685387, "recall": 0.050096351038565155, "f1": 0.08552893966086868, "support": 35101 } }

**meta-llama@Meta-Llama-3-8B** · Setting2 · icl · answer.txt · zhaorui
MICRO P/R/F1: 0.299782 / 0.075154 / 0.12018 · MACRO P/R/F1: 0.296362 / 0.051534 / 0.087801
{ "AGE": { "precision": 0.5902140672782875, "recall": 0.0241008991008991, "f1": 0.046310737852429514, "support": 8008 }, "LOCATION": { "precision": 0.20618405627198125, "recall": 0.08037244373357706, "f1": 0.11565967940813811, "support": 17506 }, "ID": { "precision": 0.38205499276411, "recall": 0.07792207792207792, "f1": 0.12944349105172837, "support": 6776 }, "DATE": { "precision": 0.4890259386903682, "recall": 0.09092135437744503, "f1": 0.15333428124555668, "support": 29652 }, "CONTACT": { "precision": 0.1935483870967742, "recall": 0.0054249547920434, "f1": 0.010554089709762533, "support": 1106 }, "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 2894 }, "NAME": { "precision": 0.2135070083533909, "recall": 0.08199662878581915, "f1": 0.118488253319714, "support": 18391 }, "MICRO_AVERAGE": { "precision": 0.29978242361176805, "recall": 0.07515444725077965, "f1": 0.12018013747333492, "support": 84333 }, "MACRO_AVERAGE": { "precision": 0.2963620643507017, "recall": 0.05153405124455167, "f1": 0.08780056532139423, "support": 84333 } }

**meta-llama@Meta-Llama-3-8B** · Setting3 · icl · answer.txt · zhaorui
MICRO P/R/F1: 0.676322 / 0.07057 / 0.127804 · MACRO P/R/F1: 0.683701 / 0.073847 / 0.133297
{ "PROFESSION": { "precision": 0.5702127659574469, "recall": 0.09429978888106967, "f1": 0.16183574879227053, "support": 1421 }, "NAME": { "precision": 0.6215767634854772, "recall": 0.0545361875637105, "f1": 0.10027444942767254, "support": 13734 }, "LOCATION": { "precision": 0.7169500822819528, "recall": 0.09460731089395584, "f1": 0.16715692543803554, "support": 13815 }, "ID": { "precision": 0.6742857142857143, "recall": 0.015149569906278084, "f1": 0.029633350075339022, "support": 7789 }, "DATE": { "precision": 0.6527711984841308, "recall": 0.07185691192574438, "f1": 0.12946260804208942, "support": 19177 }, "AGE": { "precision": 0.7622282608695652, "recall": 0.13733170134638922, "f1": 0.2327317983820784, "support": 4085 }, "CONTACT": { "precision": 0.7878787878787878, "recall": 0.04914933837429111, "f1": 0.09252669039145905, "support": 529 }, "MICRO_AVERAGE": { "precision": 0.6763216207660652, "recall": 0.07056977704376548, "f1": 0.12780403182389186, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.6837005104632965, "recall": 0.0738472584130627, "f1": 0.13329696250895143, "support": 60550 } }

**microsoft@Phi-3-mini-4k-instruct** · Setting1 · icl · answer.txt · zhaorui
MICRO P/R/F1: 0.618346 / 0.257144 / 0.363234 · MACRO P/R/F1: 0.435172 / 0.184519 / 0.259153
{ "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "NAME": { "precision": 0.543661971830986, "recall": 0.22756750383209526, "f1": 0.3208378355913889, "support": 8481 }, "LOCATION": { "precision": 0.6183310533515732, "recall": 0.2751065124771759, "f1": 0.38079191238416177, "support": 9858 }, "ID": { "precision": 0.4675850891410049, "recall": 0.12892414255390458, "f1": 0.20211927489272266, "support": 8951 }, "DATE": { "precision": 0.7869982762866289, "recall": 0.4171779141104294, "f1": 0.5452994369561508, "support": 7661 }, "AGE": { "precision": 0.6296296296296297, "recall": 0.24285714285714285, "f1": 0.35051546391752575, "support": 140 }, "CONTACT": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 10 }, "MICRO_AVERAGE": { "precision": 0.6183462355278482, "recall": 0.2571436711204809, "f1": 0.3632339329550485, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.43517228860568896, "recall": 0.184519030832964, "f1": 0.2591534411410576, "support": 35101 } }

**microsoft@Phi-3-mini-4k-instruct** · Setting2 · icl · answer.txt · zhaorui
MICRO P/R/F1: 0.504577 / 0.207202 / 0.29377 · MACRO P/R/F1: 0.400923 / 0.189641 / 0.257487
{ "AGE": { "precision": 0.6138115990613476, "recall": 0.22864635364635363, "f1": 0.33318169411336546, "support": 8008 }, "LOCATION": { "precision": 0.3353516534699581, "recall": 0.12338626756540615, "f1": 0.18039837975529294, "support": 17506 }, "ID": { "precision": 0.6379661016949153, "recall": 0.5554899645808736, "f1": 0.5938781950142, "support": 6776 }, "DATE": { "precision": 0.5213417190775681, "recall": 0.2096654525832996, "f1": 0.2990595762079996, "support": 29652 }, "CONTACT": { "precision": 0.11538461538461539, "recall": 0.0081374321880651, "f1": 0.015202702702702702, "support": 1106 }, "PROFESSION": { "precision": 0.06051873198847262, "recall": 0.014512785072563926, "f1": 0.023411371237458196, "support": 2894 }, "NAME": { "precision": 0.5220877458396369, "recall": 0.18764613125985535, "f1": 0.2760689572417103, "support": 18391 }, "MICRO_AVERAGE": { "precision": 0.504576824232624, "recall": 0.2072024000094862, "f1": 0.2937695437275142, "support": 84333 }, "MACRO_AVERAGE": { "precision": 0.4009231666452163, "recall": 0.18964062669948825, "f1": 0.2574872399485678, "support": 84333 } }

**microsoft@Phi-3-mini-4k-instruct** · Setting3 · icl · answer.txt · zhaorui
MICRO P/R/F1: 0.866033 / 0.590933 / 0.702511 · MACRO P/R/F1: 0.837363 / 0.554243 / 0.667003
{ "PROFESSION": { "precision": 0.705511811023622, "recall": 0.31527093596059114, "f1": 0.43579766536964976, "support": 1421 }, "NAME": { "precision": 0.8643748034797191, "recall": 0.6004805591961555, "f1": 0.708657357679914, "support": 13734 }, "LOCATION": { "precision": 0.8865888847070631, "recall": 0.5115454216431415, "f1": 0.6487652620949235, "support": 13815 }, "ID": { "precision": 0.8974398519432449, "recall": 0.7470792142765439, "f1": 0.8153856932670077, "support": 7789 }, "DATE": { "precision": 0.8668916935720575, "recall": 0.6160504771340668, "f1": 0.7202560585276635, "support": 19177 }, "AGE": { "precision": 0.7685767097966728, "recall": 0.5089351285189718, "f1": 0.6123711340206186, "support": 4085 }, "CONTACT": { "precision": 0.8721590909090909, "recall": 0.5803402646502835, "f1": 0.6969353007945518, "support": 529 }, "MICRO_AVERAGE": { "precision": 0.8660325297705489, "recall": 0.5909331131296449, "f1": 0.7025111420886262, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.8373632636330671, "recall": 0.5542431430542506, "f1": 0.6670030331621435, "support": 60550 } }

**mistralai@Mistral-7B-Instruct-v0.3** · Setting1 · icl · answer.txt · zhaorui
MICRO P/R/F1: 0.539066 / 0.340247 / 0.417179 · MACRO P/R/F1: 0.414726 / 0.258951 / 0.318829
{ "PROFESSION": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 0 }, "NAME": { "precision": 0.5695412844036697, "recall": 0.36599457611130765, "f1": 0.4456248654080827, "support": 8481 }, "LOCATION": { "precision": 0.43686459407691175, "recall": 0.3007709474538446, "f1": 0.3562631420847101, "support": 9858 }, "ID": { "precision": 0.40844444444444444, "recall": 0.2053401854541392, "f1": 0.2732882313582633, "support": 8951 }, "DATE": { "precision": 0.7598395108903324, "recall": 0.5191228299177653, "f1": 0.616828227995347, "support": 7661 }, "AGE": { "precision": 0.7283950617283951, "recall": 0.42142857142857143, "f1": 0.5339366515837104, "support": 140 }, "CONTACT": { "precision": 0.0, "recall": 0.0, "f1": 0.0, "support": 10 }, "MICRO_AVERAGE": { "precision": 0.5390656736628301, "recall": 0.3402467166177602, "f1": 0.4171789856084952, "support": 35101 }, "MACRO_AVERAGE": { "precision": 0.4147264136491076, "recall": 0.2589510157665183, "f1": 0.31882863041084586, "support": 35101 } }

**mistralai@Mistral-7B-Instruct-v0.3** · Setting3 · icl · answer.txt · zhaorui
MICRO P/R/F1: 0.840219 / 0.657688 / 0.737832 · MACRO P/R/F1: 0.80437 / 0.634122 / 0.709172
{ "PROFESSION": { "precision": 0.6166306695464363, "recall": 0.40182969739619984, "f1": 0.4865786109927567, "support": 1421 }, "NAME": { "precision": 0.8296005972377752, "recall": 0.6472986748216106, "f1": 0.7271983640081799, "support": 13734 }, "LOCATION": { "precision": 0.8358999509563512, "recall": 0.6168657256605139, "f1": 0.7098708871303623, "support": 13815 }, "ID": { "precision": 0.8709163346613545, "recall": 0.8419566054692516, "f1": 0.8561916574188915, "support": 7789 }, "DATE": { "precision": 0.8623968675709691, "recall": 0.6431662929551024, "f1": 0.7368200961796948, "support": 19177 }, "AGE": { "precision": 0.7862318840579711, "recall": 0.6374541003671971, "f1": 0.7040692172502365, "support": 4085 }, "CONTACT": { "precision": 0.8289156626506025, "recall": 0.6502835538752363, "f1": 0.728813559322034, "support": 529 }, "MICRO_AVERAGE": { "precision": 0.8402185838467381, "recall": 0.6576878612716763, "f1": 0.7378318789024142, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.8043702809544941, "recall": 0.634122092935016, "f1": 0.7091715956399902, "support": 60550 } }

**zhaorui-nb@Llama-2-7b-hf._.lora_ft._.Setting3** · Setting3 · ft · answer.txt · zhaorui
MICRO P/R/F1: 0.939451 / 0.91787 / 0.928535 · MACRO P/R/F1: 0.927537 / 0.89448 / 0.910709
{ "PROFESSION": { "precision": 0.8394160583941606, "recall": 0.8092892329345531, "f1": 0.8240773916159082, "support": 1421 }, "NAME": { "precision": 0.9598892464267006, "recall": 0.9339595165283239, "f1": 0.9467468723474923, "support": 13734 }, "LOCATION": { "precision": 0.9257186081694403, "recall": 0.8858487151646761, "f1": 0.9053449232476419, "support": 13815 }, "ID": { "precision": 0.9340963400830085, "recall": 0.9535242007959943, "f1": 0.943710292249047, "support": 7789 }, "DATE": { "precision": 0.9448827292110874, "recall": 0.9243364446993795, "f1": 0.934496665524422, "support": 19177 }, "AGE": { "precision": 0.9343083787803669, "recall": 0.9226438188494492, "f1": 0.9284394629880527, "support": 4085 }, "CONTACT": { "precision": 0.9544468546637744, "recall": 0.831758034026465, "f1": 0.888888888888889, "support": 529 }, "MICRO_AVERAGE": { "precision": 0.9394513091837252, "recall": 0.9178695293146161, "f1": 0.928535030782982, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.9275368879612198, "recall": 0.8944799947141202, "f1": 0.9107085653591485, "support": 60550 } }

**zhaorui-nb@Mistral-7B-Instruct-v0.3._.lora_ft._.Setting3** · Setting3 · ft · answer.txt · zhaorui
MICRO P/R/F1: 0.954996 / 0.934682 / 0.94473 · MACRO P/R/F1: 0.943861 / 0.922748 / 0.933185
{ "PROFESSION": { "precision": 0.871866295264624, "recall": 0.8810696692470091, "f1": 0.8764438221911095, "support": 1421 }, "NAME": { "precision": 0.9717311852017603, "recall": 0.9485947284112421, "f1": 0.9600235805607752, "support": 13734 }, "LOCATION": { "precision": 0.9425995492111194, "recall": 0.9081433224755701, "f1": 0.9250506912442397, "support": 13815 }, "ID": { "precision": 0.9509366636931311, "recall": 0.9580177172936192, "f1": 0.9544640573036582, "support": 7789 }, "DATE": { "precision": 0.9560117302052786, "recall": 0.9349741878291704, "f1": 0.9453759358852685, "support": 19177 }, "AGE": { "precision": 0.9751058003485188, "recall": 0.9588739290085679, "f1": 0.9669217477166132, "support": 4085 }, "CONTACT": { "precision": 0.9387755102040817, "recall": 0.8695652173913043, "f1": 0.9028459273797841, "support": 529 }, "MICRO_AVERAGE": { "precision": 0.9549964564138909, "recall": 0.9346820809248555, "f1": 0.9447300771208227, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.9438609620183591, "recall": 0.9227483959509263, "f1": 0.9331852805566726, "support": 60550 } }

**zhaorui-nb@Phi-3-mini-4k-instruct._.lora_ft._.Setting3** · Setting3 · ft · answer.txt · zhaorui
MICRO P/R/F1: 0.927589 / 0.910768 / 0.919101 · MACRO P/R/F1: 0.916656 / 0.891351 / 0.903827
{ "PROFESSION": { "precision": 0.8317621464829587, "recall": 0.8071780436312456, "f1": 0.8192857142857142, "support": 1421 }, "NAME": { "precision": 0.9519353148162012, "recall": 0.9258045725935634, "f1": 0.9386881252076336, "support": 13734 }, "LOCATION": { "precision": 0.8967751435723752, "recall": 0.8816503800217155, "f1": 0.8891484469102456, "support": 13815 }, "ID": { "precision": 0.9325942915392457, "recall": 0.9396584927461805, "f1": 0.9361130651659525, "support": 7789 }, "DATE": { "precision": 0.9372863247863248, "recall": 0.9149502007613287, "f1": 0.9259835870913267, "support": 19177 }, "AGE": { "precision": 0.9274193548387096, "recall": 0.9290085679314566, "f1": 0.9282132811544577, "support": 4085 }, "CONTACT": { "precision": 0.9388185654008439, "recall": 0.8412098298676749, "f1": 0.8873379860418744, "support": 529 }, "MICRO_AVERAGE": { "precision": 0.9275886429388415, "recall": 0.9107679603633361, "f1": 0.9191013483108614, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.9166558773480942, "recall": 0.8913514410790235, "f1": 0.9038265818067448, "support": 60550 } }

**zhaorui-nb@Qwen1.5-7B-Chat._.lora_ft._.Setting3** · Setting3 · ft · answer.txt · zhaorui
MICRO P/R/F1: 0.946103 / 0.932634 / 0.93932 · MACRO P/R/F1: 0.930536 / 0.914584 / 0.922491
{ "PROFESSION": { "precision": 0.8327574291637871, "recall": 0.8479943701618579, "f1": 0.8403068340306833, "support": 1421 }, "NAME": { "precision": 0.9603783421464214, "recall": 0.9389107324887142, "f1": 0.9495232134310224, "support": 13734 }, "LOCATION": { "precision": 0.9241783515097559, "recall": 0.9017010495837857, "f1": 0.9128013482816736, "support": 13815 }, "ID": { "precision": 0.9327973872629066, "recall": 0.953395814610348, "f1": 0.942984126984127, "support": 7789 }, "DATE": { "precision": 0.9644054269752593, "recall": 0.9451947645617146, "f1": 0.9547034657115769, "support": 19177 }, "AGE": { "precision": 0.9534032690900219, "recall": 0.956670746634027, "f1": 0.9550342130987292, "support": 4085 }, "CONTACT": { "precision": 0.9458333333333333, "recall": 0.8582230623818525, "f1": 0.8999008919722498, "support": 529 }, "MICRO_AVERAGE": { "precision": 0.9461030692936604, "recall": 0.932634186622626, "f1": 0.9393203479765133, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.9305362199259266, "recall": 0.9145843629174714, "f1": 0.9224913361066956, "support": 60550 } }

**zhaorui-nb@Yi-1.5-6B-Chat._.lora_ft._.Setting3** · Setting3 · ft · answer.txt · zhaorui
MICRO P/R/F1: 0.93232 / 0.915706 / 0.923938 · MACRO P/R/F1: 0.925996 / 0.905732 / 0.915752
{ "PROFESSION": { "precision": 0.8453970484891076, "recall": 0.8465869106263195, "f1": 0.8459915611814347, "support": 1421 }, "NAME": { "precision": 0.9679597808959256, "recall": 0.9392747924858017, "f1": 0.953401574221204, "support": 13734 }, "LOCATION": { "precision": 0.9262214861100655, "recall": 0.8905537459283388, "f1": 0.9080374935419588, "support": 13815 }, "ID": { "precision": 0.9366515837104072, "recall": 0.956733855437155, "f1": 0.9465862178469355, "support": 7789 }, "DATE": { "precision": 0.9129951281508155, "recall": 0.8990457318662982, "f1": 0.905966737605423, "support": 19177 }, "AGE": { "precision": 0.9451589420043679, "recall": 0.9534883720930233, "f1": 0.9493053863027053, "support": 4085 }, "CONTACT": { "precision": 0.9475890985324947, "recall": 0.8544423440453687, "f1": 0.8986083499005963, "support": 529 }, "MICRO_AVERAGE": { "precision": 0.9323199542634225, "recall": 0.9157060280759702, "f1": 0.9239383107956108, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.9259961525561692, "recall": 0.905732250354615, "f1": 0.9157521144964899, "support": 60550 } }

**zhaorui-nb@pythia-1b._.lora_ft._.Setting3** · Setting3 · ft · answer.txt · zhaorui
MICRO P/R/F1: 0.887324 / 0.834319 / 0.860005 · MACRO P/R/F1: 0.86639 / 0.796946 / 0.830219
{ "PROFESSION": { "precision": 0.7373107747105966, "recall": 0.5826882477128783, "f1": 0.6509433962264152, "support": 1421 }, "NAME": { "precision": 0.9343608199029886, "recall": 0.8695937090432503, "f1": 0.9008146025041484, "support": 13734 }, "LOCATION": { "precision": 0.8689831048772713, "recall": 0.7892870068765834, "f1": 0.827219967378523, "support": 13815 }, "ID": { "precision": 0.8819513406156901, "recall": 0.9121838490178457, "f1": 0.8968128747238876, "support": 7789 }, "DATE": { "precision": 0.8795610425240055, "recall": 0.8358971684830787, "f1": 0.8571734131864607, "support": 19177 }, "AGE": { "precision": 0.8835836909871244, "recall": 0.8063647490820074, "f1": 0.8432100345577882, "support": 4085 }, "CONTACT": { "precision": 0.8789808917197452, "recall": 0.782608695652174, "f1": 0.828, "support": 529 }, "MICRO_AVERAGE": { "precision": 0.8873236962745683, "recall": 0.8343187448389761, "f1": 0.8600052773592775, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.866390237905346, "recall": 0.7969462036954026, "f1": 0.8302185820602073, "support": 60550 } }

**zhaorui-nb@pythia-2.8b._.lora_ft._.Setting3** · Setting3 · ft · answer.txt · zhaorui
MICRO P/R/F1: 0.910426 / 0.873212 / 0.891431 · MACRO P/R/F1: 0.907407 / 0.84512 / 0.875156
{ "PROFESSION": { "precision": 0.8442906574394463, "recall": 0.6868402533427164, "f1": 0.7574699262708575, "support": 1421 }, "NAME": { "precision": 0.9456195619561956, "recall": 0.917940876656473, "f1": 0.9315746693268306, "support": 13734 }, "LOCATION": { "precision": 0.9062764728532923, "recall": 0.8518277234889613, "f1": 0.8782089552238806, "support": 13815 }, "ID": { "precision": 0.9455119768360095, "recall": 0.9223263576839132, "f1": 0.9337752648339507, "support": 7789 }, "DATE": { "precision": 0.8768380379950628, "recall": 0.8520102205767326, "f1": 0.8642458543810003, "support": 19177 }, "AGE": { "precision": 0.9146810146041506, "recall": 0.8739290085679314, "f1": 0.8938407611417125, "support": 4085 }, "CONTACT": { "precision": 0.9186295503211992, "recall": 0.8109640831758034, "f1": 0.8614457831325301, "support": 529 }, "MICRO_AVERAGE": { "precision": 0.9104261730520878, "recall": 0.8732122213047069, "f1": 0.8914309799789253, "support": 60550 }, "MACRO_AVERAGE": { "precision": 0.9074067531436223, "recall": 0.8451197890703616, "f1": 0.8751563931796076, "support": 60550 } }
