results/all-MiniLM-L12-v2/MassiveIntentClassification.json
{
"mteb_version": "0.0.2",
"test": {
"af": {
"accuracy": 0.3894082044384667,
"accuracy_stderr": 0.014843298255043774,
"f1": 0.36222023448848595,
"f1_stderr": 0.011974727113244064,
"main_score": 0.3894082044384667
},
"am": {
"accuracy": 0.02451244115669133,
"accuracy_stderr": 0.008717114542098295,
"f1": 0.011859369824825732,
"f1_stderr": 0.005132531160890538,
"main_score": 0.02451244115669133
},
"ar": {
"accuracy": 0.20938130464021523,
"accuracy_stderr": 0.014781165304556301,
"f1": 0.17984223607695032,
"f1_stderr": 0.008919559305642803,
"main_score": 0.20938130464021523
},
"az": {
"accuracy": 0.3425016812373907,
"accuracy_stderr": 0.010309371807210924,
"f1": 0.33954933856088615,
"f1_stderr": 0.00928202987170274,
"main_score": 0.3425016812373907
},
"bn": {
"accuracy": 0.1366509751176866,
"accuracy_stderr": 0.010569301805422461,
"f1": 0.12091606412618154,
"f1_stderr": 0.007824784073012972,
"main_score": 0.1366509751176866
},
"cy": {
"accuracy": 0.357128446536651,
"accuracy_stderr": 0.015111284892881,
"f1": 0.33620710516405233,
"f1_stderr": 0.014410375777490462,
"main_score": 0.357128446536651
},
"da": {
"accuracy": 0.4442501681237391,
"accuracy_stderr": 0.010917226036449498,
"f1": 0.41207701667671814,
"f1_stderr": 0.009968732302602255,
"main_score": 0.4442501681237391
},
"de": {
"accuracy": 0.44166106254203097,
"accuracy_stderr": 0.01748503052017763,
"f1": 0.40374580049861,
"f1_stderr": 0.009346902426791745,
"main_score": 0.44166106254203097
},
"el": {
"accuracy": 0.28698722259583054,
"accuracy_stderr": 0.008972516821256162,
"f1": 0.24131330009557753,
"f1_stderr": 0.008101240183854662,
"main_score": 0.28698722259583054
},
"en": {
"accuracy": 0.6714862138533961,
"accuracy_stderr": 0.017584167880886657,
"f1": 0.6529267177342918,
"f1_stderr": 0.013490884180998986,
"main_score": 0.6714862138533961
},
"es": {
"accuracy": 0.40907868190988567,
"accuracy_stderr": 0.014247690880039129,
"f1": 0.3970580551316215,
"f1_stderr": 0.012167821019417905,
"main_score": 0.40907868190988567
},
"evaluation_time": 386.45,
"fa": {
"accuracy": 0.23517148621385342,
"accuracy_stderr": 0.01352049395347175,
"f1": 0.20450403227141453,
"f1_stderr": 0.009099316185835598,
"main_score": 0.23517148621385342
},
"fi": {
"accuracy": 0.3927370544720915,
"accuracy_stderr": 0.01329262451350613,
"f1": 0.36445576637033883,
"f1_stderr": 0.01006281511095352,
"main_score": 0.3927370544720915
},
"fr": {
"accuracy": 0.4481506388702085,
"accuracy_stderr": 0.023517524577099,
"f1": 0.42613350883262935,
"f1_stderr": 0.015593267127668911,
"main_score": 0.4481506388702085
},
"he": {
"accuracy": 0.23648285137861466,
"accuracy_stderr": 0.016919664380503557,
"f1": 0.19948568467541378,
"f1_stderr": 0.012338106143404236,
"main_score": 0.23648285137861466
},
"hi": {
"accuracy": 0.1797579018157364,
"accuracy_stderr": 0.012403111643199464,
"f1": 0.16067396613569118,
"f1_stderr": 0.011239703284886375,
"main_score": 0.1797579018157364
},
"hu": {
"accuracy": 0.37995965030262274,
"accuracy_stderr": 0.01732376246061841,
"f1": 0.3526841971527663,
"f1_stderr": 0.015984533302175252,
"main_score": 0.37995965030262274
},
"hy": {
"accuracy": 0.08691997310020175,
"accuracy_stderr": 0.009552328665343775,
"f1": 0.07237344584036491,
"f1_stderr": 0.005801372390335636,
"main_score": 0.08691997310020175
},
"id": {
"accuracy": 0.3966039004707465,
"accuracy_stderr": 0.011316363792406083,
"f1": 0.38775085127634473,
"f1_stderr": 0.00859858741223599,
"main_score": 0.3966039004707465
},
"is": {
"accuracy": 0.3514122394082045,
"accuracy_stderr": 0.01424233341901167,
"f1": 0.3361281534585094,
"f1_stderr": 0.008134173704746043,
"main_score": 0.3514122394082045
},
"it": {
"accuracy": 0.4317081371889711,
"accuracy_stderr": 0.01646682553173883,
"f1": 0.4180158989235553,
"f1_stderr": 0.010779951123474605,
"main_score": 0.4317081371889711
},
"ja": {
"accuracy": 0.30944855413584393,
"accuracy_stderr": 0.020806655958485746,
"f1": 0.2778570205803673,
"f1_stderr": 0.01501767142701864,
"main_score": 0.30944855413584393
},
"jv": {
"accuracy": 0.36694687289845324,
"accuracy_stderr": 0.011431474703272793,
"f1": 0.3421258336813279,
"f1_stderr": 0.009092247976934024,
"main_score": 0.36694687289845324
},
"ka": {
"accuracy": 0.09169468728984533,
"accuracy_stderr": 0.011049247668196564,
"f1": 0.06904570655222886,
"f1_stderr": 0.007360549048080932,
"main_score": 0.09169468728984533
},
"km": {
"accuracy": 0.04986550100874244,
"accuracy_stderr": 0.01530871660281756,
"f1": 0.017161855654054863,
"f1_stderr": 0.005183985270733025,
"main_score": 0.04986550100874244
},
"kn": {
"accuracy": 0.030766644250168125,
"accuracy_stderr": 0.00991474026911328,
"f1": 0.01957772420146887,
"f1_stderr": 0.005276356889400735,
"main_score": 0.030766644250168125
},
"ko": {
"accuracy": 0.19966375252185609,
"accuracy_stderr": 0.0152122164518042,
"f1": 0.16545470254940456,
"f1_stderr": 0.009707242309864661,
"main_score": 0.19966375252185609
},
"lv": {
"accuracy": 0.38611297915265635,
"accuracy_stderr": 0.015837256220578348,
"f1": 0.37447802930149615,
"f1_stderr": 0.013684507219135222,
"main_score": 0.38611297915265635
},
"ml": {
"accuracy": 0.028513786146603902,
"accuracy_stderr": 0.009955197036232122,
"f1": 0.008642500845287098,
"f1_stderr": 0.001571902305200399,
"main_score": 0.028513786146603902
},
"mn": {
"accuracy": 0.2324815063887021,
"accuracy_stderr": 0.01635089583044442,
"f1": 0.22162182623622098,
"f1_stderr": 0.011269592423222553,
"main_score": 0.2324815063887021
},
"ms": {
"accuracy": 0.36213853396099527,
"accuracy_stderr": 0.009828554623507792,
"f1": 0.3362879988681565,
"f1_stderr": 0.00871973199916617,
"main_score": 0.36213853396099527
},
"my": {
"accuracy": 0.04381304640215199,
"accuracy_stderr": 0.01415442136453105,
"f1": 0.014197071894925672,
"f1_stderr": 0.00228976745865627,
"main_score": 0.04381304640215199
},
"nb": {
"accuracy": 0.4191324815063887,
"accuracy_stderr": 0.011112712084506206,
"f1": 0.3889056261628219,
"f1_stderr": 0.008342940017293013,
"main_score": 0.4191324815063887
},
"nl": {
"accuracy": 0.4185272360457296,
"accuracy_stderr": 0.01342975121724947,
"f1": 0.3879874724974811,
"f1_stderr": 0.013060997840714501,
"main_score": 0.4185272360457296
},
"pl": {
"accuracy": 0.3763281775386685,
"accuracy_stderr": 0.011234691964217235,
"f1": 0.345071421221765,
"f1_stderr": 0.008292412301367411,
"main_score": 0.3763281775386685
},
"pt": {
"accuracy": 0.4512104909213182,
"accuracy_stderr": 0.017631972626488985,
"f1": 0.4332946794837761,
"f1_stderr": 0.00818873016542884,
"main_score": 0.4512104909213182
},
"ro": {
"accuracy": 0.4171486213853396,
"accuracy_stderr": 0.012473104620689485,
"f1": 0.39500043810450014,
"f1_stderr": 0.008235779839511964,
"main_score": 0.4171486213853396
},
"ru": {
"accuracy": 0.263315400134499,
"accuracy_stderr": 0.012102527225848628,
"f1": 0.24213252556865478,
"f1_stderr": 0.011173007158836493,
"main_score": 0.263315400134499
},
"sl": {
"accuracy": 0.38520511096166776,
"accuracy_stderr": 0.00969326307826617,
"f1": 0.37079001320228344,
"f1_stderr": 0.006833736740435372,
"main_score": 0.38520511096166776
},
"sq": {
"accuracy": 0.4162071284465367,
"accuracy_stderr": 0.016218007493715093,
"f1": 0.3989522566274897,
"f1_stderr": 0.0121607059333764,
"main_score": 0.4162071284465367
},
"sv": {
"accuracy": 0.4041694687289846,
"accuracy_stderr": 0.01553012042264413,
"f1": 0.3843895125974106,
"f1_stderr": 0.009949921832355983,
"main_score": 0.4041694687289846
},
"sw": {
"accuracy": 0.3527908540685945,
"accuracy_stderr": 0.012264324277117331,
"f1": 0.33807909846971695,
"f1_stderr": 0.013608148649758832,
"main_score": 0.3527908540685945
},
"ta": {
"accuracy": 0.13096839273705446,
"accuracy_stderr": 0.0057167015886462795,
"f1": 0.10242672209632939,
"f1_stderr": 0.00550341266553369,
"main_score": 0.13096839273705446
},
"te": {
"accuracy": 0.025622057834566236,
"accuracy_stderr": 0.006001428787444009,
"f1": 0.010615210594147622,
"f1_stderr": 0.003218268654456316,
"main_score": 0.025622057834566236
},
"th": {
"accuracy": 0.10537995965030263,
"accuracy_stderr": 0.02135910043884725,
"f1": 0.061708791409071,
"f1_stderr": 0.009225288789079054,
"main_score": 0.10537995965030263
},
"tl": {
"accuracy": 0.3856086079354405,
"accuracy_stderr": 0.016372041294273,
"f1": 0.3501569008015146,
"f1_stderr": 0.010495520140104133,
"main_score": 0.3856086079354405
},
"tr": {
"accuracy": 0.3589778076664425,
"accuracy_stderr": 0.010103327522324162,
"f1": 0.3390602650751521,
"f1_stderr": 0.011139196270459638,
"main_score": 0.3589778076664425
},
"ur": {
"accuracy": 0.161768661735037,
"accuracy_stderr": 0.006479114606130416,
"f1": 0.1571392525925565,
"f1_stderr": 0.008317312650251166,
"main_score": 0.161768661735037
},
"vi": {
"accuracy": 0.3737726967047747,
"accuracy_stderr": 0.012185155815943203,
"f1": 0.35652051460172907,
"f1_stderr": 0.013863954475315505,
"main_score": 0.3737726967047747
},
"zh-CN": {
"accuracy": 0.2374243443174176,
"accuracy_stderr": 0.018366901003595087,
"f1": 0.19255371431159424,
"f1_stderr": 0.006108510379511656,
"main_score": 0.2374243443174176
},
"zh-TW": {
"accuracy": 0.2238735709482179,
"accuracy_stderr": 0.01456599857848224,
"f1": 0.19094067620374383,
"f1_stderr": 0.007052425401653985,
"main_score": 0.2238735709482179
}
},
"mteb_dataset_name": "MassiveIntentClassification",
"dataset_revision": "072a486a144adf7f4479a4a0dddb2152e161e1ea"
}