{
"mteb_version": "0.0.2",
"test": {
"af": {
"accuracy": 0.3616677874915938,
"accuracy_stderr": 0.018068226246498054,
"f1": 0.3417650470658954,
"f1_stderr": 0.017257110301075764,
"main_score": 0.3616677874915938
},
"am": {
"accuracy": 0.07636180228648284,
"accuracy_stderr": 0.018728984532616004,
"f1": 0.047516165940066066,
"f1_stderr": 0.009408955341125607,
"main_score": 0.07636180228648284
},
"ar": {
"accuracy": 0.15262273032952253,
"accuracy_stderr": 0.020978959547060798,
"f1": 0.1426853087100533,
"f1_stderr": 0.01940788284281469,
"main_score": 0.15262273032952253
},
"az": {
"accuracy": 0.3072965702757229,
"accuracy_stderr": 0.015412793638780951,
"f1": 0.29919488400150557,
"f1_stderr": 0.01346071420496045,
"main_score": 0.3072965702757229
},
"bn": {
"accuracy": 0.07151983860121049,
"accuracy_stderr": 0.02509904429679187,
"f1": 0.04809320089379981,
"f1_stderr": 0.010696094119637595,
"main_score": 0.07151983860121049
},
"cy": {
"accuracy": 0.3473100201748487,
"accuracy_stderr": 0.0124488334882261,
"f1": 0.331867550478127,
"f1_stderr": 0.013551391662080905,
"main_score": 0.3473100201748487
},
"da": {
"accuracy": 0.39926025554808336,
"accuracy_stderr": 0.02069067101933648,
"f1": 0.37591819078203625,
"f1_stderr": 0.020704513417575194,
"main_score": 0.39926025554808336
},
"de": {
"accuracy": 0.3862474781439139,
"accuracy_stderr": 0.023861752002115603,
"f1": 0.3526469918582946,
"f1_stderr": 0.02039825688863895,
"main_score": 0.3862474781439139
},
"el": {
"accuracy": 0.27182246133154,
"accuracy_stderr": 0.01897592020190411,
"f1": 0.2598214298735641,
"f1_stderr": 0.014119162065115914,
"main_score": 0.27182246133154
},
"en": {
"accuracy": 0.5857767316745124,
"accuracy_stderr": 0.01286773893116942,
"f1": 0.5663640403777431,
"f1_stderr": 0.0147265418231733,
"main_score": 0.5857767316745124
},
"es": {
"accuracy": 0.3943510423671822,
"accuracy_stderr": 0.026100169510780077,
"f1": 0.3901241380882482,
"f1_stderr": 0.018522527401672432,
"main_score": 0.3943510423671822
},
"evaluation_time": 186.97,
"fa": {
"accuracy": 0.2143241425689307,
"accuracy_stderr": 0.014871264622835135,
"f1": 0.2099610997880673,
"f1_stderr": 0.012491078034913781,
"main_score": 0.2143241425689307
},
"fi": {
"accuracy": 0.3320780094149294,
"accuracy_stderr": 0.02885551965873153,
"f1": 0.31307566481777355,
"f1_stderr": 0.025312330710409562,
"main_score": 0.3320780094149294
},
"fr": {
"accuracy": 0.40262273032952256,
"accuracy_stderr": 0.01568672869249043,
"f1": 0.3876326193862667,
"f1_stderr": 0.015785929187868388,
"main_score": 0.40262273032952256
},
"he": {
"accuracy": 0.07420981842636179,
"accuracy_stderr": 0.013885066729353843,
"f1": 0.03543499363221243,
"f1_stderr": 0.005280679394744658,
"main_score": 0.07420981842636179
},
"hi": {
"accuracy": 0.08063214525891056,
"accuracy_stderr": 0.018953026896352625,
"f1": 0.06001076470061022,
"f1_stderr": 0.009529589012369473,
"main_score": 0.08063214525891056
},
"hu": {
"accuracy": 0.3454270342972428,
"accuracy_stderr": 0.01796206836180007,
"f1": 0.32796386445110365,
"f1_stderr": 0.015061264904963238,
"main_score": 0.3454270342972428
},
"hy": {
"accuracy": 0.08611297915265635,
"accuracy_stderr": 0.018030171711355375,
"f1": 0.05038663829641861,
"f1_stderr": 0.011988575512211939,
"main_score": 0.08611297915265635
},
"id": {
"accuracy": 0.4004371217215871,
"accuracy_stderr": 0.01404568947341625,
"f1": 0.39222859255536213,
"f1_stderr": 0.012932632408944067,
"main_score": 0.4004371217215871
},
"is": {
"accuracy": 0.33570948217888363,
"accuracy_stderr": 0.014430619848830059,
"f1": 0.325474136261405,
"f1_stderr": 0.013144531560167396,
"main_score": 0.33570948217888363
},
"it": {
"accuracy": 0.40100874243443174,
"accuracy_stderr": 0.01878970644552799,
"f1": 0.3823623281618702,
"f1_stderr": 0.015883408640954753,
"main_score": 0.40100874243443174
},
"ja": {
"accuracy": 0.09956287827841291,
"accuracy_stderr": 0.013860127574999924,
"f1": 0.09304292622888562,
"f1_stderr": 0.011460136277742252,
"main_score": 0.09956287827841291
},
"jv": {
"accuracy": 0.3610625420309348,
"accuracy_stderr": 0.014757242559829069,
"f1": 0.3493978326696315,
"f1_stderr": 0.01585905206678222,
"main_score": 0.3610625420309348
},
"ka": {
"accuracy": 0.07131809011432413,
"accuracy_stderr": 0.01234394672650292,
"f1": 0.04119261350608749,
"f1_stderr": 0.008993972411199183,
"main_score": 0.07131809011432413
},
"km": {
"accuracy": 0.09663752521856087,
"accuracy_stderr": 0.023249367400766562,
"f1": 0.05055803034723423,
"f1_stderr": 0.011029344506127763,
"main_score": 0.09663752521856087
},
"kn": {
"accuracy": 0.07548755884330868,
"accuracy_stderr": 0.018621705919494837,
"f1": 0.054485487303894374,
"f1_stderr": 0.005541466169385536,
"main_score": 0.07548755884330868
},
"ko": {
"accuracy": 0.0726630800268998,
"accuracy_stderr": 0.01109968153663616,
"f1": 0.03948830479676819,
"f1_stderr": 0.005240717980572163,
"main_score": 0.0726630800268998
},
"lv": {
"accuracy": 0.370275722932078,
"accuracy_stderr": 0.016970873580212313,
"f1": 0.351587064256129,
"f1_stderr": 0.018686686188143207,
"main_score": 0.370275722932078
},
"ml": {
"accuracy": 0.07219233355749832,
"accuracy_stderr": 0.011548571262959315,
"f1": 0.03887784366381798,
"f1_stderr": 0.009048144111216743,
"main_score": 0.07219233355749832
},
"mn": {
"accuracy": 0.21526563550773367,
"accuracy_stderr": 0.022268013040144294,
"f1": 0.19531062113665904,
"f1_stderr": 0.021572352716136563,
"main_score": 0.21526563550773367
},
"ms": {
"accuracy": 0.37565568258238063,
"accuracy_stderr": 0.021238919642934104,
"f1": 0.3560943883512832,
"f1_stderr": 0.015695752600401573,
"main_score": 0.37565568258238063
},
"my": {
"accuracy": 0.09542703429724278,
"accuracy_stderr": 0.018085488767463322,
"f1": 0.0595393044954551,
"f1_stderr": 0.012788824974885593,
"main_score": 0.09542703429724278
},
"nb": {
"accuracy": 0.3570948217888366,
"accuracy_stderr": 0.016045030652912105,
"f1": 0.3402203847333534,
"f1_stderr": 0.010820413897629666,
"main_score": 0.3570948217888366
},
"nl": {
"accuracy": 0.34623402824478816,
"accuracy_stderr": 0.026409900982791338,
"f1": 0.3197765164167639,
"f1_stderr": 0.023098396668669627,
"main_score": 0.34623402824478816
},
"pl": {
"accuracy": 0.36869535978480156,
"accuracy_stderr": 0.024435847097761653,
"f1": 0.3570498707813095,
"f1_stderr": 0.02240089425484702,
"main_score": 0.36869535978480156
},
"pt": {
"accuracy": 0.44677202420981843,
"accuracy_stderr": 0.01506377443756566,
"f1": 0.43990166439939593,
"f1_stderr": 0.01660327884131786,
"main_score": 0.44677202420981843
},
"ro": {
"accuracy": 0.37289845326160054,
"accuracy_stderr": 0.022847515103374835,
"f1": 0.3578476929219868,
"f1_stderr": 0.019989642690407098,
"main_score": 0.37289845326160054
},
"ru": {
"accuracy": 0.2816408876933423,
"accuracy_stderr": 0.017238841224284256,
"f1": 0.2591777358227373,
"f1_stderr": 0.014951513344973794,
"main_score": 0.2816408876933423
},
"sl": {
"accuracy": 0.3794552790854069,
"accuracy_stderr": 0.023477322890169343,
"f1": 0.3676171037248198,
"f1_stderr": 0.022236854136642965,
"main_score": 0.3794552790854069
},
"sq": {
"accuracy": 0.37817753866846,
"accuracy_stderr": 0.015667869698676434,
"f1": 0.3706743538173397,
"f1_stderr": 0.017958498535626675,
"main_score": 0.37817753866846
},
"sv": {
"accuracy": 0.3534633490248823,
"accuracy_stderr": 0.02258011350139619,
"f1": 0.33678982626655907,
"f1_stderr": 0.01827472512218342,
"main_score": 0.3534633490248823
},
"sw": {
"accuracy": 0.3537323470073974,
"accuracy_stderr": 0.016946339163892606,
"f1": 0.3380376928569214,
"f1_stderr": 0.015633122397655224,
"main_score": 0.3537323470073974
},
"ta": {
"accuracy": 0.07192333557498319,
"accuracy_stderr": 0.016345536020268028,
"f1": 0.03870761789135318,
"f1_stderr": 0.008341493489724499,
"main_score": 0.07192333557498319
},
"te": {
"accuracy": 0.07286482851378616,
"accuracy_stderr": 0.01578433912367531,
"f1": 0.044949336989442985,
"f1_stderr": 0.006902500528600326,
"main_score": 0.07286482851378616
},
"th": {
"accuracy": 0.09468728984532615,
"accuracy_stderr": 0.022032909466266598,
"f1": 0.058668212488023064,
"f1_stderr": 0.008938477100192285,
"main_score": 0.09468728984532615
},
"tl": {
"accuracy": 0.37306657700067253,
"accuracy_stderr": 0.01286492694445391,
"f1": 0.3593425532758004,
"f1_stderr": 0.013683101609325041,
"main_score": 0.37306657700067253
},
"tr": {
"accuracy": 0.34566240753194355,
"accuracy_stderr": 0.023825690546448355,
"f1": 0.3481543683924369,
"f1_stderr": 0.020971311611925437,
"main_score": 0.34566240753194355
},
"ur": {
"accuracy": 0.1617014122394082,
"accuracy_stderr": 0.013337162017545522,
"f1": 0.15917967348590184,
"f1_stderr": 0.014468472494611709,
"main_score": 0.1617014122394082
},
"vi": {
"accuracy": 0.35907868190988573,
"accuracy_stderr": 0.01201062624794675,
"f1": 0.3548153704983825,
"f1_stderr": 0.013422987079991441,
"main_score": 0.35907868190988573
},
"zh-CN": {
"accuracy": 0.09189643577673168,
"accuracy_stderr": 0.0212268055493796,
"f1": 0.04634244579644921,
"f1_stderr": 0.010739599200391267,
"main_score": 0.09189643577673168
},
"zh-TW": {
"accuracy": 0.10188298587760591,
"accuracy_stderr": 0.024370868347336965,
"f1": 0.07355880040355851,
"f1_stderr": 0.013223708716503572,
"main_score": 0.10188298587760591
}
},
"mteb_dataset_name": "MassiveScenarioClassification",
"dataset_revision": "7d571f92784cd94a019292a1f45445077d0ef634"
}