{
"mteb_version": "0.0.2",
"test": {
"af": {
"accuracy": 0.3367518493611298,
"accuracy_stderr": 0.013755152391621206,
"f1": 0.3256347110149821,
"f1_stderr": 0.013267931051662647,
"main_score": 0.3367518493611298
},
"am": {
"accuracy": 0.029354404841963687,
"accuracy_stderr": 0.006124967252782422,
"f1": 0.015516151587728002,
"f1_stderr": 0.00430008455803558,
"main_score": 0.029354404841963687
},
"ar": {
"accuracy": 0.10040349697377268,
"accuracy_stderr": 0.010133497116137489,
"f1": 0.09966201768299347,
"f1_stderr": 0.0078237587480606,
"main_score": 0.10040349697377268
},
"az": {
"accuracy": 0.3074310692669805,
"accuracy_stderr": 0.014820124287586537,
"f1": 0.2996426618771265,
"f1_stderr": 0.010786525763878787,
"main_score": 0.3074310692669805
},
"bn": {
"accuracy": 0.030195023537323472,
"accuracy_stderr": 0.01001859503801087,
"f1": 0.017584038790851013,
"f1_stderr": 0.004424081986394375,
"main_score": 0.030195023537323472
},
"cy": {
"accuracy": 0.3393745796906523,
"accuracy_stderr": 0.008149476347589347,
"f1": 0.32852777305129444,
"f1_stderr": 0.010283881376816566,
"main_score": 0.3393745796906523
},
"da": {
"accuracy": 0.3847007397444519,
"accuracy_stderr": 0.012269531794184338,
"f1": 0.366554211281156,
"f1_stderr": 0.00932154910555262,
"main_score": 0.3847007397444519
},
"de": {
"accuracy": 0.3605917955615333,
"accuracy_stderr": 0.011099426881439143,
"f1": 0.3420910025963574,
"f1_stderr": 0.007766143452686158,
"main_score": 0.3605917955615333
},
"el": {
"accuracy": 0.27696704774714187,
"accuracy_stderr": 0.005043824253406088,
"f1": 0.259620726986025,
"f1_stderr": 0.006051714706768571,
"main_score": 0.27696704774714187
},
"en": {
"accuracy": 0.5172831203765972,
"accuracy_stderr": 0.011122830754020357,
"f1": 0.4993785341352476,
"f1_stderr": 0.007596756288620821,
"main_score": 0.5172831203765972
},
"es": {
"accuracy": 0.3560188298587761,
"accuracy_stderr": 0.011572776543687024,
"f1": 0.3637392773500823,
"f1_stderr": 0.00814895132990359,
"main_score": 0.3560188298587761
},
"evaluation_time": 688.77,
"fa": {
"accuracy": 0.1796570275722932,
"accuracy_stderr": 0.011197646857114439,
"f1": 0.18068628241440574,
"f1_stderr": 0.007390532604321481,
"main_score": 0.1796570275722932
},
"fi": {
"accuracy": 0.35531271015467386,
"accuracy_stderr": 0.013808312916041407,
"f1": 0.34473939228945166,
"f1_stderr": 0.009936845677630222,
"main_score": 0.35531271015467386
},
"fr": {
"accuracy": 0.38412911903160724,
"accuracy_stderr": 0.022095425347630645,
"f1": 0.37459616241573346,
"f1_stderr": 0.01462097077950036,
"main_score": 0.38412911903160724
},
"he": {
"accuracy": 0.026866173503698722,
"accuracy_stderr": 0.01199461257488419,
"f1": 0.010108314696675414,
"f1_stderr": 0.0035743900949102934,
"main_score": 0.026866173503698722
},
"hi": {
"accuracy": 0.03429724277067922,
"accuracy_stderr": 0.010605396845570632,
"f1": 0.02183248841945027,
"f1_stderr": 0.003954216714109758,
"main_score": 0.03429724277067922
},
"hu": {
"accuracy": 0.34045057162071285,
"accuracy_stderr": 0.011173590214064052,
"f1": 0.33654907990984334,
"f1_stderr": 0.011178831772973413,
"main_score": 0.34045057162071285
},
"hy": {
"accuracy": 0.031102891728312036,
"accuracy_stderr": 0.0071316972451289685,
"f1": 0.01532077582122988,
"f1_stderr": 0.0026828898543155588,
"main_score": 0.031102891728312036
},
"id": {
"accuracy": 0.4002017484868864,
"accuracy_stderr": 0.015581578978148894,
"f1": 0.39671034130433275,
"f1_stderr": 0.010931424584520311,
"main_score": 0.4002017484868864
},
"is": {
"accuracy": 0.32632817753866844,
"accuracy_stderr": 0.009293152785542522,
"f1": 0.32784940821552594,
"f1_stderr": 0.007686520633444197,
"main_score": 0.32632817753866844
},
"it": {
"accuracy": 0.39283792871553463,
"accuracy_stderr": 0.008809491417168991,
"f1": 0.39505270762715716,
"f1_stderr": 0.007307214035879664,
"main_score": 0.39283792871553463
},
"ja": {
"accuracy": 0.04952925353059852,
"accuracy_stderr": 0.006584014122255295,
"f1": 0.048854768637327246,
"f1_stderr": 0.005080198104274519,
"main_score": 0.04952925353059852
},
"jv": {
"accuracy": 0.3494956287827841,
"accuracy_stderr": 0.010852201781280284,
"f1": 0.34331872289995574,
"f1_stderr": 0.010765987924014307,
"main_score": 0.3494956287827841
},
"ka": {
"accuracy": 0.02565568258238063,
"accuracy_stderr": 0.005325145270924312,
"f1": 0.014810261961097063,
"f1_stderr": 0.0026449108391531292,
"main_score": 0.02565568258238063
},
"km": {
"accuracy": 0.04734364492266308,
"accuracy_stderr": 0.00916026014722238,
"f1": 0.024299128442126724,
"f1_stderr": 0.00508565218816095,
"main_score": 0.04734364492266308
},
"kn": {
"accuracy": 0.03537323470073974,
"accuracy_stderr": 0.010524922054771004,
"f1": 0.02346480124575722,
"f1_stderr": 0.003922406095014941,
"main_score": 0.03537323470073974
},
"ko": {
"accuracy": 0.026798924008069942,
"accuracy_stderr": 0.008090997519516967,
"f1": 0.010555787599351715,
"f1_stderr": 0.0026174631706755917,
"main_score": 0.026798924008069942
},
"lv": {
"accuracy": 0.37905178211163415,
"accuracy_stderr": 0.013680810364302746,
"f1": 0.37663185251418285,
"f1_stderr": 0.013677903931250028,
"main_score": 0.37905178211163415
},
"ml": {
"accuracy": 0.028816408876933423,
"accuracy_stderr": 0.006222032218911547,
"f1": 0.011315907666991129,
"f1_stderr": 0.0027761111485689965,
"main_score": 0.028816408876933423
},
"mn": {
"accuracy": 0.16940147948890383,
"accuracy_stderr": 0.012530622178538668,
"f1": 0.14457096980569056,
"f1_stderr": 0.012731798019966142,
"main_score": 0.16940147948890383
},
"ms": {
"accuracy": 0.36597175521183595,
"accuracy_stderr": 0.011938632324427113,
"f1": 0.35362667420930455,
"f1_stderr": 0.008621407755947259,
"main_score": 0.36597175521183595
},
"my": {
"accuracy": 0.039576328177538665,
"accuracy_stderr": 0.011228903856089666,
"f1": 0.018601447906463835,
"f1_stderr": 0.004622684602333679,
"main_score": 0.039576328177538665
},
"nb": {
"accuracy": 0.3474781439139206,
"accuracy_stderr": 0.009899847565859776,
"f1": 0.3371866980622218,
"f1_stderr": 0.005036243595271499,
"main_score": 0.3474781439139206
},
"nl": {
"accuracy": 0.3394754539340955,
"accuracy_stderr": 0.012698585341678191,
"f1": 0.32344204483828237,
"f1_stderr": 0.009421436585037605,
"main_score": 0.3394754539340955
},
"pl": {
"accuracy": 0.3576664425016812,
"accuracy_stderr": 0.010733670488067544,
"f1": 0.3414455757429223,
"f1_stderr": 0.008650023623298406,
"main_score": 0.3576664425016812
},
"pt": {
"accuracy": 0.43053127101546745,
"accuracy_stderr": 0.010151333083676883,
"f1": 0.4174757587130514,
"f1_stderr": 0.0064632417143647815,
"main_score": 0.43053127101546745
},
"ro": {
"accuracy": 0.36197041022192333,
"accuracy_stderr": 0.011136088036617614,
"f1": 0.3510479382155576,
"f1_stderr": 0.010094822109980577,
"main_score": 0.36197041022192333
},
"ru": {
"accuracy": 0.25302622730329516,
"accuracy_stderr": 0.013605563554932873,
"f1": 0.23068955736220237,
"f1_stderr": 0.010290324854672479,
"main_score": 0.25302622730329516
},
"sl": {
"accuracy": 0.3589778076664425,
"accuracy_stderr": 0.00998400244653016,
"f1": 0.351875678911339,
"f1_stderr": 0.007736940798776665,
"main_score": 0.3589778076664425
},
"sq": {
"accuracy": 0.3660053799596503,
"accuracy_stderr": 0.0161827286494057,
"f1": 0.3653379734709672,
"f1_stderr": 0.0156425435153939,
"main_score": 0.3660053799596503
},
"sv": {
"accuracy": 0.36002017484868865,
"accuracy_stderr": 0.012013073515902487,
"f1": 0.34852989956492647,
"f1_stderr": 0.0076010399230062985,
"main_score": 0.36002017484868865
},
"sw": {
"accuracy": 0.3480833893745797,
"accuracy_stderr": 0.013447208876879613,
"f1": 0.33642396110617223,
"f1_stderr": 0.009899370738138338,
"main_score": 0.3480833893745797
},
"ta": {
"accuracy": 0.031069266980497647,
"accuracy_stderr": 0.00827223468813898,
"f1": 0.011310127144109963,
"f1_stderr": 0.003242224828948265,
"main_score": 0.031069266980497647
},
"te": {
"accuracy": 0.02528581035642232,
"accuracy_stderr": 0.006100459702045246,
"f1": 0.0149316398615809,
"f1_stderr": 0.0043622412759533645,
"main_score": 0.02528581035642232
},
"th": {
"accuracy": 0.04381304640215199,
"accuracy_stderr": 0.008706212791118988,
"f1": 0.024976624324703062,
"f1_stderr": 0.0032869011907868525,
"main_score": 0.04381304640215199
},
"tl": {
"accuracy": 0.3550773369199731,
"accuracy_stderr": 0.016947006329466927,
"f1": 0.34211359786929857,
"f1_stderr": 0.014411091489050901,
"main_score": 0.3550773369199731
},
"tr": {
"accuracy": 0.3202084734364492,
"accuracy_stderr": 0.013395189672096358,
"f1": 0.3183811859978853,
"f1_stderr": 0.011816211605741362,
"main_score": 0.3202084734364492
},
"ur": {
"accuracy": 0.09606590450571621,
"accuracy_stderr": 0.008199269100300145,
"f1": 0.10254546263580425,
"f1_stderr": 0.0058777211193856245,
"main_score": 0.09606590450571621
},
"vi": {
"accuracy": 0.3707128446536651,
"accuracy_stderr": 0.006757235879744444,
"f1": 0.3633069220797723,
"f1_stderr": 0.0064151300600404255,
"main_score": 0.3707128446536651
},
"zh-CN": {
"accuracy": 0.028143913920645598,
"accuracy_stderr": 0.009861915551097524,
"f1": 0.009821602573032814,
"f1_stderr": 0.002347452111073345,
"main_score": 0.028143913920645598
},
"zh-TW": {
"accuracy": 0.04788164088769334,
"accuracy_stderr": 0.00841183401445532,
"f1": 0.038720728965930026,
"f1_stderr": 0.004685980335123813,
"main_score": 0.04788164088769334
}
},
"mteb_dataset_name": "MassiveIntentClassification",
"dataset_revision": "072a486a144adf7f4479a4a0dddb2152e161e1ea"
}