{
"mteb_version": "0.0.2",
"test": {
"af": {
"accuracy": 0.45709482178883654,
"accuracy_stderr": 0.018048817496500454,
"f1": 0.43612288503911695,
"f1_stderr": 0.017698725320965992,
"main_score": 0.45709482178883654
},
"am": {
"accuracy": 0.07407531943510423,
"accuracy_stderr": 0.023948304558496326,
"f1": 0.038875366763112984,
"f1_stderr": 0.009066174049756005,
"main_score": 0.07407531943510423
},
"ar": {
"accuracy": 0.2761936785474109,
"accuracy_stderr": 0.015581869221682493,
"f1": 0.25329931057423755,
"f1_stderr": 0.01357445255552927,
"main_score": 0.2761936785474109
},
"az": {
"accuracy": 0.3957969065232011,
"accuracy_stderr": 0.02333492089030947,
"f1": 0.37392584326173106,
"f1_stderr": 0.023506622512889427,
"main_score": 0.3957969065232011
},
"bn": {
"accuracy": 0.189778076664425,
"accuracy_stderr": 0.016878283718274097,
"f1": 0.17620144033142865,
"f1_stderr": 0.01610687205590955,
"main_score": 0.189778076664425
},
"cy": {
"accuracy": 0.4140215198386012,
"accuracy_stderr": 0.01289898093845506,
"f1": 0.3806372767307641,
"f1_stderr": 0.013201375755239927,
"main_score": 0.4140215198386012
},
"da": {
"accuracy": 0.4946872898453262,
"accuracy_stderr": 0.01782860103448663,
"f1": 0.4690610579296604,
"f1_stderr": 0.015080906559115863,
"main_score": 0.4946872898453262
},
"de": {
"accuracy": 0.5207128446536652,
"accuracy_stderr": 0.017987951609640066,
"f1": 0.49469135337789893,
"f1_stderr": 0.017339717049259035,
"main_score": 0.5207128446536652
},
"el": {
"accuracy": 0.35507733691997306,
"accuracy_stderr": 0.01686005344002444,
"f1": 0.3166524248503607,
"f1_stderr": 0.014614074977917998,
"main_score": 0.35507733691997306
},
"en": {
"accuracy": 0.7457632817753866,
"accuracy_stderr": 0.013458680724318188,
"f1": 0.739563845494346,
"f1_stderr": 0.010931466876626106,
"main_score": 0.7457632817753866
},
"es": {
"accuracy": 0.5074310692669806,
"accuracy_stderr": 0.01888109682647732,
"f1": 0.4820093905893342,
"f1_stderr": 0.014690821725764849,
"main_score": 0.5074310692669806
},
"evaluation_time": 82.3,
"fa": {
"accuracy": 0.29004707464694013,
"accuracy_stderr": 0.02475119490960584,
"f1": 0.2578452995069975,
"f1_stderr": 0.02113837814231098,
"main_score": 0.29004707464694013
},
"fi": {
"accuracy": 0.4580026899798252,
"accuracy_stderr": 0.015791070857301488,
"f1": 0.41794594657649914,
"f1_stderr": 0.01516154956850759,
"main_score": 0.4580026899798252
},
"fr": {
"accuracy": 0.537626092804304,
"accuracy_stderr": 0.021141625365393178,
"f1": 0.5170423088264189,
"f1_stderr": 0.018171889464145795,
"main_score": 0.537626092804304
},
"he": {
"accuracy": 0.25682582380632146,
"accuracy_stderr": 0.020788580216964773,
"f1": 0.23167903144579022,
"f1_stderr": 0.015985513918557532,
"main_score": 0.25682582380632146
},
"hi": {
"accuracy": 0.23022864828513784,
"accuracy_stderr": 0.013503633415071546,
"f1": 0.21459384490296488,
"f1_stderr": 0.013440774718525712,
"main_score": 0.23022864828513784
},
"hu": {
"accuracy": 0.4408540685944856,
"accuracy_stderr": 0.01866609665673465,
"f1": 0.40993402601455725,
"f1_stderr": 0.017917589482051886,
"main_score": 0.4408540685944856
},
"hy": {
"accuracy": 0.1483187626092804,
"accuracy_stderr": 0.021541604871525153,
"f1": 0.12970096153546534,
"f1_stderr": 0.016982875837872592,
"main_score": 0.1483187626092804
},
"id": {
"accuracy": 0.4434767989240081,
"accuracy_stderr": 0.018446139475051748,
"f1": 0.42215392783764394,
"f1_stderr": 0.015947895961860165,
"main_score": 0.4434767989240081
},
"is": {
"accuracy": 0.4308002689979825,
"accuracy_stderr": 0.013928765146957903,
"f1": 0.4001184510787284,
"f1_stderr": 0.012941761446772119,
"main_score": 0.4308002689979825
},
"it": {
"accuracy": 0.5171486213853396,
"accuracy_stderr": 0.02112051753671044,
"f1": 0.48492328079605845,
"f1_stderr": 0.01821163378184312,
"main_score": 0.5171486213853396
},
"ja": {
"accuracy": 0.36748486886348347,
"accuracy_stderr": 0.020140325763923984,
"f1": 0.3546615048175051,
"f1_stderr": 0.019468938849941567,
"main_score": 0.36748486886348347
},
"jv": {
"accuracy": 0.44566240753194347,
"accuracy_stderr": 0.025514603987973852,
"f1": 0.4090741041356553,
"f1_stderr": 0.020308960810010962,
"main_score": 0.44566240753194347
},
"ka": {
"accuracy": 0.1483523873570948,
"accuracy_stderr": 0.023833187094751775,
"f1": 0.12296442463483719,
"f1_stderr": 0.016503702339108008,
"main_score": 0.1483523873570948
},
"km": {
"accuracy": 0.09754539340954943,
"accuracy_stderr": 0.028493655739369343,
"f1": 0.04250353307219123,
"f1_stderr": 0.010367202768171841,
"main_score": 0.09754539340954943
},
"kn": {
"accuracy": 0.08315400134498993,
"accuracy_stderr": 0.02278460649017292,
"f1": 0.053881185487834035,
"f1_stderr": 0.010921057599737978,
"main_score": 0.08315400134498993
},
"ko": {
"accuracy": 0.25719569603227976,
"accuracy_stderr": 0.021994517888979877,
"f1": 0.23205230051654158,
"f1_stderr": 0.020727476907684134,
"main_score": 0.25719569603227976
},
"lv": {
"accuracy": 0.42747141896435775,
"accuracy_stderr": 0.018535167591560514,
"f1": 0.4061202626722604,
"f1_stderr": 0.016992093452132893,
"main_score": 0.42747141896435775
},
"ml": {
"accuracy": 0.07252858103564222,
"accuracy_stderr": 0.017301591034439637,
"f1": 0.03448646759763805,
"f1_stderr": 0.011727962256104762,
"main_score": 0.07252858103564222
},
"mn": {
"accuracy": 0.2903496973772697,
"accuracy_stderr": 0.020135721978509356,
"f1": 0.2663495414552696,
"f1_stderr": 0.019335659913804884,
"main_score": 0.2903496973772697
},
"ms": {
"accuracy": 0.4464694014794889,
"accuracy_stderr": 0.01974938048794482,
"f1": 0.40192107405242156,
"f1_stderr": 0.0160791902882972,
"main_score": 0.4464694014794889
},
"my": {
"accuracy": 0.10067249495628783,
"accuracy_stderr": 0.017903528470304245,
"f1": 0.05764723442216905,
"f1_stderr": 0.010830234547614895,
"main_score": 0.10067249495628783
},
"nb": {
"accuracy": 0.47357094821788837,
"accuracy_stderr": 0.019302596852505693,
"f1": 0.4459691441744327,
"f1_stderr": 0.01636202093822695,
"main_score": 0.47357094821788837
},
"nl": {
"accuracy": 0.4915265635507734,
"accuracy_stderr": 0.005419324712281324,
"f1": 0.4615820727175712,
"f1_stderr": 0.00693720002474299,
"main_score": 0.4915265635507734
},
"pl": {
"accuracy": 0.4472091459314056,
"accuracy_stderr": 0.02249995672219696,
"f1": 0.4288213581673335,
"f1_stderr": 0.02189256978537112,
"main_score": 0.4472091459314056
},
"pt": {
"accuracy": 0.5299932750504373,
"accuracy_stderr": 0.01896793453085118,
"f1": 0.5101176637403334,
"f1_stderr": 0.013034194283010447,
"main_score": 0.5299932750504373
},
"ro": {
"accuracy": 0.4997310020174849,
"accuracy_stderr": 0.0138848631595807,
"f1": 0.4722673671303613,
"f1_stderr": 0.012167988761980044,
"main_score": 0.4997310020174849
},
"ru": {
"accuracy": 0.2874915938130464,
"accuracy_stderr": 0.010995858074624144,
"f1": 0.2725888866616121,
"f1_stderr": 0.012069361329188753,
"main_score": 0.2874915938130464
},
"sl": {
"accuracy": 0.42259583053127103,
"accuracy_stderr": 0.01840954175149463,
"f1": 0.41261927156734785,
"f1_stderr": 0.016966097470546233,
"main_score": 0.42259583053127103
},
"sq": {
"accuracy": 0.4913584398117014,
"accuracy_stderr": 0.015678690251241585,
"f1": 0.4708320600523055,
"f1_stderr": 0.016268134765516237,
"main_score": 0.4913584398117014
},
"sv": {
"accuracy": 0.46825823806321454,
"accuracy_stderr": 0.010398911956107197,
"f1": 0.43404234700847566,
"f1_stderr": 0.009400109000443425,
"main_score": 0.46825823806321454
},
"sw": {
"accuracy": 0.43184263618022867,
"accuracy_stderr": 0.024439918435836254,
"f1": 0.39815480841992085,
"f1_stderr": 0.021029046751848066,
"main_score": 0.43184263618022867
},
"ta": {
"accuracy": 0.19381304640215197,
"accuracy_stderr": 0.015172326809261625,
"f1": 0.16699966519668613,
"f1_stderr": 0.017128365697659357,
"main_score": 0.19381304640215197
},
"te": {
"accuracy": 0.07737054472091459,
"accuracy_stderr": 0.015156110302861722,
"f1": 0.038594459698077364,
"f1_stderr": 0.006009416828271714,
"main_score": 0.07737054472091459
},
"th": {
"accuracy": 0.1831540013449899,
"accuracy_stderr": 0.04682276015015863,
"f1": 0.13491482848005418,
"f1_stderr": 0.02920464544455521,
"main_score": 0.1831540013449899
},
"tl": {
"accuracy": 0.4830531271015467,
"accuracy_stderr": 0.022829792374316166,
"f1": 0.45487908214131806,
"f1_stderr": 0.01516958044926312,
"main_score": 0.4830531271015467
},
"tr": {
"accuracy": 0.4179219905850706,
"accuracy_stderr": 0.027495279292404914,
"f1": 0.4124552662271258,
"f1_stderr": 0.023346238690036228,
"main_score": 0.4179219905850706
},
"ur": {
"accuracy": 0.24462004034969737,
"accuracy_stderr": 0.014999992085632253,
"f1": 0.22270575649981797,
"f1_stderr": 0.017842030262391655,
"main_score": 0.24462004034969737
},
"vi": {
"accuracy": 0.4094149293880296,
"accuracy_stderr": 0.016350895830444415,
"f1": 0.3908540872012287,
"f1_stderr": 0.016348413456632776,
"main_score": 0.4094149293880296
},
"zh-CN": {
"accuracy": 0.3317753866845998,
"accuracy_stderr": 0.0211510354976044,
"f1": 0.3164001182395128,
"f1_stderr": 0.02027603430930915,
"main_score": 0.3317753866845998
},
"zh-TW": {
"accuracy": 0.3115669132481506,
"accuracy_stderr": 0.023912468193932,
"f1": 0.3089137619124565,
"f1_stderr": 0.026321599190456384,
"main_score": 0.3115669132481506
}
},
"mteb_dataset_name": "MassiveScenarioClassification",
"dataset_revision": "7d571f92784cd94a019292a1f45445077d0ef634"
}