{
"mteb_version": "0.0.2",
"test": {
"af": {
"accuracy": 0.4710154673839947,
"accuracy_stderr": 0.018564271686437515,
"f1": 0.4451973179524046,
"f1_stderr": 0.016582236744477792,
"main_score": 0.4710154673839947
},
"am": {
"accuracy": 0.17696704774714192,
"accuracy_stderr": 0.03488947970284516,
"f1": 0.15821694371719605,
"f1_stderr": 0.018041297388910648,
"main_score": 0.17696704774714192
},
"ar": {
"accuracy": 0.4520847343644922,
"accuracy_stderr": 0.02052556073259087,
"f1": 0.4493556808838758,
"f1_stderr": 0.02219131959628148,
"main_score": 0.4520847343644922
},
"az": {
"accuracy": 0.2821116341627438,
"accuracy_stderr": 0.03302593905078847,
"f1": 0.25909697848350605,
"f1_stderr": 0.025892139767993743,
"main_score": 0.2821116341627438
},
"bn": {
"accuracy": 0.5052454606590451,
"accuracy_stderr": 0.025871484930688898,
"f1": 0.4856524051409231,
"f1_stderr": 0.02999372933773852,
"main_score": 0.5052454606590451
},
"cy": {
"accuracy": 0.2257565568258238,
"accuracy_stderr": 0.029955555426665198,
"f1": 0.22253995596411508,
"f1_stderr": 0.024577736879299292,
"main_score": 0.2257565568258238
},
"da": {
"accuracy": 0.5486550100874243,
"accuracy_stderr": 0.024402881290646793,
"f1": 0.5266171141921359,
"f1_stderr": 0.022103636920251905,
"main_score": 0.5486550100874243
},
"de": {
"accuracy": 0.543375924680565,
"accuracy_stderr": 0.021944881957252572,
"f1": 0.5252613318654367,
"f1_stderr": 0.026364483689257163,
"main_score": 0.543375924680565
},
"el": {
"accuracy": 0.5547074646940148,
"accuracy_stderr": 0.025938196719597618,
"f1": 0.5349766031105144,
"f1_stderr": 0.029002837280894762,
"main_score": 0.5547074646940148
},
"en": {
"accuracy": 0.5592131809011432,
"accuracy_stderr": 0.02460975514543639,
"f1": 0.5360164729184802,
"f1_stderr": 0.023764425252363358,
"main_score": 0.5592131809011432
},
"es": {
"accuracy": 0.5277404169468729,
"accuracy_stderr": 0.01930086885231061,
"f1": 0.5147488045351095,
"f1_stderr": 0.02102482512919075,
"main_score": 0.5277404169468729
},
"evaluation_time": 2170.8,
"fa": {
"accuracy": 0.5250168123739072,
"accuracy_stderr": 0.02656193333119369,
"f1": 0.5107120593131073,
"f1_stderr": 0.027581382894975626,
"main_score": 0.5250168123739072
},
"fi": {
"accuracy": 0.5263281775386685,
"accuracy_stderr": 0.028049993574005946,
"f1": 0.5028105658943992,
"f1_stderr": 0.029331037664081416,
"main_score": 0.5263281775386685
},
"fr": {
"accuracy": 0.5431741761936786,
"accuracy_stderr": 0.02330862085752492,
"f1": 0.5225553974023132,
"f1_stderr": 0.025371329635383216,
"main_score": 0.5431741761936786
},
"he": {
"accuracy": 0.5241425689307331,
"accuracy_stderr": 0.023080787167122134,
"f1": 0.5053900298960137,
"f1_stderr": 0.023963342674475203,
"main_score": 0.5241425689307331
},
"hi": {
"accuracy": 0.47370544720914587,
"accuracy_stderr": 0.02723430216840629,
"f1": 0.4497838830218261,
"f1_stderr": 0.029160940440385055,
"main_score": 0.47370544720914587
},
"hu": {
"accuracy": 0.5343308675184936,
"accuracy_stderr": 0.02571158946620075,
"f1": 0.5127958383061038,
"f1_stderr": 0.02800743447651377,
"main_score": 0.5343308675184936
},
"hy": {
"accuracy": 0.33574310692669806,
"accuracy_stderr": 0.0279984332671053,
"f1": 0.31782884203375084,
"f1_stderr": 0.024137129936083107,
"main_score": 0.33574310692669806
},
"id": {
"accuracy": 0.543813046402152,
"accuracy_stderr": 0.02173056397773423,
"f1": 0.5300232571973252,
"f1_stderr": 0.02368021779116069,
"main_score": 0.543813046402152
},
"is": {
"accuracy": 0.49778076664425014,
"accuracy_stderr": 0.02425237237474775,
"f1": 0.4706894365473266,
"f1_stderr": 0.026960565416528054,
"main_score": 0.49778076664425014
},
"it": {
"accuracy": 0.5483523873570949,
"accuracy_stderr": 0.0236096496608969,
"f1": 0.5335311466130319,
"f1_stderr": 0.02545386322306336,
"main_score": 0.5483523873570949
},
"ja": {
"accuracy": 0.541223940820444,
"accuracy_stderr": 0.02132943677295098,
"f1": 0.5297398634789287,
"f1_stderr": 0.02456627872645516,
"main_score": 0.541223940820444
},
"jv": {
"accuracy": 0.32713517148621385,
"accuracy_stderr": 0.035614449716496376,
"f1": 0.31631803435296824,
"f1_stderr": 0.02907835125753927,
"main_score": 0.32713517148621385
},
"ka": {
"accuracy": 0.2691997310020175,
"accuracy_stderr": 0.014025349472741056,
"f1": 0.2553767051487454,
"f1_stderr": 0.01162136511437043,
"main_score": 0.2691997310020175
},
"km": {
"accuracy": 0.2723268325487559,
"accuracy_stderr": 0.02335380952684497,
"f1": 0.2515485444512877,
"f1_stderr": 0.017257971891773076,
"main_score": 0.2723268325487559
},
"kn": {
"accuracy": 0.10063887020847344,
"accuracy_stderr": 0.024999293350221934,
"f1": 0.064525855332009,
"f1_stderr": 0.013368181011921797,
"main_score": 0.10063887020847344
},
"ko": {
"accuracy": 0.5200739744451917,
"accuracy_stderr": 0.028003601644626674,
"f1": 0.508622045242862,
"f1_stderr": 0.02866048570926957,
"main_score": 0.5200739744451917
},
"lv": {
"accuracy": 0.44821788836583726,
"accuracy_stderr": 0.03405873263075454,
"f1": 0.44404598642640175,
"f1_stderr": 0.034820002597344316,
"main_score": 0.44821788836583726
},
"ml": {
"accuracy": 0.49098856758574316,
"accuracy_stderr": 0.036953827315555375,
"f1": 0.477330044105713,
"f1_stderr": 0.04031567283455406,
"main_score": 0.49098856758574316
},
"mn": {
"accuracy": 0.2150975117686617,
"accuracy_stderr": 0.031789545064276226,
"f1": 0.19956650806636905,
"f1_stderr": 0.02753725989804031,
"main_score": 0.2150975117686617
},
"ms": {
"accuracy": 0.5360121049092131,
"accuracy_stderr": 0.024483088411342932,
"f1": 0.5186987112385978,
"f1_stderr": 0.02535803268828841,
"main_score": 0.5360121049092131
},
"my": {
"accuracy": 0.29717552118359114,
"accuracy_stderr": 0.028042233316740755,
"f1": 0.2811205919315657,
"f1_stderr": 0.027129866000958187,
"main_score": 0.29717552118359114
},
"nb": {
"accuracy": 0.439004707464694,
"accuracy_stderr": 0.027998816890400836,
"f1": 0.4292074186822793,
"f1_stderr": 0.026243622116762927,
"main_score": 0.439004707464694
},
"nl": {
"accuracy": 0.5333221250840617,
"accuracy_stderr": 0.025110033232975306,
"f1": 0.5077836780602125,
"f1_stderr": 0.027053518325785973,
"main_score": 0.5333221250840617
},
"pl": {
"accuracy": 0.5291526563550774,
"accuracy_stderr": 0.02756493168576702,
"f1": 0.5199897798231456,
"f1_stderr": 0.029788904479210038,
"main_score": 0.5291526563550774
},
"pt": {
"accuracy": 0.5341291190316073,
"accuracy_stderr": 0.0218184197703392,
"f1": 0.5199879514297052,
"f1_stderr": 0.02121614576868715,
"main_score": 0.5341291190316073
},
"ro": {
"accuracy": 0.5047747141896436,
"accuracy_stderr": 0.02555648952102798,
"f1": 0.4877286315335815,
"f1_stderr": 0.02639141604347184,
"main_score": 0.5047747141896436
},
"ru": {
"accuracy": 0.5184263618022864,
"accuracy_stderr": 0.030568502165383708,
"f1": 0.5141368540903201,
"f1_stderr": 0.033485640371287616,
"main_score": 0.5184263618022864
},
"sl": {
"accuracy": 0.5128782784129118,
"accuracy_stderr": 0.02677596327757706,
"f1": 0.5069111807663103,
"f1_stderr": 0.028984297990527246,
"main_score": 0.5128782784129118
},
"sq": {
"accuracy": 0.556523201075992,
"accuracy_stderr": 0.022907536560761797,
"f1": 0.5280391253265858,
"f1_stderr": 0.02766946734240689,
"main_score": 0.556523201075992
},
"sv": {
"accuracy": 0.5464357767316745,
"accuracy_stderr": 0.02703916904648807,
"f1": 0.5289646853571585,
"f1_stderr": 0.027215870565783958,
"main_score": 0.5464357767316745
},
"sw": {
"accuracy": 0.4204438466711499,
"accuracy_stderr": 0.02543427134780407,
"f1": 0.39698226248309787,
"f1_stderr": 0.02495774581205971,
"main_score": 0.4204438466711499
},
"ta": {
"accuracy": 0.3672158708809684,
"accuracy_stderr": 0.024620778778865062,
"f1": 0.35611628288260666,
"f1_stderr": 0.02508451846783393,
"main_score": 0.3672158708809684
},
"te": {
"accuracy": 0.4208137188971082,
"accuracy_stderr": 0.02723531925936748,
"f1": 0.39746768684058836,
"f1_stderr": 0.029476453506871073,
"main_score": 0.4208137188971082
},
"th": {
"accuracy": 0.521486213853396,
"accuracy_stderr": 0.02513073701134646,
"f1": 0.5038882871449852,
"f1_stderr": 0.025901347057158762,
"main_score": 0.521486213853396
},
"tl": {
"accuracy": 0.3734028244788164,
"accuracy_stderr": 0.036431060397290715,
"f1": 0.3428567103143635,
"f1_stderr": 0.03562323164275494,
"main_score": 0.3734028244788164
},
"tr": {
"accuracy": 0.5255548083389374,
"accuracy_stderr": 0.030376754244448495,
"f1": 0.5139704021609626,
"f1_stderr": 0.02797264681517368,
"main_score": 0.5255548083389374
},
"ur": {
"accuracy": 0.32599193006052457,
"accuracy_stderr": 0.03133503581066815,
"f1": 0.32576383434482714,
"f1_stderr": 0.02553427963140292,
"main_score": 0.32599193006052457
},
"vi": {
"accuracy": 0.5097175521183592,
"accuracy_stderr": 0.01667763764284436,
"f1": 0.49826499437648436,
"f1_stderr": 0.019814421278178205,
"main_score": 0.5097175521183592
},
"zh-CN": {
"accuracy": 0.5021856086079354,
"accuracy_stderr": 0.01872946746711234,
"f1": 0.49069359630568155,
"f1_stderr": 0.022400099688479058,
"main_score": 0.5021856086079354
},
"zh-TW": {
"accuracy": 0.4232347007397445,
"accuracy_stderr": 0.01456541641125866,
"f1": 0.41073538173900237,
"f1_stderr": 0.015525857226939374,
"main_score": 0.4232347007397445
}
},
"mteb_dataset_name": "MassiveScenarioClassification",
"dataset_revision": "7d571f92784cd94a019292a1f45445077d0ef634"
}