{
"dataset_revision": "7d571f92784cd94a019292a1f45445077d0ef634",
"mteb_dataset_name": "MassiveScenarioClassification",
"mteb_version": "1.0.3.dev0",
"test": {
"da": {
"accuracy": 0.623369199731002,
"accuracy_stderr": 0.02133592922394949,
"f1": 0.6033919131144819,
"f1_stderr": 0.022980437896054005,
"main_score": 0.623369199731002
},
"evaluation_time": 46.02,
"nb": {
"accuracy": 0.5990248823133827,
"accuracy_stderr": 0.028594581227804304,
"f1": 0.580635279158187,
"f1_stderr": 0.026449069332460447,
"main_score": 0.5990248823133827
},
"sv": {
"accuracy": 0.655413584398117,
"accuracy_stderr": 0.023055791032083404,
"f1": 0.6396523071925566,
"f1_stderr": 0.022890034079768754,
"main_score": 0.655413584398117
}
},
"validation": {
"da": {
"accuracy": 0.6162813575996066,
"accuracy_stderr": 0.02254250909972233,
"f1": 0.6015663820443542,
"f1_stderr": 0.021793610783376285,
"main_score": 0.6162813575996066
},
"evaluation_time": 37.95,
"nb": {
"accuracy": 0.5890309886866699,
"accuracy_stderr": 0.026072863610808437,
"f1": 0.5721595083894724,
"f1_stderr": 0.022139707329639016,
"main_score": 0.5890309886866699
},
"sv": {
"accuracy": 0.6483521888834234,
"accuracy_stderr": 0.022057524920591747,
"f1": 0.6325768031169654,
"f1_stderr": 0.02207399794234202,
"main_score": 0.6483521888834234
}
}
}