results/all-MiniLM-L12-v2/MTOPIntentClassification.json
{
  "mteb_version": "0.0.2",
  "test": {
    "de": {
      "accuracy": 0.43412228796844177,
      "accuracy_stderr": 0.02833655838161886,
      "f1": 0.2596122949091921,
      "f1_stderr": 0.014286308934501543,
      "main_score": 0.43412228796844177
    },
    "en": {
      "accuracy": 0.6283629730962152,
      "accuracy_stderr": 0.016307209926331702,
      "f1": 0.44241027031016733,
      "f1_stderr": 0.006746871737170047,
      "main_score": 0.6283629730962152
    },
    "es": {
      "accuracy": 0.418812541694463,
      "accuracy_stderr": 0.02630111577120924,
      "f1": 0.2793481154758236,
      "f1_stderr": 0.010247097409877589,
      "main_score": 0.418812541694463
    },
    "evaluation_time": 56.0,
    "fr": {
      "accuracy": 0.3893830253679925,
      "accuracy_stderr": 0.029926486678172083,
      "f1": 0.25820783392796054,
      "f1_stderr": 0.01148521561504446,
      "main_score": 0.3893830253679925
    },
    "hi": {
      "accuracy": 0.177518823951237,
      "accuracy_stderr": 0.01595167162980909,
      "f1": 0.11681226129204576,
      "f1_stderr": 0.009112908309892032,
      "main_score": 0.177518823951237
    },
    "th": {
      "accuracy": 0.05631103074141049,
      "accuracy_stderr": 0.019331962547093504,
      "f1": 0.02046543337618445,
      "f1_stderr": 0.005206301634773074,
      "main_score": 0.05631103074141049
    }
  },
  "mteb_dataset_name": "MTOPIntentClassification",
  "dataset_revision": "6299947a7777084cc2d4b64235bf7190381ce755"
}
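
A minimal sketch of how one might read the per-language scores from a result file like this one. The local filename and the skipping of scalar fields such as "evaluation_time" are assumptions for illustration, not part of the file itself.

```python
import json

# Minimal sketch: load the MTEB result file above and print per-language scores.
# Assumes the JSON has been saved locally as "MTOPIntentClassification.json".
with open("MTOPIntentClassification.json") as f:
    results = json.load(f)

for lang, scores in sorted(results["test"].items()):
    # Skip non-language entries in the "test" block (e.g. "evaluation_time").
    if not isinstance(scores, dict):
        continue
    print(f"{lang}: accuracy={scores['accuracy']:.4f} f1={scores['f1']:.4f}")
```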