{
    "mteb_version": "0.0.2",
    "test": {
        "de": {
            "accuracy": 0.35548041701887856,
            "accuracy_stderr": 0.019526118970053952,
            "f1": 0.2280418192080155,
            "f1_stderr": 0.008718034684922146,
            "main_score": 0.35548041701887856
        },
        "en": {
            "accuracy": 0.500547195622435,
            "accuracy_stderr": 0.016130351302176212,
            "f1": 0.3337056622067417,
            "f1_stderr": 0.012366685485631924,
            "main_score": 0.500547195622435
        },
        "es": {
            "accuracy": 0.3672114743162108,
            "accuracy_stderr": 0.0145488800341903,
            "f1": 0.24579372633871294,
            "f1_stderr": 0.00796102163509892,
            "main_score": 0.3672114743162108
        },
        "evaluation_time": 135.03,
        "fr": {
            "accuracy": 0.3470717193861572,
            "accuracy_stderr": 0.011580905479656305,
            "f1": 0.23982315580880628,
            "f1_stderr": 0.007589073472572122,
            "main_score": 0.3470717193861572
        },
        "hi": {
            "accuracy": 0.04435281462889925,
            "accuracy_stderr": 0.010665384008535709,
            "f1": 0.021148387249285677,
            "f1_stderr": 0.0028750733359774196,
            "main_score": 0.04435281462889925
        },
        "th": {
            "accuracy": 0.04672694394213382,
            "accuracy_stderr": 0.011437723805651768,
            "f1": 0.01908226384939537,
            "f1_stderr": 0.004417045296338254,
            "main_score": 0.04672694394213382
        }
    },
    "mteb_dataset_name": "MTOPIntentClassification",
    "dataset_revision": "6299947a7777084cc2d4b64235bf7190381ce755"
}