{
  "dataset_version": null,
  "mteb_version": "0.0.2",
  "test": {
    "de": {
      "accuracy": 0.7407720484643561,
      "accuracy_stderr": 0.02097470751351384,
      "f1": 0.7246747959017267,
      "f1_stderr": 0.021409255315184686,
      "main_score": 0.7407720484643561
    },
    "en": {
      "accuracy": 0.7535795713634291,
      "accuracy_stderr": 0.01932929756272018,
      "f1": 0.7471023646717295,
      "f1_stderr": 0.020603379350450358,
      "main_score": 0.7535795713634291
    },
    "es": {
      "accuracy": 0.7346897931954637,
      "accuracy_stderr": 0.029275171505061988,
      "f1": 0.7201695057514836,
      "f1_stderr": 0.02619166045098598,
      "main_score": 0.7346897931954637
    },
    "evaluation_time": 238.88,
    "fr": {
      "accuracy": 0.7226119636705294,
      "accuracy_stderr": 0.017579729530633047,
      "f1": 0.7138676065809201,
      "f1_stderr": 0.016450702159748233,
      "main_score": 0.7226119636705294
    },
    "hi": {
      "accuracy": 0.7295087845105772,
      "accuracy_stderr": 0.022688800454798208,
      "f1": 0.7163844311220117,
      "f1_stderr": 0.019843811193564726,
      "main_score": 0.7295087845105772
    },
    "th": {
      "accuracy": 0.7267631103074141,
      "accuracy_stderr": 0.014703529100373161,
      "f1": 0.7222556760062775,
      "f1_stderr": 0.012793360924583051,
      "main_score": 0.7267631103074141
    }
  }
}