{
"mteb_version": "0.0.2",
"test": {
"de": {
"accuracy": 0.8695407156945618,
"accuracy_stderr": 0.012562623487259608,
"f1": 0.860039412518864,
"f1_stderr": 0.013768287384200205,
"main_score": 0.8695407156945618
},
"en": {
"accuracy": 0.8606475148198814,
"accuracy_stderr": 0.012944113280877105,
"f1": 0.8564334296943723,
"f1_stderr": 0.014296654126998752,
"main_score": 0.8606475148198814
},
"es": {
"accuracy": 0.840660440293529,
"accuracy_stderr": 0.014049032050950141,
"f1": 0.8399106603061199,
"f1_stderr": 0.013444338136579304,
"main_score": 0.840660440293529
},
"evaluation_time": 28.77,
"fr": {
"accuracy": 0.8413717507046664,
"accuracy_stderr": 0.010344831193926792,
"f1": 0.8360072280905483,
"f1_stderr": 0.01076070445993987,
"main_score": 0.8413717507046664
},
"hi": {
"accuracy": 0.8511294370742201,
"accuracy_stderr": 0.015155999994202028,
"f1": 0.844906626111696,
"f1_stderr": 0.01567255233506952,
"main_score": 0.8511294370742201
},
"th": {
"accuracy": 0.8123688969258589,
"accuracy_stderr": 0.011275324784571696,
"f1": 0.8078479567092949,
"f1_stderr": 0.01064163380537252,
"main_score": 0.8123688969258589
}
},
"mteb_dataset_name": "MTOPDomainClassification",
"dataset_revision": "a7e2a951126a26fc8c6a69f835f33a346ba259e3"
}