results/allenai-specter/MTOPDomainClassification.json
{
"mteb_version": "0.0.2",
"test": {
"de": {
"accuracy": 0.48548887010425473,
"accuracy_stderr": 0.02013341261503217,
"f1": 0.46861917639468426,
"f1_stderr": 0.017163549637196754,
"main_score": 0.48548887010425473
},
"en": {
"accuracy": 0.7453488372093022,
"accuracy_stderr": 0.01426563653023606,
"f1": 0.7371336860122857,
"f1_stderr": 0.013623383638066767,
"main_score": 0.7453488372093022
},
"es": {
"accuracy": 0.5838559039359572,
"accuracy_stderr": 0.018980579864383652,
"f1": 0.5596743388912342,
"f1_stderr": 0.022952517347689834,
"main_score": 0.5838559039359572
},
"evaluation_time": 23.24,
"fr": {
"accuracy": 0.5461321641089885,
"accuracy_stderr": 0.014874676425052273,
"f1": 0.5188649309562938,
"f1_stderr": 0.012917508919138973,
"main_score": 0.5461321641089885
},
"hi": {
"accuracy": 0.21222660451774827,
"accuracy_stderr": 0.017064525516140214,
"f1": 0.17767622443716807,
"f1_stderr": 0.010824181154978959,
"main_score": 0.21222660451774827
},
"th": {
"accuracy": 0.14976491862567812,
"accuracy_stderr": 0.02061892031337306,
"f1": 0.12479671910199683,
"f1_stderr": 0.010693656445237513,
"main_score": 0.14976491862567812
}
},
"mteb_dataset_name": "MTOPDomainClassification",
"dataset_revision": "a7e2a951126a26fc8c6a69f835f33a346ba259e3"
}
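
A minimal sketch of reading the per-language scores out of this results file, assuming it is saved at the path shown above; the path and the script itself are illustrative, not part of the MTEB tooling:

```python
import json

# Path assumed from the repo layout above.
with open("results/allenai-specter/MTOPDomainClassification.json") as f:
    results = json.load(f)

# Per-language entries sit under "test"; skip scalar fields
# like "evaluation_time", which is a float rather than a dict.
for lang, scores in results["test"].items():
    if isinstance(scores, dict):
        print(f"{lang}: accuracy={scores['accuracy']:.4f}, f1={scores['f1']:.4f}")
```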