{
  "mteb_version": "0.0.2",
  "test": {
    "de": {
      "accuracy": 0.5709850107066381,
      "accuracy_stderr": 0.032418490921355646,
      "ap": 0.7338224986285773,
      "ap_stderr": 0.014519500829907068,
      "f1": 0.5518332251622343,
      "f1_stderr": 0.0287737138699505,
      "main_score": 0.5709850107066381
    },
    "en": {
      "accuracy": 0.6528358208955224,
      "accuracy_stderr": 0.047227427834394854,
      "ap": 0.2802247873560022,
      "ap_stderr": 0.019305982032960956,
      "f1": 0.5909977445939425,
      "f1_stderr": 0.03411747847368747,
      "main_score": 0.6528358208955224
    },
    "en-ext": {
      "accuracy": 0.6724137931034483,
      "accuracy_stderr": 0.05483855378474212,
      "ap": 0.1793337056203553,
      "ap_stderr": 0.021671691977739136,
      "f1": 0.5520071109085884,
      "f1_stderr": 0.0394159690305972,
      "main_score": 0.6724137931034483
    },
    "evaluation_time": 11.57,
    "ja": {
      "accuracy": 0.5991434689507494,
      "accuracy_stderr": 0.04765885308177663,
      "ap": 0.13610920446878455,
      "ap_stderr": 0.009570182009962704,
      "f1": 0.4870464699796398,
      "f1_stderr": 0.02863549580159552,
      "main_score": 0.5991434689507494
    }
  },
  "mteb_dataset_name": "AmazonCounterfactualClassification",
  "dataset_revision": "2d8a100785abf0ae21420d2a55b0c56e3e1ea996"
}