results/results/LaBSE/AmazonCounterfactualClassification.json
{
  "mteb_version": "0.0.2",
  "test": {
    "de": {
      "accuracy": 0.7316916488222698,
      "accuracy_stderr": 0.023889002602476356,
      "ap": 0.8290076420386528,
      "ap_stderr": 0.012660161052184985,
      "f1": 0.7130862059785664,
      "f1_stderr": 0.02181189033885791,
      "main_score": 0.7316916488222698
    },
    "en": {
      "accuracy": 0.7592537313432837,
      "accuracy_stderr": 0.03223362313231533,
      "ap": 0.3878982837837993,
      "ap_stderr": 0.027793090363084784,
      "f1": 0.6983638841265855,
      "f1_stderr": 0.02765498577786686,
      "main_score": 0.7592537313432837
    },
    "en-ext": {
      "accuracy": 0.7609445277361319,
      "accuracy_stderr": 0.0345209520573379,
      "ap": 0.246315943240905,
      "ap_stderr": 0.022227483310595396,
      "f1": 0.632326892272477,
      "f1_stderr": 0.02772938313195089,
      "main_score": 0.7609445277361319
    },
    "evaluation_time": 22.05,
    "ja": {
      "accuracy": 0.7642398286937901,
      "accuracy_stderr": 0.04516323427211123,
      "ap": 0.2325244723742558,
      "ap_stderr": 0.027583146965977287,
      "f1": 0.6269181103749226,
      "f1_stderr": 0.03649669919135856,
      "main_score": 0.7642398286937901
    }
  },
  "mteb_dataset_name": "AmazonCounterfactualClassification",
  "dataset_revision": "2d8a100785abf0ae21420d2a55b0c56e3e1ea996"
}