{
  "mteb_version": "0.0.2",
  "test": {
    "de": {
      "accuracy": 0.6781584582441113,
      "accuracy_stderr": 0.060279798858073545,
      "ap": 0.8036240553535807,
      "ap_stderr": 0.03476499899077643,
      "f1": 0.6628493463277175,
      "f1_stderr": 0.05804533112556245,
      "main_score": 0.6781584582441113
    },
    "en": {
      "accuracy": 0.7683582089552239,
      "accuracy_stderr": 0.03737785483516161,
      "ap": 0.40076479274021654,
      "ap_stderr": 0.05081532982471566,
      "f1": 0.70787800776529,
      "f1_stderr": 0.03884967003850526,
      "main_score": 0.7683582089552239
    },
    "en-ext": {
      "accuracy": 0.7616941529235383,
      "accuracy_stderr": 0.05609726317155699,
      "ap": 0.23620239901382217,
      "ap_stderr": 0.055900376704944924,
      "f1": 0.6259005944326002,
      "f1_stderr": 0.057023255773266515,
      "main_score": 0.7616941529235383
    },
    "evaluation_time": 162.63,
    "ja": {
      "accuracy": 0.6875802997858672,
      "accuracy_stderr": 0.057291619728276316,
      "ap": 0.18157282477398815,
      "ap_stderr": 0.0359805625991896,
      "f1": 0.5601658468471795,
      "f1_stderr": 0.047780178480722454,
      "main_score": 0.6875802997858672
    }
  },
  "mteb_dataset_name": "AmazonCounterfactualClassification",
  "dataset_revision": "2d8a100785abf0ae21420d2a55b0c56e3e1ea996"
}