leaderboard/results/LASER2/AmazonCounterfactualClassification.json
{
  "dataset_version": null,
  "mteb_version": "0.0.2",
  "test": {
    "de": {
      "accuracy": 0.6781584582441113,
      "accuracy_stderr": 0.060279798858073545,
      "ap": 0.8036240553535807,
      "ap_stderr": 0.03476499899077643,
      "f1": 0.6628493463277175,
      "f1_stderr": 0.05804533112556245,
      "main_score": 0.6781584582441113
    },
    "en": {
      "accuracy": 0.7683582089552239,
      "accuracy_stderr": 0.03737785483516161,
      "ap": 0.40076479274021654,
      "ap_stderr": 0.05081532982471566,
      "f1": 0.70787800776529,
      "f1_stderr": 0.03884967003850526,
      "main_score": 0.7683582089552239
    },
    "en-ext": {
      "accuracy": 0.7616941529235383,
      "accuracy_stderr": 0.05609726317155699,
      "ap": 0.23620239901382217,
      "ap_stderr": 0.055900376704944924,
      "f1": 0.6259005944326002,
      "f1_stderr": 0.057023255773266515,
      "main_score": 0.7616941529235383
    },
    "evaluation_time": 162.63,
    "ja": {
      "accuracy": 0.6875802997858672,
      "accuracy_stderr": 0.057291619728276316,
      "ap": 0.18157282477398815,
      "ap_stderr": 0.0359805625991896,
      "f1": 0.5601658468471795,
      "f1_stderr": 0.047780178480722454,
      "main_score": 0.5601658468471795
    }
  }
}
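
For reference, a minimal Python sketch for reading a result file in this format and printing the per-language main_score values. The local file path is an assumption (it mirrors the path of this file in the results repository); note that in this schema "evaluation_time" sits alongside the language keys inside "test", so non-dict entries are skipped.

import json

# Assumed local path, mirroring the layout of the results repository.
path = "results/LASER2/AmazonCounterfactualClassification.json"

with open(path, encoding="utf-8") as f:
    results = json.load(f)

test = results["test"]

# "evaluation_time" is stored next to the per-language blocks, so only
# dict-valued entries are treated as language splits here.
for lang, scores in test.items():
    if isinstance(scores, dict):
        print(f"{lang}: main_score = {scores['main_score']:.4f}")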