results / results / distiluse-base-multilingual-cased-v2 / AmazonCounterfactualClassification.json
Muennighoff's picture
add results - distiluse-base-multilingual-cased-v2 (#7)
f022761
raw history blame
No virus
2.54 kB
{
"dataset_revision": "e8379541af4e31359cca9fbcf4b00f2671dba205",
"mteb_dataset_name": "AmazonCounterfactualClassification",
"mteb_version": "1.0.2",
"test": {
"de": {
"accuracy": 0.6813704496788009,
"accuracy_stderr": 0.024612230046488805,
"ap": 0.806706553308835,
"ap_stderr": 0.016448423566835353,
"f1": 0.666468090116337,
"f1_stderr": 0.023789062068309465,
"main_score": 0.6813704496788009
},
"en": {
"accuracy": 0.7180597014925373,
"accuracy_stderr": 0.029021696280176958,
"ap": 0.3370263085714158,
"ap_stderr": 0.02417710728701376,
"f1": 0.6544989712268762,
"f1_stderr": 0.02527413136493158,
"main_score": 0.7180597014925373
},
"en-ext": {
"accuracy": 0.7296101949025486,
"accuracy_stderr": 0.03403669840829533,
"ap": 0.22209148737301962,
"ap_stderr": 0.02260533582227882,
"f1": 0.6042877542046691,
"f1_stderr": 0.02743732409029803,
"main_score": 0.7296101949025486
},
"evaluation_time": 18.51,
"ja": {
"accuracy": 0.6538543897216275,
"accuracy_stderr": 0.023329909403407633,
"ap": 0.16135900323284472,
"ap_stderr": 0.010525937964819992,
"f1": 0.5320720298606364,
"f1_stderr": 0.015164867003193814,
"main_score": 0.6538543897216275
}
},
"validation": {
"de": {
"accuracy": 0.7025751072961374,
"accuracy_stderr": 0.03413568897021146,
"ap": 0.8195432721434586,
"ap_stderr": 0.01855963604439618,
"f1": 0.6865634929258071,
"f1_stderr": 0.03146723543038631,
"main_score": 0.7025751072961374
},
"en": {
"accuracy": 0.7,
"accuracy_stderr": 0.03497126720555219,
"ap": 0.2959161477290448,
"ap_stderr": 0.029220638413959707,
"f1": 0.6269667251442652,
"f1_stderr": 0.029306318263514188,
"main_score": 0.7
},
"en-ext": {
"accuracy": 0.6995495495495495,
"accuracy_stderr": 0.040297626151526884,
"ap": 0.19518020632989744,
"ap_stderr": 0.027017350830205716,
"f1": 0.574144858941869,
"f1_stderr": 0.03304127410974345,
"main_score": 0.6995495495495495
},
"evaluation_time": 21.33,
"ja": {
"accuracy": 0.6564377682403434,
"accuracy_stderr": 0.030821457446519807,
"ap": 0.14325859760128967,
"ap_stderr": 0.013381854116899206,
"f1": 0.5201054934403554,
"f1_stderr": 0.019875056929254228,
"main_score": 0.6564377682403434
}
}
}