{
"test": {
"de": {
"accuracy": 0.5688436830835117,
"accuracy_stderr": 0.029093897773991517,
"ap": 0.7267279104379771,
"ap_stderr": 0.010217580709985108,
"f1": 0.5444984024378641,
"f1_stderr": 0.02389292222743725,
"main_score": 0.7267279104379771
},
"en": {
"accuracy": 0.6123880597014926,
"accuracy_stderr": 0.031379155023926214,
"ap": 0.25854431650388643,
"ap_stderr": 0.016981404964613887,
"f1": 0.557518627628186,
"f1_stderr": 0.024487163893960967,
"main_score": 0.25854431650388643
},
"en-ext": {
"accuracy": 0.5827586206896551,
"accuracy_stderr": 0.048062843086995724,
"ap": 0.14067357642500386,
"ap_stderr": 0.013693676529546861,
"f1": 0.4817231851869133,
"f1_stderr": 0.030208933707168584,
"main_score": 0.14067357642500386
},
"evaluation_time": 52.9,
"ja": {
"accuracy": 0.5464668094218414,
"accuracy_stderr": 0.04850756984311426,
"ap": 0.11776694555054965,
"ap_stderr": 0.006424556406403228,
"f1": 0.44526622834078766,
"f1_stderr": 0.029061809459351582,
"main_score": 0.11776694555054965
}
},
"validation": {
"de": {
"accuracy": 0.5918454935622317,
"accuracy_stderr": 0.024350314081552426,
"ap": 0.7372017677238578,
"ap_stderr": 0.010683816932118138,
"f1": 0.5642882005866563,
"f1_stderr": 0.02007457192846663,
"main_score": 0.7372017677238578
},
"en": {
"accuracy": 0.6113432835820894,
"accuracy_stderr": 0.038501677223608914,
"ap": 0.2227104848944061,
"ap_stderr": 0.01397442904182502,
"f1": 0.5393378654542855,
"f1_stderr": 0.027447873327993155,
"main_score": 0.2227104848944061
},
"en-ext": {
"accuracy": 0.5848348348348348,
"accuracy_stderr": 0.043238290090035825,
"ap": 0.13520924912279636,
"ap_stderr": 0.011096096922529254,
"f1": 0.4794399892152111,
"f1_stderr": 0.02584441348504658,
"main_score": 0.13520924912279636
},
"evaluation_time": 43.78,
"ja": {
"accuracy": 0.5523605150214592,
"accuracy_stderr": 0.04902106769175921,
"ap": 0.11348392156508444,
"ap_stderr": 0.012428212574320868,
"f1": 0.4441089123442944,
"f1_stderr": 0.03136061225772234,
"main_score": 0.11348392156508444
}
},
"mteb_version": "0.0.2",
"mteb_dataset_name": "AmazonCounterfactualClassification",
"dataset_revision": "2d8a100785abf0ae21420d2a55b0c56e3e1ea996"
}