results/SGPT-125M-weightedmean-nli-bitfit/AmazonCounterfactualClassification.json
{
"test": {
"de": {
"accuracy": 0.5907922912205568,
"accuracy_stderr": 0.04697225567223964,
"ap": 0.7391887421019034,
"ap_stderr": 0.018359046750299576,
"f1": 0.566316368658711,
"f1_stderr": 0.03933410491374647,
"main_score": 0.7391887421019034
},
"en": {
"accuracy": 0.6588059701492537,
"accuracy_stderr": 0.03858259475982057,
"ap": 0.28685493163579784,
"ap_stderr": 0.028416239884151956,
"f1": 0.5979951005816335,
"f1_stderr": 0.03460211337220667,
"main_score": 0.28685493163579784
},
"en-ext": {
"accuracy": 0.6491754122938531,
"accuracy_stderr": 0.02969121438063036,
"ap": 0.16360681214864226,
"ap_stderr": 0.012928918731728899,
"f1": 0.5312659206152377,
"f1_stderr": 0.02214429165830661,
"main_score": 0.16360681214864226
},
"evaluation_time": 44.11,
"ja": {
"accuracy": 0.56423982869379,
"accuracy_stderr": 0.038775180466092,
"ap": 0.12143003571907898,
"ap_stderr": 0.007479750448516404,
"f1": 0.45763637779874716,
"f1_stderr": 0.024110002963520918,
"main_score": 0.12143003571907898
}
},
"validation": {
"de": {
"accuracy": 0.6160944206008584,
"accuracy_stderr": 0.04579873754119935,
"ap": 0.7525361869353705,
"ap_stderr": 0.01987949199006841,
"f1": 0.5900293167119509,
"f1_stderr": 0.040612936028294924,
"main_score": 0.7525361869353705
},
"en": {
"accuracy": 0.6256716417910448,
"accuracy_stderr": 0.02891673083776081,
"ap": 0.24837825589538176,
"ap_stderr": 0.016734180789259385,
"f1": 0.5632101726341471,
"f1_stderr": 0.02210735627349875,
"main_score": 0.24837825589538176
},
"en-ext": {
"accuracy": 0.6403903903903903,
"accuracy_stderr": 0.03572974184471711,
"ap": 0.15586540049538755,
"ap_stderr": 0.01508708261860955,
"f1": 0.5215642093830889,
"f1_stderr": 0.02503239509049787,
"main_score": 0.15586540049538755
},
"evaluation_time": 41.76,
"ja": {
"accuracy": 0.5763948497854077,
"accuracy_stderr": 0.04542265102705669,
"ap": 0.11942986436107943,
"ap_stderr": 0.011582399622870151,
"f1": 0.46236261204701795,
"f1_stderr": 0.029102640030030654,
"main_score": 0.11942986436107943
}
},
"mteb_version": "0.0.2",
"mteb_dataset_name": "AmazonCounterfactualClassification",
"dataset_revision": "2d8a100785abf0ae21420d2a55b0c56e3e1ea996"
}