{
    "test": {
        "de": {
            "accuracy": 0.5907922912205568,
            "accuracy_stderr": 0.04697225567223964,
            "ap": 0.7391887421019034,
            "ap_stderr": 0.018359046750299576,
            "f1": 0.566316368658711,
            "f1_stderr": 0.03933410491374647,
            "main_score": 0.7391887421019034
        },
        "en": {
            "accuracy": 0.6588059701492537,
            "accuracy_stderr": 0.03858259475982057,
            "ap": 0.28685493163579784,
            "ap_stderr": 0.028416239884151956,
            "f1": 0.5979951005816335,
            "f1_stderr": 0.03460211337220667,
            "main_score": 0.28685493163579784
        },
        "en-ext": {
            "accuracy": 0.6491754122938531,
            "accuracy_stderr": 0.02969121438063036,
            "ap": 0.16360681214864226,
            "ap_stderr": 0.012928918731728899,
            "f1": 0.5312659206152377,
            "f1_stderr": 0.02214429165830661,
            "main_score": 0.16360681214864226
        },
        "evaluation_time": 44.11,
        "ja": {
            "accuracy": 0.56423982869379,
            "accuracy_stderr": 0.038775180466092,
            "ap": 0.12143003571907898,
            "ap_stderr": 0.007479750448516404,
            "f1": 0.45763637779874716,
            "f1_stderr": 0.024110002963520918,
            "main_score": 0.12143003571907898
        }
    },
    "validation": {
        "de": {
            "accuracy": 0.6160944206008584,
            "accuracy_stderr": 0.04579873754119935,
            "ap": 0.7525361869353705,
            "ap_stderr": 0.01987949199006841,
            "f1": 0.5900293167119509,
            "f1_stderr": 0.040612936028294924,
            "main_score": 0.7525361869353705
        },
        "en": {
            "accuracy": 0.6256716417910448,
            "accuracy_stderr": 0.02891673083776081,
            "ap": 0.24837825589538176,
            "ap_stderr": 0.016734180789259385,
            "f1": 0.5632101726341471,
            "f1_stderr": 0.02210735627349875,
            "main_score": 0.24837825589538176
        },
        "en-ext": {
            "accuracy": 0.6403903903903903,
            "accuracy_stderr": 0.03572974184471711,
            "ap": 0.15586540049538755,
            "ap_stderr": 0.01508708261860955,
            "f1": 0.5215642093830889,
            "f1_stderr": 0.02503239509049787,
            "main_score": 0.15586540049538755
        },
        "evaluation_time": 41.76,
        "ja": {
            "accuracy": 0.5763948497854077,
            "accuracy_stderr": 0.04542265102705669,
            "ap": 0.11942986436107943,
            "ap_stderr": 0.011582399622870151,
            "f1": 0.46236261204701795,
            "f1_stderr": 0.029102640030030654,
            "main_score": 0.11942986436107943
        }
    },
    "mteb_version": "0.0.2",
    "mteb_dataset_name": "AmazonCounterfactualClassification",
    "dataset_revision": "2d8a100785abf0ae21420d2a55b0c56e3e1ea996"
}