{
    "mteb_version": "0.0.2",
    "test": {
        "de": {
            "accuracy": 0.7316916488222698,
            "accuracy_stderr": 0.023889002602476356,
            "ap": 0.8290076420386528,
            "ap_stderr": 0.012660161052184985,
            "f1": 0.7130862059785664,
            "f1_stderr": 0.02181189033885791,
            "main_score": 0.7316916488222698
        },
        "en": {
            "accuracy": 0.7592537313432837,
            "accuracy_stderr": 0.03223362313231533,
            "ap": 0.3878982837837993,
            "ap_stderr": 0.027793090363084784,
            "f1": 0.6983638841265855,
            "f1_stderr": 0.02765498577786686,
            "main_score": 0.7592537313432837
        },
        "en-ext": {
            "accuracy": 0.7609445277361319,
            "accuracy_stderr": 0.0345209520573379,
            "ap": 0.246315943240905,
            "ap_stderr": 0.022227483310595396,
            "f1": 0.632326892272477,
            "f1_stderr": 0.02772938313195089,
            "main_score": 0.7609445277361319
        },
        "evaluation_time": 22.05,
        "ja": {
            "accuracy": 0.7642398286937901,
            "accuracy_stderr": 0.04516323427211123,
            "ap": 0.2325244723742558,
            "ap_stderr": 0.027583146965977287,
            "f1": 0.6269181103749226,
            "f1_stderr": 0.03649669919135856,
            "main_score": 0.7642398286937901
        }
    },
    "mteb_dataset_name": "AmazonCounterfactualClassification",
    "dataset_revision": "2d8a100785abf0ae21420d2a55b0c56e3e1ea996"
}