{
    "mteb_version": "0.0.2",
    "test": {
        "de": {
            "accuracy": 0.5709850107066381,
            "accuracy_stderr": 0.032418490921355646,
            "ap": 0.7338224986285773,
            "ap_stderr": 0.014519500829907068,
            "f1": 0.5518332251622343,
            "f1_stderr": 0.0287737138699505,
            "main_score": 0.5709850107066381
        },
        "en": {
            "accuracy": 0.6528358208955224,
            "accuracy_stderr": 0.047227427834394854,
            "ap": 0.2802247873560022,
            "ap_stderr": 0.019305982032960956,
            "f1": 0.5909977445939425,
            "f1_stderr": 0.03411747847368747,
            "main_score": 0.6528358208955224
        },
        "en-ext": {
            "accuracy": 0.6724137931034483,
            "accuracy_stderr": 0.05483855378474212,
            "ap": 0.1793337056203553,
            "ap_stderr": 0.021671691977739136,
            "f1": 0.5520071109085884,
            "f1_stderr": 0.0394159690305972,
            "main_score": 0.6724137931034483
        },
        "evaluation_time": 11.57,
        "ja": {
            "accuracy": 0.5991434689507494,
            "accuracy_stderr": 0.04765885308177663,
            "ap": 0.13610920446878455,
            "ap_stderr": 0.009570182009962704,
            "f1": 0.4870464699796398,
            "f1_stderr": 0.02863549580159552,
            "main_score": 0.5991434689507494
        }
    },
    "mteb_dataset_name": "AmazonCounterfactualClassification",
    "dataset_revision": "2d8a100785abf0ae21420d2a55b0c56e3e1ea996"
}
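
A minimal Python sketch for working with this result file. It assumes the JSON above is saved as AmazonCounterfactualClassification.json (a hypothetical file name) and prints the per-language main_score, skipping scalar entries such as "evaluation_time":

import json

# Load the MTEB result file (file name is an assumption for this sketch).
with open("AmazonCounterfactualClassification.json") as f:
    result = json.load(f)

for lang, scores in result["test"].items():
    # The "test" object mixes per-language dicts with scalars like
    # "evaluation_time"; only the dicts carry metric values.
    if not isinstance(scores, dict):
        continue
    print(f"{lang}: main_score={scores['main_score']:.4f} "
          f"(accuracy {scores['accuracy']:.4f} "
          f"+/- {scores['accuracy_stderr']:.4f})")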