{
    "test": {
        "de": {
            "accuracy": 0.24516,
            "accuracy_stderr": 0.013653365885377858,
            "f1": 0.2421748200448397,
            "f1_stderr": 0.014364828062848542,
            "main_score": 0.24516
        },
        "en": {
            "accuracy": 0.35098,
            "accuracy_stderr": 0.016031331822403287,
            "f1": 0.34732656514357263,
            "f1_stderr": 0.014226778450962598,
            "main_score": 0.35098
        },
        "es": {
            "accuracy": 0.29097999999999996,
            "accuracy_stderr": 0.012527234331647189,
            "f1": 0.28620040162757093,
            "f1_stderr": 0.012203369115937461,
            "main_score": 0.29097999999999996
        },
        "evaluation_time": 458.68,
        "fr": {
            "accuracy": 0.27396,
            "accuracy_stderr": 0.019808240709361352,
            "f1": 0.27146888644986283,
            "f1_stderr": 0.019022355135502007,
            "main_score": 0.27396
        },
        "ja": {
            "accuracy": 0.21724000000000002,
            "accuracy_stderr": 0.010203842413522466,
            "f1": 0.2137230564276654,
            "f1_stderr": 0.01242184804054769,
            "main_score": 0.21724000000000002
        },
        "zh": {
            "accuracy": 0.23975999999999997,
            "accuracy_stderr": 0.012464605890279888,
            "f1": 0.23741137981755484,
            "f1_stderr": 0.01276710263102806,
            "main_score": 0.23975999999999997
        }
    },
    "validation": {
        "de": {
            "accuracy": 0.24498000000000003,
            "accuracy_stderr": 0.014547425889139284,
            "f1": 0.24228823605944716,
            "f1_stderr": 0.01555608008031814,
            "main_score": 0.24498000000000003
        },
        "en": {
            "accuracy": 0.35306,
            "accuracy_stderr": 0.0140618775417794,
            "f1": 0.3497346805075623,
            "f1_stderr": 0.012226804543653744,
            "main_score": 0.35306
        },
        "es": {
            "accuracy": 0.29144,
            "accuracy_stderr": 0.010667633289535211,
            "f1": 0.28677674422287674,
            "f1_stderr": 0.010680354361873633,
            "main_score": 0.29144
        },
        "evaluation_time": 451.13,
        "fr": {
            "accuracy": 0.27192,
            "accuracy_stderr": 0.01950327152043985,
            "f1": 0.2689250735785097,
            "f1_stderr": 0.01790250686687033,
            "main_score": 0.27192
        },
        "ja": {
            "accuracy": 0.21986,
            "accuracy_stderr": 0.008387156848420087,
            "f1": 0.21661564151001683,
            "f1_stderr": 0.009895205366288119,
            "main_score": 0.21986
        },
        "zh": {
            "accuracy": 0.23889999999999997,
            "accuracy_stderr": 0.007997124483212702,
            "f1": 0.2370552262227123,
            "f1_stderr": 0.008576952995377527,
            "main_score": 0.23889999999999997
        }
    },
    "mteb_version": "0.0.2",
    "mteb_dataset_name": "AmazonReviewsClassification",
    "dataset_revision": "c379a6705fec24a2493fa68e011692605f44e119"
}