{
    "mteb_version": "0.0.2",
    "test": {
        "de": {
            "accuracy": 0.25908,
            "accuracy_stderr": 0.00901119303977004,
            "f1": 0.25538149526380544,
            "f1_stderr": 0.011673588756159523,
            "main_score": 0.25908
        },
        "en": {
            "accuracy": 0.30792,
            "accuracy_stderr": 0.016674819339351176,
            "f1": 0.3025456531557544,
            "f1_stderr": 0.01789579738481092,
            "main_score": 0.30792
        },
        "es": {
            "accuracy": 0.27634000000000003,
            "accuracy_stderr": 0.01265165601808711,
            "f1": 0.27287076320171727,
            "f1_stderr": 0.01282496330105531,
            "main_score": 0.27634000000000003
        },
        "evaluation_time": 41.37,
        "fr": {
            "accuracy": 0.27540000000000003,
            "accuracy_stderr": 0.017711013522664368,
            "f1": 0.2721486019130574,
            "f1_stderr": 0.016500699285740404,
            "main_score": 0.27540000000000003
        },
        "ja": {
            "accuracy": 0.23566000000000004,
            "accuracy_stderr": 0.01216817159642319,
            "f1": 0.23349265077190498,
            "f1_stderr": 0.012794109244058882,
            "main_score": 0.23566000000000004
        },
        "zh": {
            "accuracy": 0.2299,
            "accuracy_stderr": 0.011964698073917285,
            "f1": 0.2247175043426865,
            "f1_stderr": 0.011696175211431542,
            "main_score": 0.2299
        }
    },
    "mteb_dataset_name": "AmazonReviewsClassification",
    "dataset_revision": "c379a6705fec24a2493fa68e011692605f44e119"
}