{
    "test": {
        "de": {
            "accuracy": 0.6052127359819667,
            "accuracy_stderr": 0.012551527227328881,
            "f1": 0.42704929703392036,
            "f1_stderr": 0.010369009045735761,
            "main_score": 0.6052127359819667
        },
        "en": {
            "accuracy": 0.7068171454628362,
            "accuracy_stderr": 0.009424601836915932,
            "f1": 0.5257188062729139,
            "f1_stderr": 0.015312591660539578,
            "main_score": 0.7068171454628362
        },
        "es": {
            "accuracy": 0.6432288192128086,
            "accuracy_stderr": 0.010330753800565872,
            "f1": 0.4597360620220273,
            "f1_stderr": 0.011806363434591616,
            "main_score": 0.6432288192128086
        },
        "evaluation_time": 5581.07,
        "fr": {
            "accuracy": 0.5867209520826808,
            "accuracy_stderr": 0.017149393395627235,
            "f1": 0.42828449913045785,
            "f1_stderr": 0.010471346165480486,
            "main_score": 0.5867209520826808
        },
        "hi": {
            "accuracy": 0.4195769092864826,
            "accuracy_stderr": 0.01957887925029074,
            "f1": 0.28914127631431263,
            "f1_stderr": 0.010830982206998033,
            "main_score": 0.4195769092864826
        },
        "th": {
            "accuracy": 0.5528390596745028,
            "accuracy_stderr": 0.011074469399591896,
            "f1": 0.38338992505612896,
            "f1_stderr": 0.010174785247741841,
            "main_score": 0.5528390596745028
        }
    },
    "validation": {
        "de": {
            "accuracy": 0.6017630853994491,
            "accuracy_stderr": 0.014046453493859136,
            "f1": 0.38094362238194013,
            "f1_stderr": 0.010446387904213672,
            "main_score": 0.6017630853994491
        },
        "en": {
            "accuracy": 0.7174496644295302,
            "accuracy_stderr": 0.011202224904003405,
            "f1": 0.5380630593853175,
            "f1_stderr": 0.01523441100390282,
            "main_score": 0.7174496644295302
        },
        "es": {
            "accuracy": 0.6634577603143418,
            "accuracy_stderr": 0.011295283839362461,
            "f1": 0.44183658941719994,
            "f1_stderr": 0.013192401912497148,
            "main_score": 0.6634577603143418
        },
        "evaluation_time": 3932.59,
        "fr": {
            "accuracy": 0.5963221306277742,
            "accuracy_stderr": 0.013826624396607598,
            "f1": 0.40052018679365287,
            "f1_stderr": 0.007599681546611861,
            "main_score": 0.5963221306277742
        },
        "hi": {
            "accuracy": 0.3997017892644136,
            "accuracy_stderr": 0.018412215496717586,
            "f1": 0.26738485926945066,
            "f1_stderr": 0.008586423049120084,
            "main_score": 0.3997017892644136
        },
        "th": {
            "accuracy": 0.5450029922202274,
            "accuracy_stderr": 0.015685742432894414,
            "f1": 0.37233885191310057,
            "f1_stderr": 0.013972330835399397,
            "main_score": 0.5450029922202274
        }
    },
    "mteb_version": "0.0.2",
    "mteb_dataset_name": "MTOPIntentClassification",
    "dataset_revision": "6299947a7777084cc2d4b64235bf7190381ce755"
}