{
    "test": {
        "de": {
            "accuracy": 0.6037475345167653,
            "accuracy_stderr": 0.013460239029994478,
            "f1": 0.5845264937551703,
            "f1_stderr": 0.013734670702604352,
            "main_score": 0.6037475345167653
        },
        "en": {
            "accuracy": 0.8151846785225718,
            "accuracy_stderr": 0.011461847240672736,
            "f1": 0.81648869152345,
            "f1_stderr": 0.010871461184853548,
            "main_score": 0.8151846785225718
        },
        "es": {
            "accuracy": 0.6736824549699799,
            "accuracy_stderr": 0.01407356066869247,
            "f1": 0.6535927434998515,
            "f1_stderr": 0.01413855160644313,
            "main_score": 0.6736824549699799
        },
        "evaluation_time": 168.65,
        "fr": {
            "accuracy": 0.6312871907297212,
            "accuracy_stderr": 0.02473728093465062,
            "f1": 0.6137620329272278,
            "f1_stderr": 0.022860658324147663,
            "main_score": 0.6312871907297212
        },
        "hi": {
            "accuracy": 0.47045536034420943,
            "accuracy_stderr": 0.019444896819423425,
            "f1": 0.46203899126445613,
            "f1_stderr": 0.01750055815526211,
            "main_score": 0.47045536034420943
        },
        "th": {
            "accuracy": 0.5228209764918625,
            "accuracy_stderr": 0.030553280316660377,
            "f1": 0.5075489206473579,
            "f1_stderr": 0.02674134220327261,
            "main_score": 0.5228209764918625
        }
    },
    "validation": {
        "de": {
            "accuracy": 0.5924517906336088,
            "accuracy_stderr": 0.010919798846375912,
            "f1": 0.5786788916095694,
            "f1_stderr": 0.012936802476599582,
            "main_score": 0.5924517906336088
        },
        "en": {
            "accuracy": 0.8214317673378077,
            "accuracy_stderr": 0.01212263771708471,
            "f1": 0.8270274918463174,
            "f1_stderr": 0.011321392162566895,
            "main_score": 0.8214317673378077
        },
        "es": {
            "accuracy": 0.6726915520628685,
            "accuracy_stderr": 0.017154326092686127,
            "f1": 0.6610458353302849,
            "f1_stderr": 0.01789675352267575,
            "main_score": 0.6726915520628685
        },
        "evaluation_time": 105.77,
        "fr": {
            "accuracy": 0.6336715282181358,
            "accuracy_stderr": 0.026483031232693195,
            "f1": 0.6251191192231544,
            "f1_stderr": 0.02294236420005774,
            "main_score": 0.6336715282181358
        },
        "hi": {
            "accuracy": 0.4635685884691848,
            "accuracy_stderr": 0.01708910577573112,
            "f1": 0.46090151154109493,
            "f1_stderr": 0.01766759890065127,
            "main_score": 0.4635685884691848
        },
        "th": {
            "accuracy": 0.5128067025733094,
            "accuracy_stderr": 0.020409398758064606,
            "f1": 0.5018323841221017,
            "f1_stderr": 0.01679125970157935,
            "main_score": 0.5128067025733094
        }
    },
    "mteb_version": "0.0.2",
    "mteb_dataset_name": "MTOPDomainClassification",
    "dataset_revision": "a7e2a951126a26fc8c6a69f835f33a346ba259e3"
}