{
  "dataset_revision": "31efe3c427b0bae9c22cbb560b8f15491cc6bed7",
  "mteb_dataset_name": "MassiveIntentClassification",
  "mteb_version": "1.0.3.dev0",
  "test": {
    "da": {
      "accuracy": 0.42286482851378615,
      "accuracy_stderr": 0.012383040984379165,
      "f1": 0.4045450712997618,
      "f1_stderr": 0.011450412901048949,
      "main_score": 0.42286482851378615
    },
    "evaluation_time": 526.53,
    "nb": {
      "accuracy": 0.4063214525891056,
      "accuracy_stderr": 0.014827865675936818,
      "f1": 0.3789828222208245,
      "f1_stderr": 0.012769924322664489,
      "main_score": 0.4063214525891056
    },
    "sv": {
      "accuracy": 0.4068594485541358,
      "accuracy_stderr": 0.016741613025560997,
      "f1": 0.3868424150540931,
      "f1_stderr": 0.014421268110823617,
      "main_score": 0.4068594485541358
    }
  },
  "validation": {
    "da": {
      "accuracy": 0.42193802262666014,
      "accuracy_stderr": 0.010585908699213985,
      "f1": 0.39827047128659854,
      "f1_stderr": 0.010040840264160085,
      "main_score": 0.42193802262666014
    },
    "evaluation_time": 469.62,
    "nb": {
      "accuracy": 0.39163797343826856,
      "accuracy_stderr": 0.012746913290575083,
      "f1": 0.3589018215724903,
      "f1_stderr": 0.014090109687736802,
      "main_score": 0.39163797343826856
    },
    "sv": {
      "accuracy": 0.40270536153467784,
      "accuracy_stderr": 0.018089093686221885,
      "f1": 0.3700168488921628,
      "f1_stderr": 0.015663183947950413,
      "main_score": 0.40270536153467784
    }
  }
}