{
  "dataset_revision": "31efe3c427b0bae9c22cbb560b8f15491cc6bed7",
  "mteb_dataset_name": "MassiveIntentClassification",
  "mteb_version": "1.0.3.dev0",
  "test": {
    "da": {
      "accuracy": 0.5468392737054472,
      "accuracy_stderr": 0.011536817088604236,
      "f1": 0.5039000994078626,
      "f1_stderr": 0.006389390451655079,
      "main_score": 0.5468392737054472
    },
    "evaluation_time": 207.42,
    "nb": {
      "accuracy": 0.45379959650302626,
      "accuracy_stderr": 0.012549015365232539,
      "f1": 0.4271217484445252,
      "f1_stderr": 0.011229470448672847,
      "main_score": 0.45379959650302626
    },
    "sv": {
      "accuracy": 0.40817081371889713,
      "accuracy_stderr": 0.006134927155520256,
      "f1": 0.3841954212578219,
      "f1_stderr": 0.008971093419384164,
      "main_score": 0.40817081371889713
    }
  },
  "validation": {
    "da": {
      "accuracy": 0.5582390555828824,
      "accuracy_stderr": 0.011006445314683956,
      "f1": 0.5151428356070841,
      "f1_stderr": 0.008861586598311164,
      "main_score": 0.5582390555828824
    },
    "evaluation_time": 189.86,
    "nb": {
      "accuracy": 0.46242006886374815,
      "accuracy_stderr": 0.013734839963464364,
      "f1": 0.43005970598528265,
      "f1_stderr": 0.013601150407790828,
      "main_score": 0.46242006886374815
    },
    "sv": {
      "accuracy": 0.4048696507624201,
      "accuracy_stderr": 0.009431994322367262,
      "f1": 0.38552685366886996,
      "f1_stderr": 0.007369118783426967,
      "main_score": 0.4048696507624201
    }
  }
}