{
  "dataset_revision": "7d571f92784cd94a019292a1f45445077d0ef634",
  "mteb_dataset_name": "MassiveScenarioClassification",
  "mteb_version": "1.0.3.dev0",
  "test": {
    "da": {
      "accuracy": 0.40437121721587077,
      "accuracy_stderr": 0.0217415653668484,
      "f1": 0.3723288747252761,
      "f1_stderr": 0.019283422341780296,
      "main_score": 0.40437121721587077
    },
    "evaluation_time": 95.92,
    "nb": {
      "accuracy": 0.35763281775386685,
      "accuracy_stderr": 0.02580146772548029,
      "f1": 0.3412599346756071,
      "f1_stderr": 0.0237566412933957,
      "main_score": 0.35763281775386685
    },
    "sv": {
      "accuracy": 0.5609280430396772,
      "accuracy_stderr": 0.024760603110657826,
      "f1": 0.547979935150986,
      "f1_stderr": 0.02207147319053608,
      "main_score": 0.5609280430396772
    }
  },
  "validation": {
    "da": {
      "accuracy": 0.39454008853910477,
      "accuracy_stderr": 0.021005746966988717,
      "f1": 0.3718680770887862,
      "f1_stderr": 0.019314996889392557,
      "main_score": 0.39454008853910477
    },
    "evaluation_time": 79.78,
    "nb": {
      "accuracy": 0.35125430398425966,
      "accuracy_stderr": 0.023884953034132034,
      "f1": 0.33753794511461643,
      "f1_stderr": 0.021553023585863994,
      "main_score": 0.35125430398425966
    },
    "sv": {
      "accuracy": 0.5571077225774717,
      "accuracy_stderr": 0.02686172612607831,
      "f1": 0.5479549119696884,
      "f1_stderr": 0.02461664558075998,
      "main_score": 0.5571077225774717
    }
  }
}