{
  "dataset_revision": "7d571f92784cd94a019292a1f45445077d0ef634",
  "mteb_dataset_name": "MassiveScenarioClassification",
  "mteb_version": "1.0.3.dev0",
  "test": {
    "da": {
      "accuracy": 0.5955615332885004,
      "accuracy_stderr": 0.01577699539582055,
      "f1": 0.5794034655925369,
      "f1_stderr": 0.013384379870283205,
      "main_score": 0.5955615332885004
    },
    "evaluation_time": 95.61,
    "nb": {
      "accuracy": 0.47552118359112316,
      "accuracy_stderr": 0.018872921265933037,
      "f1": 0.4640926498110063,
      "f1_stderr": 0.01476824680252287,
      "main_score": 0.47552118359112316
    },
    "sv": {
      "accuracy": 0.40141223940820436,
      "accuracy_stderr": 0.01808423841391776,
      "f1": 0.3822692785842296,
      "f1_stderr": 0.013452966848637938,
      "main_score": 0.40141223940820436
    }
  },
  "validation": {
    "da": {
      "accuracy": 0.594392523364486,
      "accuracy_stderr": 0.01934448803580611,
      "f1": 0.5796578284338747,
      "f1_stderr": 0.01787448188066123,
      "main_score": 0.594392523364486
    },
    "evaluation_time": 75.72,
    "nb": {
      "accuracy": 0.4664043285784555,
      "accuracy_stderr": 0.018904987337221752,
      "f1": 0.4617714246937276,
      "f1_stderr": 0.013988693425569182,
      "main_score": 0.4664043285784555
    },
    "sv": {
      "accuracy": 0.39596655189375307,
      "accuracy_stderr": 0.015554735170528184,
      "f1": 0.379545671770527,
      "f1_stderr": 0.013408542819112435,
      "main_score": 0.39596655189375307
    }
  }
}