{
  "dataset_revision": "e8379541af4e31359cca9fbcf4b00f2671dba205",
  "mteb_dataset_name": "AmazonCounterfactualClassification",
  "mteb_version": "1.0.2",
  "test": {
    "de": {
      "accuracy": 0.6813704496788009,
      "accuracy_stderr": 0.024612230046488805,
      "ap": 0.806706553308835,
      "ap_stderr": 0.016448423566835353,
      "f1": 0.666468090116337,
      "f1_stderr": 0.023789062068309465,
      "main_score": 0.6813704496788009
    },
    "en": {
      "accuracy": 0.7180597014925373,
      "accuracy_stderr": 0.029021696280176958,
      "ap": 0.3370263085714158,
      "ap_stderr": 0.02417710728701376,
      "f1": 0.6544989712268762,
      "f1_stderr": 0.02527413136493158,
      "main_score": 0.7180597014925373
    },
    "en-ext": {
      "accuracy": 0.7296101949025486,
      "accuracy_stderr": 0.03403669840829533,
      "ap": 0.22209148737301962,
      "ap_stderr": 0.02260533582227882,
      "f1": 0.6042877542046691,
      "f1_stderr": 0.02743732409029803,
      "main_score": 0.7296101949025486
    },
    "evaluation_time": 18.51,
    "ja": {
      "accuracy": 0.6538543897216275,
      "accuracy_stderr": 0.023329909403407633,
      "ap": 0.16135900323284472,
      "ap_stderr": 0.010525937964819992,
      "f1": 0.5320720298606364,
      "f1_stderr": 0.015164867003193814,
      "main_score": 0.6538543897216275
    }
  },
  "validation": {
    "de": {
      "accuracy": 0.7025751072961374,
      "accuracy_stderr": 0.03413568897021146,
      "ap": 0.8195432721434586,
      "ap_stderr": 0.01855963604439618,
      "f1": 0.6865634929258071,
      "f1_stderr": 0.03146723543038631,
      "main_score": 0.7025751072961374
    },
    "en": {
      "accuracy": 0.7,
      "accuracy_stderr": 0.03497126720555219,
      "ap": 0.2959161477290448,
      "ap_stderr": 0.029220638413959707,
      "f1": 0.6269667251442652,
      "f1_stderr": 0.029306318263514188,
      "main_score": 0.7
    },
    "en-ext": {
      "accuracy": 0.6995495495495495,
      "accuracy_stderr": 0.040297626151526884,
      "ap": 0.19518020632989744,
      "ap_stderr": 0.027017350830205716,
      "f1": 0.574144858941869,
      "f1_stderr": 0.03304127410974345,
      "main_score": 0.6995495495495495
    },
    "evaluation_time": 21.33,
    "ja": {
      "accuracy": 0.6564377682403434,
      "accuracy_stderr": 0.030821457446519807,
      "ap": 0.14325859760128967,
      "ap_stderr": 0.013381854116899206,
      "f1": 0.5201054934403554,
      "f1_stderr": 0.019875056929254228,
      "main_score": 0.6564377682403434
    }
  }
}