{
  "dataset_revision": "1399c76144fd37290681b995c656ef9b2e06e26d",
  "mteb_dataset_name": "AmazonReviewsClassification",
  "mteb_version": "1.0.2",
  "test": {
    "de": {
      "accuracy": 0.35032,
      "accuracy_stderr": 0.024274793510965235,
      "f1": 0.33939764470643535,
      "f1_stderr": 0.02282070229582006,
      "main_score": 0.35032
    },
    "en": {
      "accuracy": 0.35454,
      "accuracy_stderr": 0.022858880112551442,
      "f1": 0.35019589142407004,
      "f1_stderr": 0.02054968398529996,
      "main_score": 0.35454
    },
    "es": {
      "accuracy": 0.36242,
      "accuracy_stderr": 0.010545691063178358,
      "f1": 0.3498879083946539,
      "f1_stderr": 0.01321622772317041,
      "main_score": 0.36242
    },
    "evaluation_time": 62.08,
    "fr": {
      "accuracy": 0.357,
      "accuracy_stderr": 0.022324336496299283,
      "f1": 0.3474911268048424,
      "f1_stderr": 0.022703709419436332,
      "main_score": 0.357
    },
    "ja": {
      "accuracy": 0.31076,
      "accuracy_stderr": 0.01528968279592484,
      "f1": 0.30525865114811995,
      "f1_stderr": 0.01261552083352967,
      "main_score": 0.31076
    },
    "zh": {
      "accuracy": 0.33894,
      "accuracy_stderr": 0.013566149048274542,
      "f1": 0.32638513658296125,
      "f1_stderr": 0.01590406062662379,
      "main_score": 0.33894
    }
  },
  "validation": {
    "de": {
      "accuracy": 0.35140000000000005,
      "accuracy_stderr": 0.03167813125801457,
      "f1": 0.33995664837547496,
      "f1_stderr": 0.029651988528941266,
      "main_score": 0.35140000000000005
    },
    "en": {
      "accuracy": 0.35572,
      "accuracy_stderr": 0.02285260597831241,
      "f1": 0.35182678284471613,
      "f1_stderr": 0.021081362317817858,
      "main_score": 0.35572
    },
    "es": {
      "accuracy": 0.36605999999999994,
      "accuracy_stderr": 0.01066547701699273,
      "f1": 0.35388988527330134,
      "f1_stderr": 0.011414525934042363,
      "main_score": 0.36605999999999994
    },
    "evaluation_time": 64.9,
    "fr": {
      "accuracy": 0.35146,
      "accuracy_stderr": 0.021103374137800814,
      "f1": 0.3420240760686955,
      "f1_stderr": 0.020450899496203788,
      "main_score": 0.35146
    },
    "ja": {
      "accuracy": 0.31432,
      "accuracy_stderr": 0.01748123565426655,
      "f1": 0.3085361473217993,
      "f1_stderr": 0.01532611041574829,
      "main_score": 0.31432
    },
    "zh": {
      "accuracy": 0.33161999999999997,
      "accuracy_stderr": 0.011498330313571618,
      "f1": 0.31936819236765673,
      "f1_stderr": 0.011252640258108498,
      "main_score": 0.33161999999999997
    }
  }
}