Muennighoff committed on
Commit
bb38d52
1 Parent(s): b06dfba

Update results/LaBSE/MassiveIntentClassification.json

Browse files
results/LaBSE/MassiveIntentClassification.json CHANGED
@@ -1,15 +1,365 @@
1
  {
2
- "dataset_revision": "31efe3c427b0bae9c22cbb560b8f15491cc6bed7",
3
- "mteb_dataset_name": "MassiveIntentClassification",
4
- "mteb_version": "1.1.1",
5
- "test": {
6
- "evaluation_time": 17.87,
7
- "pl": {
8
- "accuracy": 0.5971082716879623,
9
- "accuracy_stderr": 0.017186687184420946,
10
- "f1": 0.5592281243395716,
11
- "f1_stderr": 0.015609644382721707,
12
- "main_score": 0.5971082716879623
13
- }
14
- }
15
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  {
2
+ "mteb_version": "0.0.2",
3
+ "test": {
4
+ "af": {
5
+ "accuracy": 0.5612306657700067,
6
+ "accuracy_stderr": 0.013440186472898265,
7
+ "f1": 0.5358216966086121,
8
+ "f1_stderr": 0.010486904410670291,
9
+ "main_score": 0.5612306657700067
10
+ },
11
+ "am": {
12
+ "accuracy": 0.5570611970410222,
13
+ "accuracy_stderr": 0.01174177926911691,
14
+ "f1": 0.5244062979837288,
15
+ "f1_stderr": 0.014436554287472555,
16
+ "main_score": 0.5570611970410222
17
+ },
18
+ "ar": {
19
+ "accuracy": 0.5085743106926699,
20
+ "accuracy_stderr": 0.015479988139583763,
21
+ "f1": 0.462722443097226,
22
+ "f1_stderr": 0.012385569226408709,
23
+ "main_score": 0.5085743106926699
24
+ },
25
+ "az": {
26
+ "accuracy": 0.5897108271687961,
27
+ "accuracy_stderr": 0.016129227599473647,
28
+ "f1": 0.5620698514518965,
29
+ "f1_stderr": 0.01545358922223129,
30
+ "main_score": 0.5897108271687961
31
+ },
32
+ "bn": {
33
+ "accuracy": 0.5822461331540014,
34
+ "accuracy_stderr": 0.00960656161586536,
35
+ "f1": 0.5570781405282086,
36
+ "f1_stderr": 0.010470016821507137,
37
+ "main_score": 0.5822461331540014
38
+ },
39
+ "cy": {
40
+ "accuracy": 0.5015803631472765,
41
+ "accuracy_stderr": 0.010658620761678627,
42
+ "f1": 0.4649616640155205,
43
+ "f1_stderr": 0.010521408267101212,
44
+ "main_score": 0.5015803631472765
45
+ },
46
+ "da": {
47
+ "accuracy": 0.5824815063887021,
48
+ "accuracy_stderr": 0.01329436805671309,
49
+ "f1": 0.5522587192541039,
50
+ "f1_stderr": 0.015025445398245869,
51
+ "main_score": 0.5824815063887021
52
+ },
53
+ "de": {
54
+ "accuracy": 0.5621049092131809,
55
+ "accuracy_stderr": 0.015696707929600026,
56
+ "f1": 0.5267197009900415,
57
+ "f1_stderr": 0.01390739006837515,
58
+ "main_score": 0.5621049092131809
59
+ },
60
+ "el": {
61
+ "accuracy": 0.5702757229320781,
62
+ "accuracy_stderr": 0.016330692276732946,
63
+ "f1": 0.5375952775846228,
64
+ "f1_stderr": 0.01559077045432514,
65
+ "main_score": 0.5702757229320781
66
+ },
67
+ "en": {
68
+ "accuracy": 0.6145931405514458,
69
+ "accuracy_stderr": 0.013490020856815716,
70
+ "f1": 0.5925338353697315,
71
+ "f1_stderr": 0.011508523303786091,
72
+ "main_score": 0.6145931405514458
73
+ },
74
+ "es": {
75
+ "accuracy": 0.5832212508406187,
76
+ "accuracy_stderr": 0.00970148272385852,
77
+ "f1": 0.5725073011599459,
78
+ "f1_stderr": 0.011986458648242465,
79
+ "main_score": 0.5832212508406187
80
+ },
81
+ "evaluation_time": 557.62,
82
+ "fa": {
83
+ "accuracy": 0.6233355749831876,
84
+ "accuracy_stderr": 0.017493143766390443,
85
+ "f1": 0.5950266489784709,
86
+ "f1_stderr": 0.014875161031338064,
87
+ "main_score": 0.6233355749831876
88
+ },
89
+ "fi": {
90
+ "accuracy": 0.6012441156691325,
91
+ "accuracy_stderr": 0.01758445721869734,
92
+ "f1": 0.5757289757482428,
93
+ "f1_stderr": 0.016792664887178142,
94
+ "main_score": 0.6012441156691325
95
+ },
96
+ "fr": {
97
+ "accuracy": 0.6046738399462004,
98
+ "accuracy_stderr": 0.014049391807507904,
99
+ "f1": 0.5843108706697745,
100
+ "f1_stderr": 0.015868335599764864,
101
+ "main_score": 0.6046738399462004
102
+ },
103
+ "he": {
104
+ "accuracy": 0.56546738399462,
105
+ "accuracy_stderr": 0.011801328473946665,
106
+ "f1": 0.5306816954288663,
107
+ "f1_stderr": 0.012434011710602471,
108
+ "main_score": 0.56546738399462
109
+ },
110
+ "hi": {
111
+ "accuracy": 0.5940147948890384,
112
+ "accuracy_stderr": 0.010370604879406354,
113
+ "f1": 0.5683809659220684,
114
+ "f1_stderr": 0.01368488308932195,
115
+ "main_score": 0.5940147948890384
116
+ },
117
+ "hu": {
118
+ "accuracy": 0.5951916610625421,
119
+ "accuracy_stderr": 0.011325551829228732,
120
+ "f1": 0.5634840803786808,
121
+ "f1_stderr": 0.014012032413505226,
122
+ "main_score": 0.5951916610625421
123
+ },
124
+ "hy": {
125
+ "accuracy": 0.562037659717552,
126
+ "accuracy_stderr": 0.013929942089090338,
127
+ "f1": 0.5313604649630413,
128
+ "f1_stderr": 0.01408050114576085,
129
+ "main_score": 0.562037659717552
130
+ },
131
+ "id": {
132
+ "accuracy": 0.6112306657700067,
133
+ "accuracy_stderr": 0.015025259100881967,
134
+ "f1": 0.5877570455836887,
135
+ "f1_stderr": 0.013840500555853834,
136
+ "main_score": 0.6112306657700067
137
+ },
138
+ "is": {
139
+ "accuracy": 0.5490248823133826,
140
+ "accuracy_stderr": 0.01202224633391224,
141
+ "f1": 0.5226105505369143,
142
+ "f1_stderr": 0.015218166744086662,
143
+ "main_score": 0.5490248823133826
144
+ },
145
+ "it": {
146
+ "accuracy": 0.598285137861466,
147
+ "accuracy_stderr": 0.010445396802049828,
148
+ "f1": 0.5725168641613417,
149
+ "f1_stderr": 0.012076955578834528,
150
+ "main_score": 0.598285137861466
151
+ },
152
+ "ja": {
153
+ "accuracy": 0.6311365164761265,
154
+ "accuracy_stderr": 0.014717502568441695,
155
+ "f1": 0.6094159906452462,
156
+ "f1_stderr": 0.01124624417371721,
157
+ "main_score": 0.6311365164761265
158
+ },
159
+ "jv": {
160
+ "accuracy": 0.5097848016139879,
161
+ "accuracy_stderr": 0.014019100583288223,
162
+ "f1": 0.4822786745328559,
163
+ "f1_stderr": 0.011893132974704962,
164
+ "main_score": 0.5097848016139879
165
+ },
166
+ "ka": {
167
+ "accuracy": 0.4834566240753194,
168
+ "accuracy_stderr": 0.01633747570696889,
169
+ "f1": 0.4639326870637087,
170
+ "f1_stderr": 0.01247367254273298,
171
+ "main_score": 0.4834566240753194
172
+ },
173
+ "km": {
174
+ "accuracy": 0.4855413584398117,
175
+ "accuracy_stderr": 0.017862939592922206,
176
+ "f1": 0.4537848387287295,
177
+ "f1_stderr": 0.009523794100206508,
178
+ "main_score": 0.4855413584398117
179
+ },
180
+ "kn": {
181
+ "accuracy": 0.562373907195696,
182
+ "accuracy_stderr": 0.011027943352605016,
183
+ "f1": 0.5291568802305611,
184
+ "f1_stderr": 0.010643445803447104,
185
+ "main_score": 0.562373907195696
186
+ },
187
+ "ko": {
188
+ "accuracy": 0.6098520511096166,
189
+ "accuracy_stderr": 0.012574082233649144,
190
+ "f1": 0.5950151814064207,
191
+ "f1_stderr": 0.012316004540479531,
192
+ "main_score": 0.6098520511096166
193
+ },
194
+ "lv": {
195
+ "accuracy": 0.5709818426361801,
196
+ "accuracy_stderr": 0.015320086034823086,
197
+ "f1": 0.5473663098355673,
198
+ "f1_stderr": 0.01758522953532659,
199
+ "main_score": 0.5709818426361801
200
+ },
201
+ "ml": {
202
+ "accuracy": 0.5790854068594486,
203
+ "accuracy_stderr": 0.012264324277117346,
204
+ "f1": 0.5469417576260037,
205
+ "f1_stderr": 0.016150220908837616,
206
+ "main_score": 0.5790854068594486
207
+ },
208
+ "mn": {
209
+ "accuracy": 0.5850369872225958,
210
+ "accuracy_stderr": 0.014632329716333195,
211
+ "f1": 0.5582753773432731,
212
+ "f1_stderr": 0.01489691440397955,
213
+ "main_score": 0.5850369872225958
214
+ },
215
+ "ms": {
216
+ "accuracy": 0.5860121049092132,
217
+ "accuracy_stderr": 0.01518245798426847,
218
+ "f1": 0.5593631713341884,
219
+ "f1_stderr": 0.01417879954692122,
220
+ "main_score": 0.5860121049092132
221
+ },
222
+ "my": {
223
+ "accuracy": 0.5735036987222595,
224
+ "accuracy_stderr": 0.01392421878126612,
225
+ "f1": 0.5406872293372428,
226
+ "f1_stderr": 0.0126232059716452,
227
+ "main_score": 0.5735036987222595
228
+ },
229
+ "nb": {
230
+ "accuracy": 0.5790517821116341,
231
+ "accuracy_stderr": 0.013544096836515878,
232
+ "f1": 0.5530143553035799,
233
+ "f1_stderr": 0.010954019815373053,
234
+ "main_score": 0.5790517821116341
235
+ },
236
+ "nl": {
237
+ "accuracy": 0.5937121721587089,
238
+ "accuracy_stderr": 0.007841201792964088,
239
+ "f1": 0.5667832466814933,
240
+ "f1_stderr": 0.010203767408945623,
241
+ "main_score": 0.5937121721587089
242
+ },
243
+ "pl": {
244
+ "accuracy": 0.5971082716879623,
245
+ "accuracy_stderr": 0.017186687184420946,
246
+ "f1": 0.5592281243395716,
247
+ "f1_stderr": 0.015609644382721707,
248
+ "main_score": 0.5971082716879623
249
+ },
250
+ "pt": {
251
+ "accuracy": 0.6016139878950908,
252
+ "accuracy_stderr": 0.008012009063714964,
253
+ "f1": 0.5889624762651047,
254
+ "f1_stderr": 0.011166107593853575,
255
+ "main_score": 0.6016139878950908
256
+ },
257
+ "ro": {
258
+ "accuracy": 0.5791526563550773,
259
+ "accuracy_stderr": 0.010338940169994695,
260
+ "f1": 0.5617963999840901,
261
+ "f1_stderr": 0.011994600182347773,
262
+ "main_score": 0.5791526563550773
263
+ },
264
+ "ru": {
265
+ "accuracy": 0.6067249495628783,
266
+ "accuracy_stderr": 0.010971358982936441,
267
+ "f1": 0.5795712477045238,
268
+ "f1_stderr": 0.008949259431186126,
269
+ "main_score": 0.6067249495628783
270
+ },
271
+ "sl": {
272
+ "accuracy": 0.5936785474108944,
273
+ "accuracy_stderr": 0.011152269980081092,
274
+ "f1": 0.5733645022629082,
275
+ "f1_stderr": 0.012101026367563827,
276
+ "main_score": 0.5936785474108944
277
+ },
278
+ "sq": {
279
+ "accuracy": 0.580329522528581,
280
+ "accuracy_stderr": 0.014561068826937765,
281
+ "f1": 0.5474808365304084,
282
+ "f1_stderr": 0.017803216093163897,
283
+ "main_score": 0.580329522528581
284
+ },
285
+ "sv": {
286
+ "accuracy": 0.5966039004707465,
287
+ "accuracy_stderr": 0.011471953857008974,
288
+ "f1": 0.5657088621093,
289
+ "f1_stderr": 0.015056395109668616,
290
+ "main_score": 0.5966039004707465
291
+ },
292
+ "sw": {
293
+ "accuracy": 0.5162071284465366,
294
+ "accuracy_stderr": 0.012697338784849677,
295
+ "f1": 0.4872803907346097,
296
+ "f1_stderr": 0.01305188616552868,
297
+ "main_score": 0.5162071284465366
298
+ },
299
+ "ta": {
300
+ "accuracy": 0.5504371217215872,
301
+ "accuracy_stderr": 0.008161883751687311,
302
+ "f1": 0.5198442799786219,
303
+ "f1_stderr": 0.011015545578145537,
304
+ "main_score": 0.5504371217215872
305
+ },
306
+ "te": {
307
+ "accuracy": 0.5831876260928043,
308
+ "accuracy_stderr": 0.011753472761503295,
309
+ "f1": 0.5520606642688273,
310
+ "f1_stderr": 0.01314932810180897,
311
+ "main_score": 0.5831876260928043
312
+ },
313
+ "th": {
314
+ "accuracy": 0.5658036314727639,
315
+ "accuracy_stderr": 0.009267324547097177,
316
+ "f1": 0.5574220144038379,
317
+ "f1_stderr": 0.010598154952192414,
318
+ "main_score": 0.5658036314727639
319
+ },
320
+ "tl": {
321
+ "accuracy": 0.5527572293207801,
322
+ "accuracy_stderr": 0.011293561222686774,
323
+ "f1": 0.5263329528670508,
324
+ "f1_stderr": 0.011954257824619789,
325
+ "main_score": 0.5527572293207801
326
+ },
327
+ "tr": {
328
+ "accuracy": 0.6090786819098857,
329
+ "accuracy_stderr": 0.013632917539651452,
330
+ "f1": 0.574834952665804,
331
+ "f1_stderr": 0.012683738895067095,
332
+ "main_score": 0.6090786819098857
333
+ },
334
+ "ur": {
335
+ "accuracy": 0.5670477471418964,
336
+ "accuracy_stderr": 0.01621131355943765,
337
+ "f1": 0.5387589431307667,
338
+ "f1_stderr": 0.01918631093725334,
339
+ "main_score": 0.5670477471418964
340
+ },
341
+ "vi": {
342
+ "accuracy": 0.5666778749159381,
343
+ "accuracy_stderr": 0.01104003450055037,
344
+ "f1": 0.5411447173756183,
345
+ "f1_stderr": 0.011177426115609497,
346
+ "main_score": 0.5666778749159381
347
+ },
348
+ "zh-CN": {
349
+ "accuracy": 0.6386348352387357,
350
+ "accuracy_stderr": 0.01226732002160834,
351
+ "f1": 0.6091533295184478,
352
+ "f1_stderr": 0.009225505741718603,
353
+ "main_score": 0.6386348352387357
354
+ },
355
+ "zh-TW": {
356
+ "accuracy": 0.5950571620712846,
357
+ "accuracy_stderr": 0.011335131405517016,
358
+ "f1": 0.5825705458922187,
359
+ "f1_stderr": 0.010144597056238235,
360
+ "main_score": 0.5950571620712846
361
+ }
362
+ },
363
+ "mteb_dataset_name": "MassiveIntentClassification",
364
+ "dataset_revision": "072a486a144adf7f4479a4a0dddb2152e161e1ea"
365
+ }