Muennighoff committed
Commit 50006b3
1 Parent(s): 8d6ec5e

Update README.md

Files changed (1)
  1. README.md +68 -0
README.md CHANGED
@@ -14,6 +14,7 @@ model-index:
  name: MTEB AmazonCounterfactualClassification (en)
  config: en
  split: test
+ revision: 2d8a100785abf0ae21420d2a55b0c56e3e1ea996
  metrics:
  - type: accuracy
  value: 65.20895522388061
@@ -28,6 +29,7 @@ model-index:
  name: MTEB AmazonPolarityClassification
  config: default
  split: test
+ revision: 80714f8dcf8cefc218ef4f8c5a966dd83f75a0e1
  metrics:
  - type: accuracy
  value: 73.20565
@@ -42,6 +44,7 @@ model-index:
  name: MTEB AmazonReviewsClassification (en)
  config: en
  split: test
+ revision: c379a6705fec24a2493fa68e011692605f44e119
  metrics:
  - type: accuracy
  value: 34.955999999999996
@@ -54,6 +57,7 @@ model-index:
  name: MTEB ArguAna
  config: default
  split: test
+ revision: 5b3e3697907184a9b77a3c99ee9ea1a9cbb1e4e3
  metrics:
  - type: map_at_1
  value: 26.101999999999997
@@ -122,6 +126,7 @@ model-index:
  name: MTEB ArxivClusteringP2P
  config: default
  split: test
+ revision: 0bbdb47bcbe3a90093699aefeed338a0f28a7ee8
  metrics:
  - type: v_measure
  value: 43.384194916953774
@@ -132,6 +137,7 @@ model-index:
  name: MTEB ArxivClusteringS2S
  config: default
  split: test
+ revision: b73bd54100e5abfa6e3a23dcafb46fe4d2438dc3
  metrics:
  - type: v_measure
  value: 33.70962633433912
@@ -142,6 +148,7 @@ model-index:
  name: MTEB AskUbuntuDupQuestions
  config: default
  split: test
+ revision: 4d853f94cd57d85ec13805aeeac3ae3e5eb4c49c
  metrics:
  - type: map
  value: 58.133058996870076
@@ -154,6 +161,7 @@ model-index:
  name: MTEB BIOSSES
  config: default
  split: test
+ revision: 9ee918f184421b6bd48b78f6c714d86546106103
  metrics:
  - type: cos_sim_pearson
  value: 86.62153841660047
@@ -174,6 +182,7 @@ model-index:
  name: MTEB Banking77Classification
  config: default
  split: test
+ revision: 44fa15921b4c889113cc5df03dd4901b49161ab7
  metrics:
  - type: accuracy
  value: 82.05844155844156
@@ -186,6 +195,7 @@ model-index:
  name: MTEB BiorxivClusteringP2P
  config: default
  split: test
+ revision: 11d0121201d1f1f280e8cc8f3d98fb9c4d9f9c55
  metrics:
  - type: v_measure
  value: 35.05918333141837
@@ -196,6 +206,7 @@ model-index:
  name: MTEB BiorxivClusteringS2S
  config: default
  split: test
+ revision: c0fab014e1bcb8d3a5e31b2088972a1e01547dc1
  metrics:
  - type: v_measure
  value: 30.71055028830579
@@ -206,6 +217,7 @@ model-index:
  name: MTEB CQADupstackAndroidRetrieval
  config: default
  split: test
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
  metrics:
  - type: map_at_1
  value: 26.519
@@ -274,6 +286,7 @@ model-index:
  name: MTEB CQADupstackEnglishRetrieval
  config: default
  split: test
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
  metrics:
  - type: map_at_1
  value: 25.356
@@ -342,6 +355,7 @@ model-index:
  name: MTEB CQADupstackGamingRetrieval
  config: default
  split: test
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
  metrics:
  - type: map_at_1
  value: 32.759
@@ -410,6 +424,7 @@ model-index:
  name: MTEB CQADupstackGisRetrieval
  config: default
  split: test
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
  metrics:
  - type: map_at_1
  value: 18.962
@@ -478,6 +493,7 @@ model-index:
  name: MTEB CQADupstackMathematicaRetrieval
  config: default
  split: test
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
  metrics:
  - type: map_at_1
  value: 11.24
@@ -546,6 +562,7 @@ model-index:
  name: MTEB CQADupstackPhysicsRetrieval
  config: default
  split: test
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
  metrics:
  - type: map_at_1
  value: 23.012
@@ -614,6 +631,7 @@ model-index:
  name: MTEB CQADupstackProgrammersRetrieval
  config: default
  split: test
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
  metrics:
  - type: map_at_1
  value: 21.624
@@ -682,6 +700,7 @@ model-index:
  name: MTEB CQADupstackRetrieval
  config: default
  split: test
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
  metrics:
  - type: map_at_1
  value: 20.67566666666667
@@ -750,6 +769,7 @@ model-index:
  name: MTEB CQADupstackStatsRetrieval
  config: default
  split: test
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
  metrics:
  - type: map_at_1
  value: 18.34
@@ -818,6 +838,7 @@ model-index:
  name: MTEB CQADupstackTexRetrieval
  config: default
  split: test
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
  metrics:
  - type: map_at_1
  value: 12.327
@@ -886,6 +907,7 @@ model-index:
  name: MTEB CQADupstackUnixRetrieval
  config: default
  split: test
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
  metrics:
  - type: map_at_1
  value: 20.594
@@ -954,6 +976,7 @@ model-index:
  name: MTEB CQADupstackWebmastersRetrieval
  config: default
  split: test
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
  metrics:
  - type: map_at_1
  value: 20.855999999999998
@@ -1022,6 +1045,7 @@ model-index:
  name: MTEB CQADupstackWordpressRetrieval
  config: default
  split: test
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
  metrics:
  - type: map_at_1
  value: 16.519000000000002
@@ -1090,6 +1114,7 @@ model-index:
  name: MTEB ClimateFEVER
  config: default
  split: test
+ revision: 392b78eb68c07badcd7c2cd8f39af108375dfcce
  metrics:
  - type: map_at_1
  value: 10.363
@@ -1158,6 +1183,7 @@ model-index:
  name: MTEB DBPedia
  config: default
  split: test
+ revision: f097057d03ed98220bc7309ddb10b71a54d667d6
  metrics:
  - type: map_at_1
  value: 7.436
@@ -1226,6 +1252,7 @@ model-index:
  name: MTEB EmotionClassification
  config: default
  split: test
+ revision: 829147f8f75a25f005913200eb5ed41fae320aa1
  metrics:
  - type: accuracy
  value: 46.39
@@ -1238,6 +1265,7 @@ model-index:
  name: MTEB FEVER
  config: default
  split: test
+ revision: 1429cf27e393599b8b359b9b72c666f96b2525f9
  metrics:
  - type: map_at_1
  value: 50.916
@@ -1306,6 +1334,7 @@ model-index:
  name: MTEB FiQA2018
  config: default
  split: test
+ revision: 41b686a7f28c59bcaaa5791efd47c67c8ebe28be
  metrics:
  - type: map_at_1
  value: 13.568
@@ -1374,6 +1403,7 @@ model-index:
  name: MTEB HotpotQA
  config: default
  split: test
+ revision: 766870b35a1b9ca65e67a0d1913899973551fc6c
  metrics:
  - type: map_at_1
  value: 30.878
@@ -1442,6 +1472,7 @@ model-index:
  name: MTEB ImdbClassification
  config: default
  split: test
+ revision: 8d743909f834c38949e8323a8a6ce8721ea6c7f4
  metrics:
  - type: accuracy
  value: 64.04799999999999
@@ -1456,6 +1487,7 @@ model-index:
  name: MTEB MSMARCO
  config: default
  split: validation
+ revision: e6838a846e2408f22cf5cc337ebc83e0bcf77849
  metrics:
  - type: map_at_1
  value: 18.9
@@ -1524,6 +1556,7 @@ model-index:
  name: MTEB MTOPDomainClassification (en)
  config: en
  split: test
+ revision: a7e2a951126a26fc8c6a69f835f33a346ba259e3
  metrics:
  - type: accuracy
  value: 92.07706338349293
@@ -1536,6 +1569,7 @@ model-index:
  name: MTEB MTOPIntentClassification (en)
  config: en
  split: test
+ revision: 6299947a7777084cc2d4b64235bf7190381ce755
  metrics:
  - type: accuracy
  value: 71.18559051527589
@@ -1548,6 +1582,7 @@ model-index:
  name: MTEB MassiveIntentClassification (en)
  config: en
  split: test
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
  metrics:
  - type: accuracy
  value: 68.64828513786148
@@ -1560,6 +1595,7 @@ model-index:
  name: MTEB MassiveScenarioClassification (en)
  config: en
  split: test
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
  metrics:
  - type: accuracy
  value: 76.04236718224612
@@ -1572,6 +1608,7 @@ model-index:
  name: MTEB MedrxivClusteringP2P
  config: default
  split: test
+ revision: dcefc037ef84348e49b0d29109e891c01067226b
  metrics:
  - type: v_measure
  value: 32.0840369055247
@@ -1582,6 +1619,7 @@ model-index:
  name: MTEB MedrxivClusteringS2S
  config: default
  split: test
+ revision: 3cd0e71dfbe09d4de0f9e5ecba43e7ce280959dc
  metrics:
  - type: v_measure
  value: 29.448729560244537
@@ -1592,6 +1630,7 @@ model-index:
  name: MTEB MindSmallReranking
  config: default
  split: test
+ revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69
  metrics:
  - type: map
  value: 31.340856463122375
@@ -1604,6 +1643,7 @@ model-index:
  name: MTEB NFCorpus
  config: default
  split: test
+ revision: 7eb63cc0c1eb59324d709ebed25fcab851fa7610
  metrics:
  - type: map_at_1
  value: 5.526
@@ -1672,6 +1712,7 @@ model-index:
  name: MTEB NQ
  config: default
  split: test
+ revision: 6062aefc120bfe8ece5897809fb2e53bfe0d128c
  metrics:
  - type: map_at_1
  value: 23.467
@@ -1740,6 +1781,7 @@ model-index:
  name: MTEB QuoraRetrieval
  config: default
  split: test
+ revision: 6205996560df11e3a3da9ab4f926788fc30a7db4
  metrics:
  - type: map_at_1
  value: 67.51700000000001
@@ -1808,6 +1850,7 @@ model-index:
  name: MTEB RedditClustering
  config: default
  split: test
+ revision: b2805658ae38990172679479369a78b86de8c390
  metrics:
  - type: v_measure
  value: 48.225994608749915
@@ -1818,6 +1861,7 @@ model-index:
  name: MTEB RedditClusteringP2P
  config: default
  split: test
+ revision: 385e3cb46b4cfa89021f56c4380204149d0efe33
  metrics:
  - type: v_measure
  value: 53.17635557157765
@@ -1828,6 +1872,7 @@ model-index:
  name: MTEB SCIDOCS
  config: default
  split: test
+ revision: 5c59ef3e437a0a9651c8fe6fde943e7dce59fba5
  metrics:
  - type: map_at_1
  value: 3.988
@@ -1896,6 +1941,7 @@ model-index:
  name: MTEB SICK-R
  config: default
  split: test
+ revision: 20a6d6f312dd54037fe07a32d58e5e168867909d
  metrics:
  - type: cos_sim_pearson
  value: 77.29330379162072
@@ -1916,6 +1962,7 @@ model-index:
  name: MTEB STS12
  config: default
  split: test
+ revision: fdf84275bb8ce4b49c971d02e84dd1abc677a50f
  metrics:
  - type: cos_sim_pearson
  value: 75.40943196466576
@@ -1936,6 +1983,7 @@ model-index:
  name: MTEB STS13
  config: default
  split: test
+ revision: 1591bfcbe8c69d4bf7fe2a16e2451017832cafb9
  metrics:
  - type: cos_sim_pearson
  value: 77.08302398877518
@@ -1956,6 +2004,7 @@ model-index:
  name: MTEB STS14
  config: default
  split: test
+ revision: e2125984e7df8b7871f6ae9949cf6b6795e7c54b
  metrics:
  - type: cos_sim_pearson
  value: 77.46886184932168
@@ -1976,6 +2025,7 @@ model-index:
  name: MTEB STS15
  config: default
  split: test
+ revision: 1cd7298cac12a96a373b6a2f18738bb3e739a9b6
  metrics:
  - type: cos_sim_pearson
  value: 80.093017609484
@@ -1996,6 +2046,7 @@ model-index:
  name: MTEB STS16
  config: default
  split: test
+ revision: 360a0b2dff98700d09e634a01e1cc1624d3e42cd
  metrics:
  - type: cos_sim_pearson
  value: 77.98998347238742
@@ -2016,6 +2067,7 @@ model-index:
  name: MTEB STS17 (en-en)
  config: en-en
  split: test
+ revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
  metrics:
  - type: cos_sim_pearson
  value: 85.63510653472044
@@ -2036,6 +2088,7 @@ model-index:
  name: MTEB STS22 (en)
  config: en
  split: test
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
  metrics:
  - type: cos_sim_pearson
  value: 66.7257987615171
@@ -2056,6 +2109,7 @@ model-index:
  name: MTEB STSBenchmark
  config: default
  split: test
+ revision: 8913289635987208e6e7c72789e4be2fe94b6abd
  metrics:
  - type: cos_sim_pearson
  value: 79.37322139418472
@@ -2076,6 +2130,7 @@ model-index:
  name: MTEB SciDocsRR
  config: default
  split: test
+ revision: 56a6d0140cf6356659e2a7c1413286a774468d44
  metrics:
  - type: map
  value: 77.21233007730808
@@ -2088,6 +2143,7 @@ model-index:
  name: MTEB SciFact
  config: default
  split: test
+ revision: a75ae049398addde9b70f6b268875f5cbce99089
  metrics:
  - type: map_at_1
  value: 54.567
@@ -2156,6 +2212,7 @@ model-index:
  name: MTEB SprintDuplicateQuestions
  config: default
  split: test
+ revision: 5a8256d0dff9c4bd3be3ba3e67e4e70173f802ea
  metrics:
  - type: cos_sim_accuracy
  value: 99.74455445544554
@@ -2210,6 +2267,7 @@ model-index:
  name: MTEB StackExchangeClustering
  config: default
  split: test
+ revision: 70a89468f6dccacc6aa2b12a6eac54e74328f235
  metrics:
  - type: v_measure
  value: 60.85593925770172
@@ -2220,6 +2278,7 @@ model-index:
  name: MTEB StackExchangeClusteringP2P
  config: default
  split: test
+ revision: d88009ab563dd0b16cfaf4436abaf97fa3550cf0
  metrics:
  - type: v_measure
  value: 32.356772998237496
@@ -2230,6 +2289,7 @@ model-index:
  name: MTEB StackOverflowDupQuestions
  config: default
  split: test
+ revision: ef807ea29a75ec4f91b50fd4191cb4ee4589a9f9
  metrics:
  - type: map
  value: 49.320607035290735
@@ -2242,6 +2302,7 @@ model-index:
  name: MTEB SummEval
  config: default
  split: test
+ revision: 8753c2788d36c01fc6f05d03fe3f7268d63f9122
  metrics:
  - type: cos_sim_pearson
  value: 25.57602918901377
@@ -2258,6 +2319,7 @@ model-index:
  name: MTEB TRECCOVID
  config: default
  split: test
+ revision: 2c8041b2c07a79b6f7ba8fe6acc72e5d9f92d217
  metrics:
  - type: map_at_1
  value: 0.22100000000000003
@@ -2326,6 +2388,7 @@ model-index:
  name: MTEB Touche2020
  config: default
  split: test
+ revision: 527b7d77e16e343303e68cb6af11d6e18b9f7b3b
  metrics:
  - type: map_at_1
  value: 2.5
@@ -2394,6 +2457,7 @@ model-index:
  name: MTEB ToxicConversationsClassification
  config: default
  split: test
+ revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de
  metrics:
  - type: accuracy
  value: 68.7272
@@ -2408,6 +2472,7 @@ model-index:
  name: MTEB TweetSentimentExtractionClassification
  config: default
  split: test
+ revision: 62146448f05be9e52a36b8ee9936447ea787eede
  metrics:
  - type: accuracy
  value: 55.6677985285795
@@ -2420,6 +2485,7 @@ model-index:
  name: MTEB TwentyNewsgroupsClustering
  config: default
  split: test
+ revision: 091a54f9a36281ce7d6590ec8c75dd485e7e01d4
  metrics:
  - type: v_measure
  value: 40.05809562275603
@@ -2430,6 +2496,7 @@ model-index:
  name: MTEB TwitterSemEval2015
  config: default
  split: test
+ revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1
  metrics:
  - type: cos_sim_accuracy
  value: 82.76807534124099
@@ -2484,6 +2551,7 @@ model-index:
  name: MTEB TwitterURLCorpus
  config: default
  split: test
+ revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf
  metrics:
  - type: cos_sim_accuracy
  value: 87.97881010594946
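
Each added `revision` pins the corresponding MTEB evaluation dataset to a fixed commit on the Hugging Face Hub, so the scores in the model card can be traced to an exact dataset snapshot. Below is a minimal sketch of how such a pin is typically consumed with the `datasets` library; the dataset id `mteb/amazon_counterfactual` is an illustrative assumption (the diff records only name, config, split, and revision), while the config, split, and revision values come from the first entry above.

```python
# Minimal sketch (not part of this commit): load one MTEB evaluation set at the
# pinned dataset revision recorded in the model card.
from datasets import load_dataset

ds = load_dataset(
    "mteb/amazon_counterfactual",                         # assumed dataset id, for illustration
    "en",                                                 # config: en
    split="test",                                         # split: test
    revision="2d8a100785abf0ae21420d2a55b0c56e3e1ea996",  # pinned dataset commit from the diff
)
print(ds)
```

Pinning dataset revisions this way keeps the reported numbers reproducible even if a dataset repository is updated later.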