Sentence Similarity
sentence-transformers
PyTorch
TensorFlow
Rust
Safetensors
Transformers
English
bert
feature-extraction
Inference Endpoints
5 papers
Muennighoff commited on
Commit
6d61935
1 Parent(s): 97ef89b
Files changed (1) hide show
  1. README.md +96 -2
README.md CHANGED
@@ -39,6 +39,7 @@ model-index:
39
  name: MTEB AmazonCounterfactualClassification (en)
40
  config: en
41
  split: test
 
42
  metrics:
43
  - type: accuracy
44
  value: 64.14925373134331
@@ -53,6 +54,7 @@ model-index:
53
  name: MTEB AmazonPolarityClassification
54
  config: default
55
  split: test
 
56
  metrics:
57
  - type: accuracy
58
  value: 62.582975
@@ -67,6 +69,7 @@ model-index:
67
  name: MTEB AmazonReviewsClassification (en)
68
  config: en
69
  split: test
 
70
  metrics:
71
  - type: accuracy
72
  value: 31.785999999999998
@@ -79,6 +82,7 @@ model-index:
79
  name: MTEB ArguAna
80
  config: default
81
  split: test
 
82
  metrics:
83
  - type: map_at_1
84
  value: 25.605
@@ -135,6 +139,7 @@ model-index:
135
  name: MTEB ArxivClusteringP2P
136
  config: default
137
  split: test
 
138
  metrics:
139
  - type: v_measure
140
  value: 46.54595079050156
@@ -145,6 +150,7 @@ model-index:
145
  name: MTEB ArxivClusteringS2S
146
  config: default
147
  split: test
 
148
  metrics:
149
  - type: v_measure
150
  value: 37.85709823840442
@@ -155,6 +161,7 @@ model-index:
155
  name: MTEB AskUbuntuDupQuestions
156
  config: default
157
  split: test
 
158
  metrics:
159
  - type: map
160
  value: 63.47681681237331
@@ -167,6 +174,7 @@ model-index:
167
  name: MTEB BIOSSES
168
  config: default
169
  split: test
 
170
  metrics:
171
  - type: cos_sim_pearson
172
  value: 84.41897516342782
@@ -187,6 +195,7 @@ model-index:
187
  name: MTEB Banking77Classification
188
  config: default
189
  split: test
 
190
  metrics:
191
  - type: accuracy
192
  value: 79.75000000000001
@@ -199,6 +208,7 @@ model-index:
199
  name: MTEB BiorxivClusteringP2P
200
  config: default
201
  split: test
 
202
  metrics:
203
  - type: v_measure
204
  value: 38.48301914135123
@@ -209,6 +219,7 @@ model-index:
209
  name: MTEB BiorxivClusteringS2S
210
  config: default
211
  split: test
 
212
  metrics:
213
  - type: v_measure
214
  value: 33.170209943399804
@@ -219,6 +230,7 @@ model-index:
219
  name: MTEB CQADupstackAndroidRetrieval
220
  config: default
221
  split: test
 
222
  metrics:
223
  - type: map_at_1
224
  value: 34.660000000000004
@@ -275,6 +287,7 @@ model-index:
275
  name: MTEB CQADupstackEnglishRetrieval
276
  config: default
277
  split: test
 
278
  metrics:
279
  - type: map_at_1
280
  value: 31.180999999999997
@@ -331,6 +344,7 @@ model-index:
331
  name: MTEB CQADupstackGamingRetrieval
332
  config: default
333
  split: test
 
334
  metrics:
335
  - type: map_at_1
336
  value: 38.732
@@ -387,6 +401,7 @@ model-index:
387
  name: MTEB CQADupstackGisRetrieval
388
  config: default
389
  split: test
 
390
  metrics:
391
  - type: map_at_1
392
  value: 26.837
@@ -443,6 +458,7 @@ model-index:
443
  name: MTEB CQADupstackMathematicaRetrieval
444
  config: default
445
  split: test
 
446
  metrics:
447
  - type: map_at_1
448
  value: 15.142
@@ -499,6 +515,7 @@ model-index:
499
  name: MTEB CQADupstackPhysicsRetrieval
500
  config: default
501
  split: test
 
502
  metrics:
503
  - type: map_at_1
504
  value: 29.142000000000003
@@ -555,6 +572,7 @@ model-index:
555
  name: MTEB CQADupstackProgrammersRetrieval
556
  config: default
557
  split: test
 
558
  metrics:
559
  - type: map_at_1
560
  value: 22.081999999999997
@@ -611,6 +629,7 @@ model-index:
611
  name: MTEB CQADupstackRetrieval
612
  config: default
613
  split: test
 
614
  metrics:
615
  - type: map_at_1
616
  value: 25.825750000000003
@@ -667,6 +686,7 @@ model-index:
667
  name: MTEB CQADupstackStatsRetrieval
668
  config: default
669
  split: test
 
670
  metrics:
671
  - type: map_at_1
672
  value: 23.147000000000002
@@ -723,6 +743,7 @@ model-index:
723
  name: MTEB CQADupstackTexRetrieval
724
  config: default
725
  split: test
 
726
  metrics:
727
  - type: map_at_1
728
  value: 17.573
@@ -779,6 +800,7 @@ model-index:
779
  name: MTEB CQADupstackUnixRetrieval
780
  config: default
781
  split: test
 
782
  metrics:
783
  - type: map_at_1
784
  value: 25.393
@@ -835,6 +857,7 @@ model-index:
835
  name: MTEB CQADupstackWebmastersRetrieval
836
  config: default
837
  split: test
 
838
  metrics:
839
  - type: map_at_1
840
  value: 25.219
@@ -891,6 +914,7 @@ model-index:
891
  name: MTEB CQADupstackWordpressRetrieval
892
  config: default
893
  split: test
 
894
  metrics:
895
  - type: map_at_1
896
  value: 20.801
@@ -947,6 +971,7 @@ model-index:
947
  name: MTEB ClimateFEVER
948
  config: default
949
  split: test
 
950
  metrics:
951
  - type: map_at_1
952
  value: 7.9159999999999995
@@ -1003,6 +1028,7 @@ model-index:
1003
  name: MTEB DBPedia
1004
  config: default
1005
  split: test
 
1006
  metrics:
1007
  - type: map_at_1
1008
  value: 7.172000000000001
@@ -1059,6 +1085,7 @@ model-index:
1059
  name: MTEB EmotionClassification
1060
  config: default
1061
  split: test
 
1062
  metrics:
1063
  - type: accuracy
1064
  value: 38.43
@@ -1071,6 +1098,7 @@ model-index:
1071
  name: MTEB FEVER
1072
  config: default
1073
  split: test
 
1074
  metrics:
1075
  - type: map_at_1
1076
  value: 34.076
@@ -1127,6 +1155,7 @@ model-index:
1127
  name: MTEB FiQA2018
1128
  config: default
1129
  split: test
 
1130
  metrics:
1131
  - type: map_at_1
1132
  value: 17.14
@@ -1183,6 +1212,7 @@ model-index:
1183
  name: MTEB HotpotQA
1184
  config: default
1185
  split: test
 
1186
  metrics:
1187
  - type: map_at_1
1188
  value: 27.717999999999996
@@ -1239,6 +1269,7 @@ model-index:
1239
  name: MTEB ImdbClassification
1240
  config: default
1241
  split: test
 
1242
  metrics:
1243
  - type: accuracy
1244
  value: 60.6612
@@ -1253,6 +1284,7 @@ model-index:
1253
  name: MTEB MSMARCO
1254
  config: default
1255
  split: dev
 
1256
  metrics:
1257
  - type: map_at_1
1258
  value: 18.715
@@ -1309,6 +1341,7 @@ model-index:
1309
  name: MTEB MTOPDomainClassification (en)
1310
  config: en
1311
  split: test
 
1312
  metrics:
1313
  - type: accuracy
1314
  value: 91.56178750569997
@@ -1321,6 +1354,7 @@ model-index:
1321
  name: MTEB MTOPIntentClassification (en)
1322
  config: en
1323
  split: test
 
1324
  metrics:
1325
  - type: accuracy
1326
  value: 62.18194254445966
@@ -1333,6 +1367,7 @@ model-index:
1333
  name: MTEB MassiveIntentClassification (en)
1334
  config: en
1335
  split: test
 
1336
  metrics:
1337
  - type: accuracy
1338
  value: 67.404169468729
@@ -1345,6 +1380,7 @@ model-index:
1345
  name: MTEB MassiveScenarioClassification (en)
1346
  config: en
1347
  split: test
 
1348
  metrics:
1349
  - type: accuracy
1350
  value: 75.75655682582381
@@ -1357,6 +1393,7 @@ model-index:
1357
  name: MTEB MedrxivClusteringP2P
1358
  config: default
1359
  split: test
 
1360
  metrics:
1361
  - type: v_measure
1362
  value: 34.40873490143895
@@ -1367,6 +1404,7 @@ model-index:
1367
  name: MTEB MedrxivClusteringS2S
1368
  config: default
1369
  split: test
 
1370
  metrics:
1371
  - type: v_measure
1372
  value: 32.292207500530914
@@ -1377,6 +1415,7 @@ model-index:
1377
  name: MTEB MindSmallReranking
1378
  config: default
1379
  split: test
 
1380
  metrics:
1381
  - type: map
1382
  value: 30.798042020200267
@@ -1389,6 +1428,7 @@ model-index:
1389
  name: MTEB NFCorpus
1390
  config: default
1391
  split: test
 
1392
  metrics:
1393
  - type: map_at_1
1394
  value: 4.3229999999999995
@@ -1445,6 +1485,7 @@ model-index:
1445
  name: MTEB NQ
1446
  config: default
1447
  split: test
 
1448
  metrics:
1449
  - type: map_at_1
1450
  value: 22.644000000000002
@@ -1501,6 +1542,7 @@ model-index:
1501
  name: MTEB QuoraRetrieval
1502
  config: default
1503
  split: test
 
1504
  metrics:
1505
  - type: map_at_1
1506
  value: 69.76
@@ -1557,6 +1599,7 @@ model-index:
1557
  name: MTEB RedditClustering
1558
  config: default
1559
  split: test
 
1560
  metrics:
1561
  - type: v_measure
1562
  value: 50.66969274980475
@@ -1567,6 +1610,7 @@ model-index:
1567
  name: MTEB RedditClusteringP2P
1568
  config: default
1569
  split: test
 
1570
  metrics:
1571
  - type: v_measure
1572
  value: 54.15176409632201
@@ -1577,6 +1621,7 @@ model-index:
1577
  name: MTEB SCIDOCS
1578
  config: default
1579
  split: test
 
1580
  metrics:
1581
  - type: map_at_1
1582
  value: 4.853
@@ -1633,6 +1678,7 @@ model-index:
1633
  name: MTEB SICK-R
1634
  config: default
1635
  split: test
 
1636
  metrics:
1637
  - type: cos_sim_pearson
1638
  value: 83.91595834747078
@@ -1653,6 +1699,7 @@ model-index:
1653
  name: MTEB STS12
1654
  config: default
1655
  split: test
 
1656
  metrics:
1657
  - type: cos_sim_pearson
1658
  value: 81.35998585185463
@@ -1673,6 +1720,7 @@ model-index:
1673
  name: MTEB STS13
1674
  config: default
1675
  split: test
 
1676
  metrics:
1677
  - type: cos_sim_pearson
1678
  value: 80.15192226911441
@@ -1693,6 +1741,7 @@ model-index:
1693
  name: MTEB STS14
1694
  config: default
1695
  split: test
 
1696
  metrics:
1697
  - type: cos_sim_pearson
1698
  value: 80.80137749134273
@@ -1713,6 +1762,7 @@ model-index:
1713
  name: MTEB STS15
1714
  config: default
1715
  split: test
 
1716
  metrics:
1717
  - type: cos_sim_pearson
1718
  value: 84.73605558012511
@@ -1733,6 +1783,7 @@ model-index:
1733
  name: MTEB STS16
1734
  config: default
1735
  split: test
 
1736
  metrics:
1737
  - type: cos_sim_pearson
1738
  value: 77.93667023468089
@@ -1753,6 +1804,7 @@ model-index:
1753
  name: MTEB STS17 (ko-ko)
1754
  config: ko-ko
1755
  split: test
 
1756
  metrics:
1757
  - type: cos_sim_pearson
1758
  value: 38.02556869388448
@@ -1773,6 +1825,7 @@ model-index:
1773
  name: MTEB STS17 (ar-ar)
1774
  config: ar-ar
1775
  split: test
 
1776
  metrics:
1777
  - type: cos_sim_pearson
1778
  value: 50.19733275252325
@@ -1793,6 +1846,7 @@ model-index:
1793
  name: MTEB STS17 (en-ar)
1794
  config: en-ar
1795
  split: test
 
1796
  metrics:
1797
  - type: cos_sim_pearson
1798
  value: -5.346248828225636
@@ -1813,6 +1867,7 @@ model-index:
1813
  name: MTEB STS17 (en-de)
1814
  config: en-de
1815
  split: test
 
1816
  metrics:
1817
  - type: cos_sim_pearson
1818
  value: 37.0025013483991
@@ -1833,6 +1888,7 @@ model-index:
1833
  name: MTEB STS17 (en-en)
1834
  config: en-en
1835
  split: test
 
1836
  metrics:
1837
  - type: cos_sim_pearson
1838
  value: 88.02366672243191
@@ -1853,6 +1909,7 @@ model-index:
1853
  name: MTEB STS17 (en-tr)
1854
  config: en-tr
1855
  split: test
 
1856
  metrics:
1857
  - type: cos_sim_pearson
1858
  value: 6.928208810824121
@@ -1873,6 +1930,7 @@ model-index:
1873
  name: MTEB STS17 (es-en)
1874
  config: es-en
1875
  split: test
 
1876
  metrics:
1877
  - type: cos_sim_pearson
1878
  value: 17.49363358339176
@@ -1893,6 +1951,7 @@ model-index:
1893
  name: MTEB STS17 (es-es)
1894
  config: es-es
1895
  split: test
 
1896
  metrics:
1897
  - type: cos_sim_pearson
1898
  value: 77.04145671005833
@@ -1913,6 +1972,7 @@ model-index:
1913
  name: MTEB STS17 (fr-en)
1914
  config: fr-en
1915
  split: test
 
1916
  metrics:
1917
  - type: cos_sim_pearson
1918
  value: 37.9961687967439
@@ -1933,6 +1993,7 @@ model-index:
1933
  name: MTEB STS17 (it-en)
1934
  config: it-en
1935
  split: test
 
1936
  metrics:
1937
  - type: cos_sim_pearson
1938
  value: 26.739991134614716
@@ -1953,6 +2014,7 @@ model-index:
1953
  name: MTEB STS17 (nl-en)
1954
  config: nl-en
1955
  split: test
 
1956
  metrics:
1957
  - type: cos_sim_pearson
1958
  value: 32.71761762628939
@@ -1973,6 +2035,7 @@ model-index:
1973
  name: MTEB STS22 (en)
1974
  config: en
1975
  split: test
 
1976
  metrics:
1977
  - type: cos_sim_pearson
1978
  value: 67.09882753030891
@@ -1993,6 +2056,7 @@ model-index:
1993
  name: MTEB STS22 (de)
1994
  config: de
1995
  split: test
 
1996
  metrics:
1997
  - type: cos_sim_pearson
1998
  value: 26.596033966146116
@@ -2013,6 +2077,7 @@ model-index:
2013
  name: MTEB STS22 (es)
2014
  config: es
2015
  split: test
 
2016
  metrics:
2017
  - type: cos_sim_pearson
2018
  value: 44.33815143022264
@@ -2033,6 +2098,7 @@ model-index:
2033
  name: MTEB STS22 (pl)
2034
  config: pl
2035
  split: test
 
2036
  metrics:
2037
  - type: cos_sim_pearson
2038
  value: 8.000336595206134
@@ -2053,6 +2119,7 @@ model-index:
2053
  name: MTEB STS22 (tr)
2054
  config: tr
2055
  split: test
 
2056
  metrics:
2057
  - type: cos_sim_pearson
2058
  value: 20.597902459466386
@@ -2073,6 +2140,7 @@ model-index:
2073
  name: MTEB STS22 (ar)
2074
  config: ar
2075
  split: test
 
2076
  metrics:
2077
  - type: cos_sim_pearson
2078
  value: 5.006610360999117
@@ -2093,6 +2161,7 @@ model-index:
2093
  name: MTEB STS22 (ru)
2094
  config: ru
2095
  split: test
 
2096
  metrics:
2097
  - type: cos_sim_pearson
2098
  value: 0.03100716792233671
@@ -2113,6 +2182,7 @@ model-index:
2113
  name: MTEB STS22 (zh)
2114
  config: zh
2115
  split: test
 
2116
  metrics:
2117
  - type: cos_sim_pearson
2118
  value: 23.127885111414432
@@ -2133,6 +2203,7 @@ model-index:
2133
  name: MTEB STS22 (fr)
2134
  config: fr
2135
  split: test
 
2136
  metrics:
2137
  - type: cos_sim_pearson
2138
  value: 70.64344773137496
@@ -2153,6 +2224,7 @@ model-index:
2153
  name: MTEB STS22 (de-en)
2154
  config: de-en
2155
  split: test
 
2156
  metrics:
2157
  - type: cos_sim_pearson
2158
  value: 47.54531236654512
@@ -2173,6 +2245,7 @@ model-index:
2173
  name: MTEB STS22 (es-en)
2174
  config: es-en
2175
  split: test
 
2176
  metrics:
2177
  - type: cos_sim_pearson
2178
  value: 49.93601240112664
@@ -2193,6 +2266,7 @@ model-index:
2193
  name: MTEB STS22 (it)
2194
  config: it
2195
  split: test
 
2196
  metrics:
2197
  - type: cos_sim_pearson
2198
  value: 57.4312835830767
@@ -2213,6 +2287,7 @@ model-index:
2213
  name: MTEB STS22 (pl-en)
2214
  config: pl-en
2215
  split: test
 
2216
  metrics:
2217
  - type: cos_sim_pearson
2218
  value: 35.08730015173829
@@ -2233,6 +2308,7 @@ model-index:
2233
  name: MTEB STS22 (zh-en)
2234
  config: zh-en
2235
  split: test
 
2236
  metrics:
2237
  - type: cos_sim_pearson
2238
  value: 37.41111741585122
@@ -2253,6 +2329,7 @@ model-index:
2253
  name: MTEB STS22 (es-it)
2254
  config: es-it
2255
  split: test
 
2256
  metrics:
2257
  - type: cos_sim_pearson
2258
  value: 42.568537775842245
@@ -2273,6 +2350,7 @@ model-index:
2273
  name: MTEB STS22 (de-fr)
2274
  config: de-fr
2275
  split: test
 
2276
  metrics:
2277
  - type: cos_sim_pearson
2278
  value: 26.472844763068938
@@ -2293,6 +2371,7 @@ model-index:
2293
  name: MTEB STS22 (de-pl)
2294
  config: de-pl
2295
  split: test
 
2296
  metrics:
2297
  - type: cos_sim_pearson
2298
  value: 7.026566971631159
@@ -2313,6 +2392,7 @@ model-index:
2313
  name: MTEB STS22 (fr-pl)
2314
  config: fr-pl
2315
  split: test
 
2316
  metrics:
2317
  - type: cos_sim_pearson
2318
  value: 54.305559003968206
@@ -2333,6 +2413,7 @@ model-index:
2333
  name: MTEB STSBenchmark
2334
  config: default
2335
  split: test
 
2336
  metrics:
2337
  - type: cos_sim_pearson
2338
  value: 82.7406424090513
@@ -2353,6 +2434,7 @@ model-index:
2353
  name: MTEB SciDocsRR
2354
  config: default
2355
  split: test
 
2356
  metrics:
2357
  - type: map
2358
  value: 87.11941318470207
@@ -2365,6 +2447,7 @@ model-index:
2365
  name: MTEB SciFact
2366
  config: default
2367
  split: test
 
2368
  metrics:
2369
  - type: map_at_1
2370
  value: 48.233
@@ -2421,6 +2504,7 @@ model-index:
2421
  name: MTEB SprintDuplicateQuestions
2422
  config: default
2423
  split: test
 
2424
  metrics:
2425
  - type: cos_sim_accuracy
2426
  value: 99.78514851485149
@@ -2475,6 +2559,7 @@ model-index:
2475
  name: MTEB StackExchangeClustering
2476
  config: default
2477
  split: test
 
2478
  metrics:
2479
  - type: v_measure
2480
  value: 53.361421662036015
@@ -2485,6 +2570,7 @@ model-index:
2485
  name: MTEB StackExchangeClusteringP2P
2486
  config: default
2487
  split: test
 
2488
  metrics:
2489
  - type: v_measure
2490
  value: 38.001825627800976
@@ -2495,6 +2581,7 @@ model-index:
2495
  name: MTEB StackOverflowDupQuestions
2496
  config: default
2497
  split: test
 
2498
  metrics:
2499
  - type: map
2500
  value: 50.762134384316084
@@ -2507,6 +2594,7 @@ model-index:
2507
  name: MTEB SummEval
2508
  config: default
2509
  split: test
 
2510
  metrics:
2511
  - type: cos_sim_pearson
2512
  value: 30.508420334813536
@@ -2523,6 +2611,7 @@ model-index:
2523
  name: MTEB TRECCOVID
2524
  config: default
2525
  split: test
 
2526
  metrics:
2527
  - type: map_at_1
2528
  value: 0.169
@@ -2579,6 +2668,7 @@ model-index:
2579
  name: MTEB Touche2020
2580
  config: default
2581
  split: test
 
2582
  metrics:
2583
  - type: map_at_1
2584
  value: 1.49
@@ -2635,6 +2725,7 @@ model-index:
2635
  name: MTEB ToxicConversationsClassification
2636
  config: default
2637
  split: test
 
2638
  metrics:
2639
  - type: accuracy
2640
  value: 66.9918
@@ -2649,6 +2740,7 @@ model-index:
2649
  name: MTEB TweetSentimentExtractionClassification
2650
  config: default
2651
  split: test
 
2652
  metrics:
2653
  - type: accuracy
2654
  value: 55.410299943406905
@@ -2661,6 +2753,7 @@ model-index:
2661
  name: MTEB TwentyNewsgroupsClustering
2662
  config: default
2663
  split: test
 
2664
  metrics:
2665
  - type: v_measure
2666
  value: 46.860271427647774
@@ -2671,6 +2764,7 @@ model-index:
2671
  name: MTEB TwitterSemEval2015
2672
  config: default
2673
  split: test
 
2674
  metrics:
2675
  - type: cos_sim_accuracy
2676
  value: 84.1151576563152
@@ -2725,6 +2819,7 @@ model-index:
2725
  name: MTEB TwitterURLCorpus
2726
  config: default
2727
  split: test
 
2728
  metrics:
2729
  - type: cos_sim_accuracy
2730
  value: 88.2504754142896
@@ -2774,7 +2869,6 @@ model-index:
2774
  value: 76.57057281916886
2775
  ---
2776
 
2777
-
2778
  # all-MiniLM-L6-v2
2779
  This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 384 dimensional dense vector space and can be used for tasks like clustering or semantic search.
2780
 
@@ -2836,7 +2930,7 @@ print(sentence_embeddings)
2836
 
2837
  ## Evaluation Results
2838
 
2839
- For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=sentence-transformers/all-MiniLM-L6-v2)
2840
 
2841
  ------
2842
 
 
39
  name: MTEB AmazonCounterfactualClassification (en)
40
  config: en
41
  split: test
42
+ revision: 2d8a100785abf0ae21420d2a55b0c56e3e1ea996
43
  metrics:
44
  - type: accuracy
45
  value: 64.14925373134331
 
54
  name: MTEB AmazonPolarityClassification
55
  config: default
56
  split: test
57
+ revision: 80714f8dcf8cefc218ef4f8c5a966dd83f75a0e1
58
  metrics:
59
  - type: accuracy
60
  value: 62.582975
 
69
  name: MTEB AmazonReviewsClassification (en)
70
  config: en
71
  split: test
72
+ revision: c379a6705fec24a2493fa68e011692605f44e119
73
  metrics:
74
  - type: accuracy
75
  value: 31.785999999999998
 
82
  name: MTEB ArguAna
83
  config: default
84
  split: test
85
+ revision: 5b3e3697907184a9b77a3c99ee9ea1a9cbb1e4e3
86
  metrics:
87
  - type: map_at_1
88
  value: 25.605
 
139
  name: MTEB ArxivClusteringP2P
140
  config: default
141
  split: test
142
+ revision: 0bbdb47bcbe3a90093699aefeed338a0f28a7ee8
143
  metrics:
144
  - type: v_measure
145
  value: 46.54595079050156
 
150
  name: MTEB ArxivClusteringS2S
151
  config: default
152
  split: test
153
+ revision: b73bd54100e5abfa6e3a23dcafb46fe4d2438dc3
154
  metrics:
155
  - type: v_measure
156
  value: 37.85709823840442
 
161
  name: MTEB AskUbuntuDupQuestions
162
  config: default
163
  split: test
164
+ revision: 4d853f94cd57d85ec13805aeeac3ae3e5eb4c49c
165
  metrics:
166
  - type: map
167
  value: 63.47681681237331
 
174
  name: MTEB BIOSSES
175
  config: default
176
  split: test
177
+ revision: 9ee918f184421b6bd48b78f6c714d86546106103
178
  metrics:
179
  - type: cos_sim_pearson
180
  value: 84.41897516342782
 
195
  name: MTEB Banking77Classification
196
  config: default
197
  split: test
198
+ revision: 44fa15921b4c889113cc5df03dd4901b49161ab7
199
  metrics:
200
  - type: accuracy
201
  value: 79.75000000000001
 
208
  name: MTEB BiorxivClusteringP2P
209
  config: default
210
  split: test
211
+ revision: 11d0121201d1f1f280e8cc8f3d98fb9c4d9f9c55
212
  metrics:
213
  - type: v_measure
214
  value: 38.48301914135123
 
219
  name: MTEB BiorxivClusteringS2S
220
  config: default
221
  split: test
222
+ revision: c0fab014e1bcb8d3a5e31b2088972a1e01547dc1
223
  metrics:
224
  - type: v_measure
225
  value: 33.170209943399804
 
230
  name: MTEB CQADupstackAndroidRetrieval
231
  config: default
232
  split: test
233
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
234
  metrics:
235
  - type: map_at_1
236
  value: 34.660000000000004
 
287
  name: MTEB CQADupstackEnglishRetrieval
288
  config: default
289
  split: test
290
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
291
  metrics:
292
  - type: map_at_1
293
  value: 31.180999999999997
 
344
  name: MTEB CQADupstackGamingRetrieval
345
  config: default
346
  split: test
347
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
348
  metrics:
349
  - type: map_at_1
350
  value: 38.732
 
401
  name: MTEB CQADupstackGisRetrieval
402
  config: default
403
  split: test
404
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
405
  metrics:
406
  - type: map_at_1
407
  value: 26.837
 
458
  name: MTEB CQADupstackMathematicaRetrieval
459
  config: default
460
  split: test
461
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
462
  metrics:
463
  - type: map_at_1
464
  value: 15.142
 
515
  name: MTEB CQADupstackPhysicsRetrieval
516
  config: default
517
  split: test
518
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
519
  metrics:
520
  - type: map_at_1
521
  value: 29.142000000000003
 
572
  name: MTEB CQADupstackProgrammersRetrieval
573
  config: default
574
  split: test
575
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
576
  metrics:
577
  - type: map_at_1
578
  value: 22.081999999999997
 
629
  name: MTEB CQADupstackRetrieval
630
  config: default
631
  split: test
632
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
633
  metrics:
634
  - type: map_at_1
635
  value: 25.825750000000003
 
686
  name: MTEB CQADupstackStatsRetrieval
687
  config: default
688
  split: test
689
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
690
  metrics:
691
  - type: map_at_1
692
  value: 23.147000000000002
 
743
  name: MTEB CQADupstackTexRetrieval
744
  config: default
745
  split: test
746
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
747
  metrics:
748
  - type: map_at_1
749
  value: 17.573
 
800
  name: MTEB CQADupstackUnixRetrieval
801
  config: default
802
  split: test
803
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
804
  metrics:
805
  - type: map_at_1
806
  value: 25.393
 
857
  name: MTEB CQADupstackWebmastersRetrieval
858
  config: default
859
  split: test
860
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
861
  metrics:
862
  - type: map_at_1
863
  value: 25.219
 
914
  name: MTEB CQADupstackWordpressRetrieval
915
  config: default
916
  split: test
917
+ revision: 2b9f5791698b5be7bc5e10535c8690f20043c3db
918
  metrics:
919
  - type: map_at_1
920
  value: 20.801
 
971
  name: MTEB ClimateFEVER
972
  config: default
973
  split: test
974
+ revision: 392b78eb68c07badcd7c2cd8f39af108375dfcce
975
  metrics:
976
  - type: map_at_1
977
  value: 7.9159999999999995
 
1028
  name: MTEB DBPedia
1029
  config: default
1030
  split: test
1031
+ revision: f097057d03ed98220bc7309ddb10b71a54d667d6
1032
  metrics:
1033
  - type: map_at_1
1034
  value: 7.172000000000001
 
1085
  name: MTEB EmotionClassification
1086
  config: default
1087
  split: test
1088
+ revision: 829147f8f75a25f005913200eb5ed41fae320aa1
1089
  metrics:
1090
  - type: accuracy
1091
  value: 38.43
 
1098
  name: MTEB FEVER
1099
  config: default
1100
  split: test
1101
+ revision: 1429cf27e393599b8b359b9b72c666f96b2525f9
1102
  metrics:
1103
  - type: map_at_1
1104
  value: 34.076
 
1155
  name: MTEB FiQA2018
1156
  config: default
1157
  split: test
1158
+ revision: 41b686a7f28c59bcaaa5791efd47c67c8ebe28be
1159
  metrics:
1160
  - type: map_at_1
1161
  value: 17.14
 
1212
  name: MTEB HotpotQA
1213
  config: default
1214
  split: test
1215
+ revision: 766870b35a1b9ca65e67a0d1913899973551fc6c
1216
  metrics:
1217
  - type: map_at_1
1218
  value: 27.717999999999996
 
1269
  name: MTEB ImdbClassification
1270
  config: default
1271
  split: test
1272
+ revision: 8d743909f834c38949e8323a8a6ce8721ea6c7f4
1273
  metrics:
1274
  - type: accuracy
1275
  value: 60.6612
 
1284
  name: MTEB MSMARCO
1285
  config: default
1286
  split: dev
1287
+ revision: e6838a846e2408f22cf5cc337ebc83e0bcf77849
1288
  metrics:
1289
  - type: map_at_1
1290
  value: 18.715
 
1341
  name: MTEB MTOPDomainClassification (en)
1342
  config: en
1343
  split: test
1344
+ revision: a7e2a951126a26fc8c6a69f835f33a346ba259e3
1345
  metrics:
1346
  - type: accuracy
1347
  value: 91.56178750569997
 
1354
  name: MTEB MTOPIntentClassification (en)
1355
  config: en
1356
  split: test
1357
+ revision: 6299947a7777084cc2d4b64235bf7190381ce755
1358
  metrics:
1359
  - type: accuracy
1360
  value: 62.18194254445966
 
1367
  name: MTEB MassiveIntentClassification (en)
1368
  config: en
1369
  split: test
1370
+ revision: 072a486a144adf7f4479a4a0dddb2152e161e1ea
1371
  metrics:
1372
  - type: accuracy
1373
  value: 67.404169468729
 
1380
  name: MTEB MassiveScenarioClassification (en)
1381
  config: en
1382
  split: test
1383
+ revision: 7d571f92784cd94a019292a1f45445077d0ef634
1384
  metrics:
1385
  - type: accuracy
1386
  value: 75.75655682582381
 
1393
  name: MTEB MedrxivClusteringP2P
1394
  config: default
1395
  split: test
1396
+ revision: dcefc037ef84348e49b0d29109e891c01067226b
1397
  metrics:
1398
  - type: v_measure
1399
  value: 34.40873490143895
 
1404
  name: MTEB MedrxivClusteringS2S
1405
  config: default
1406
  split: test
1407
+ revision: 3cd0e71dfbe09d4de0f9e5ecba43e7ce280959dc
1408
  metrics:
1409
  - type: v_measure
1410
  value: 32.292207500530914
 
1415
  name: MTEB MindSmallReranking
1416
  config: default
1417
  split: test
1418
+ revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69
1419
  metrics:
1420
  - type: map
1421
  value: 30.798042020200267
 
1428
  name: MTEB NFCorpus
1429
  config: default
1430
  split: test
1431
+ revision: 7eb63cc0c1eb59324d709ebed25fcab851fa7610
1432
  metrics:
1433
  - type: map_at_1
1434
  value: 4.3229999999999995
 
1485
  name: MTEB NQ
1486
  config: default
1487
  split: test
1488
+ revision: 6062aefc120bfe8ece5897809fb2e53bfe0d128c
1489
  metrics:
1490
  - type: map_at_1
1491
  value: 22.644000000000002
 
1542
  name: MTEB QuoraRetrieval
1543
  config: default
1544
  split: test
1545
+ revision: 6205996560df11e3a3da9ab4f926788fc30a7db4
1546
  metrics:
1547
  - type: map_at_1
1548
  value: 69.76
 
1599
  name: MTEB RedditClustering
1600
  config: default
1601
  split: test
1602
+ revision: b2805658ae38990172679479369a78b86de8c390
1603
  metrics:
1604
  - type: v_measure
1605
  value: 50.66969274980475
 
1610
  name: MTEB RedditClusteringP2P
1611
  config: default
1612
  split: test
1613
+ revision: 385e3cb46b4cfa89021f56c4380204149d0efe33
1614
  metrics:
1615
  - type: v_measure
1616
  value: 54.15176409632201
 
1621
  name: MTEB SCIDOCS
1622
  config: default
1623
  split: test
1624
+ revision: 5c59ef3e437a0a9651c8fe6fde943e7dce59fba5
1625
  metrics:
1626
  - type: map_at_1
1627
  value: 4.853
 
1678
  name: MTEB SICK-R
1679
  config: default
1680
  split: test
1681
+ revision: 20a6d6f312dd54037fe07a32d58e5e168867909d
1682
  metrics:
1683
  - type: cos_sim_pearson
1684
  value: 83.91595834747078
 
1699
  name: MTEB STS12
1700
  config: default
1701
  split: test
1702
+ revision: fdf84275bb8ce4b49c971d02e84dd1abc677a50f
1703
  metrics:
1704
  - type: cos_sim_pearson
1705
  value: 81.35998585185463
 
1720
  name: MTEB STS13
1721
  config: default
1722
  split: test
1723
+ revision: 1591bfcbe8c69d4bf7fe2a16e2451017832cafb9
1724
  metrics:
1725
  - type: cos_sim_pearson
1726
  value: 80.15192226911441
 
1741
  name: MTEB STS14
1742
  config: default
1743
  split: test
1744
+ revision: e2125984e7df8b7871f6ae9949cf6b6795e7c54b
1745
  metrics:
1746
  - type: cos_sim_pearson
1747
  value: 80.80137749134273
 
1762
  name: MTEB STS15
1763
  config: default
1764
  split: test
1765
+ revision: 1cd7298cac12a96a373b6a2f18738bb3e739a9b6
1766
  metrics:
1767
  - type: cos_sim_pearson
1768
  value: 84.73605558012511
 
1783
  name: MTEB STS16
1784
  config: default
1785
  split: test
1786
+ revision: 360a0b2dff98700d09e634a01e1cc1624d3e42cd
1787
  metrics:
1788
  - type: cos_sim_pearson
1789
  value: 77.93667023468089
 
1804
  name: MTEB STS17 (ko-ko)
1805
  config: ko-ko
1806
  split: test
1807
+ revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
1808
  metrics:
1809
  - type: cos_sim_pearson
1810
  value: 38.02556869388448
 
1825
  name: MTEB STS17 (ar-ar)
1826
  config: ar-ar
1827
  split: test
1828
+ revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
1829
  metrics:
1830
  - type: cos_sim_pearson
1831
  value: 50.19733275252325
 
1846
  name: MTEB STS17 (en-ar)
1847
  config: en-ar
1848
  split: test
1849
+ revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
1850
  metrics:
1851
  - type: cos_sim_pearson
1852
  value: -5.346248828225636
 
1867
  name: MTEB STS17 (en-de)
1868
  config: en-de
1869
  split: test
1870
+ revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
1871
  metrics:
1872
  - type: cos_sim_pearson
1873
  value: 37.0025013483991
 
1888
  name: MTEB STS17 (en-en)
1889
  config: en-en
1890
  split: test
1891
+ revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
1892
  metrics:
1893
  - type: cos_sim_pearson
1894
  value: 88.02366672243191
 
1909
  name: MTEB STS17 (en-tr)
1910
  config: en-tr
1911
  split: test
1912
+ revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
1913
  metrics:
1914
  - type: cos_sim_pearson
1915
  value: 6.928208810824121
 
1930
  name: MTEB STS17 (es-en)
1931
  config: es-en
1932
  split: test
1933
+ revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
1934
  metrics:
1935
  - type: cos_sim_pearson
1936
  value: 17.49363358339176
 
1951
  name: MTEB STS17 (es-es)
1952
  config: es-es
1953
  split: test
1954
+ revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
1955
  metrics:
1956
  - type: cos_sim_pearson
1957
  value: 77.04145671005833
 
1972
  name: MTEB STS17 (fr-en)
1973
  config: fr-en
1974
  split: test
1975
+ revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
1976
  metrics:
1977
  - type: cos_sim_pearson
1978
  value: 37.9961687967439
 
1993
  name: MTEB STS17 (it-en)
1994
  config: it-en
1995
  split: test
1996
+ revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
1997
  metrics:
1998
  - type: cos_sim_pearson
1999
  value: 26.739991134614716
 
2014
  name: MTEB STS17 (nl-en)
2015
  config: nl-en
2016
  split: test
2017
+ revision: 9fc37e8c632af1c87a3d23e685d49552a02582a0
2018
  metrics:
2019
  - type: cos_sim_pearson
2020
  value: 32.71761762628939
 
2035
  name: MTEB STS22 (en)
2036
  config: en
2037
  split: test
2038
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
2039
  metrics:
2040
  - type: cos_sim_pearson
2041
  value: 67.09882753030891
 
2056
  name: MTEB STS22 (de)
2057
  config: de
2058
  split: test
2059
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
2060
  metrics:
2061
  - type: cos_sim_pearson
2062
  value: 26.596033966146116
 
2077
  name: MTEB STS22 (es)
2078
  config: es
2079
  split: test
2080
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
2081
  metrics:
2082
  - type: cos_sim_pearson
2083
  value: 44.33815143022264
 
2098
  name: MTEB STS22 (pl)
2099
  config: pl
2100
  split: test
2101
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
2102
  metrics:
2103
  - type: cos_sim_pearson
2104
  value: 8.000336595206134
 
2119
  name: MTEB STS22 (tr)
2120
  config: tr
2121
  split: test
2122
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
2123
  metrics:
2124
  - type: cos_sim_pearson
2125
  value: 20.597902459466386
 
2140
  name: MTEB STS22 (ar)
2141
  config: ar
2142
  split: test
2143
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
2144
  metrics:
2145
  - type: cos_sim_pearson
2146
  value: 5.006610360999117
 
2161
  name: MTEB STS22 (ru)
2162
  config: ru
2163
  split: test
2164
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
2165
  metrics:
2166
  - type: cos_sim_pearson
2167
  value: 0.03100716792233671
 
2182
  name: MTEB STS22 (zh)
2183
  config: zh
2184
  split: test
2185
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
2186
  metrics:
2187
  - type: cos_sim_pearson
2188
  value: 23.127885111414432
 
2203
  name: MTEB STS22 (fr)
2204
  config: fr
2205
  split: test
2206
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
2207
  metrics:
2208
  - type: cos_sim_pearson
2209
  value: 70.64344773137496
 
2224
  name: MTEB STS22 (de-en)
2225
  config: de-en
2226
  split: test
2227
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
2228
  metrics:
2229
  - type: cos_sim_pearson
2230
  value: 47.54531236654512
 
2245
  name: MTEB STS22 (es-en)
2246
  config: es-en
2247
  split: test
2248
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
2249
  metrics:
2250
  - type: cos_sim_pearson
2251
  value: 49.93601240112664
 
2266
  name: MTEB STS22 (it)
2267
  config: it
2268
  split: test
2269
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
2270
  metrics:
2271
  - type: cos_sim_pearson
2272
  value: 57.4312835830767
 
2287
  name: MTEB STS22 (pl-en)
2288
  config: pl-en
2289
  split: test
2290
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
2291
  metrics:
2292
  - type: cos_sim_pearson
2293
  value: 35.08730015173829
 
2308
  name: MTEB STS22 (zh-en)
2309
  config: zh-en
2310
  split: test
2311
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
2312
  metrics:
2313
  - type: cos_sim_pearson
2314
  value: 37.41111741585122
 
2329
  name: MTEB STS22 (es-it)
2330
  config: es-it
2331
  split: test
2332
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
2333
  metrics:
2334
  - type: cos_sim_pearson
2335
  value: 42.568537775842245
 
2350
  name: MTEB STS22 (de-fr)
2351
  config: de-fr
2352
  split: test
2353
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
2354
  metrics:
2355
  - type: cos_sim_pearson
2356
  value: 26.472844763068938
 
2371
  name: MTEB STS22 (de-pl)
2372
  config: de-pl
2373
  split: test
2374
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
2375
  metrics:
2376
  - type: cos_sim_pearson
2377
  value: 7.026566971631159
 
2392
  name: MTEB STS22 (fr-pl)
2393
  config: fr-pl
2394
  split: test
2395
+ revision: 2de6ce8c1921b71a755b262c6b57fef195dd7906
2396
  metrics:
2397
  - type: cos_sim_pearson
2398
  value: 54.305559003968206
 
2413
  name: MTEB STSBenchmark
2414
  config: default
2415
  split: test
2416
+ revision: 8913289635987208e6e7c72789e4be2fe94b6abd
2417
  metrics:
2418
  - type: cos_sim_pearson
2419
  value: 82.7406424090513
 
2434
  name: MTEB SciDocsRR
2435
  config: default
2436
  split: test
2437
+ revision: 56a6d0140cf6356659e2a7c1413286a774468d44
2438
  metrics:
2439
  - type: map
2440
  value: 87.11941318470207
 
2447
  name: MTEB SciFact
2448
  config: default
2449
  split: test
2450
+ revision: a75ae049398addde9b70f6b268875f5cbce99089
2451
  metrics:
2452
  - type: map_at_1
2453
  value: 48.233
 
2504
  name: MTEB SprintDuplicateQuestions
2505
  config: default
2506
  split: test
2507
+ revision: 5a8256d0dff9c4bd3be3ba3e67e4e70173f802ea
2508
  metrics:
2509
  - type: cos_sim_accuracy
2510
  value: 99.78514851485149
 
2559
  name: MTEB StackExchangeClustering
2560
  config: default
2561
  split: test
2562
+ revision: 70a89468f6dccacc6aa2b12a6eac54e74328f235
2563
  metrics:
2564
  - type: v_measure
2565
  value: 53.361421662036015
 
2570
  name: MTEB StackExchangeClusteringP2P
2571
  config: default
2572
  split: test
2573
+ revision: d88009ab563dd0b16cfaf4436abaf97fa3550cf0
2574
  metrics:
2575
  - type: v_measure
2576
  value: 38.001825627800976
 
2581
  name: MTEB StackOverflowDupQuestions
2582
  config: default
2583
  split: test
2584
+ revision: ef807ea29a75ec4f91b50fd4191cb4ee4589a9f9
2585
  metrics:
2586
  - type: map
2587
  value: 50.762134384316084
 
2594
  name: MTEB SummEval
2595
  config: default
2596
  split: test
2597
+ revision: 8753c2788d36c01fc6f05d03fe3f7268d63f9122
2598
  metrics:
2599
  - type: cos_sim_pearson
2600
  value: 30.508420334813536
 
2611
  name: MTEB TRECCOVID
2612
  config: default
2613
  split: test
2614
+ revision: 2c8041b2c07a79b6f7ba8fe6acc72e5d9f92d217
2615
  metrics:
2616
  - type: map_at_1
2617
  value: 0.169
 
2668
  name: MTEB Touche2020
2669
  config: default
2670
  split: test
2671
+ revision: 527b7d77e16e343303e68cb6af11d6e18b9f7b3b
2672
  metrics:
2673
  - type: map_at_1
2674
  value: 1.49
 
2725
  name: MTEB ToxicConversationsClassification
2726
  config: default
2727
  split: test
2728
+ revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de
2729
  metrics:
2730
  - type: accuracy
2731
  value: 66.9918
 
2740
  name: MTEB TweetSentimentExtractionClassification
2741
  config: default
2742
  split: test
2743
+ revision: 62146448f05be9e52a36b8ee9936447ea787eede
2744
  metrics:
2745
  - type: accuracy
2746
  value: 55.410299943406905
 
2753
  name: MTEB TwentyNewsgroupsClustering
2754
  config: default
2755
  split: test
2756
+ revision: 091a54f9a36281ce7d6590ec8c75dd485e7e01d4
2757
  metrics:
2758
  - type: v_measure
2759
  value: 46.860271427647774
 
2764
  name: MTEB TwitterSemEval2015
2765
  config: default
2766
  split: test
2767
+ revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1
2768
  metrics:
2769
  - type: cos_sim_accuracy
2770
  value: 84.1151576563152
 
2819
  name: MTEB TwitterURLCorpus
2820
  config: default
2821
  split: test
2822
+ revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf
2823
  metrics:
2824
  - type: cos_sim_accuracy
2825
  value: 88.2504754142896
 
2869
  value: 76.57057281916886
2870
  ---
2871
 
 
2872
  # all-MiniLM-L6-v2
2873
  This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 384-dimensional dense vector space and can be used for tasks like clustering or semantic search.
2874
 
 
2930
 
2931
  ## Evaluation Results
2932
 
2933
+ For an automated evaluation of this model, see *MTEB*: https://huggingface.co/spaces/mteb/leaderboard or the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=sentence-transformers/all-MiniLM-L6-v2)
2934
 
2935
  ------
2936