Upload README.md with huggingface_hub
README.md CHANGED
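
The commit title points at the `huggingface_hub` client. Below is a minimal sketch of the kind of call that produces a commit like this one; the repo id and the prior `huggingface-cli login` are assumptions, not taken from this page.

```python
# Minimal sketch: push an updated model card with huggingface_hub.
# Assumes you are already authenticated (e.g. via `huggingface-cli login`)
# and that the target repo id is "minishlab/potion-base-2M" (an assumption).
from huggingface_hub import upload_file

upload_file(
    path_or_fileobj="README.md",    # local card with the regenerated metadata
    path_in_repo="README.md",       # overwrite the card at the repo root
    repo_id="minishlab/potion-base-2M",
    repo_type="model",
    commit_message="Upload README.md with huggingface_hub",
)
```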
@@ -1,15 +1,21 @@
 ---
 library_name: model2vec
 license: mit
+tags:
+- embeddings
+- static-embeddings
+- mteb
 model-index:
 - name: potion-base-2M
   results:
-  - dataset:
-      config: en-ext
+  - task:
+      type: Classification
+    dataset:
       name: MTEB AmazonCounterfactualClassification (en-ext)
-      revision: e8379541af4e31359cca9fbcf4b00f2671dba205
-      split: test
       type: mteb/amazon_counterfactual
+      config: en-ext
+      split: test
+      revision: e8379541af4e31359cca9fbcf4b00f2671dba205
     metrics:
     - type: accuracy
       value: 64.09295352323838
@@ -23,14 +29,14 @@ model-index:
       value: 71.02083973787023
     - type: main_score
       value: 64.09295352323838
-    task:
+  - task:
       type: Classification
-  - dataset:
-      config: en
+    dataset:
       name: MTEB AmazonCounterfactualClassification (en)
-      revision: e8379541af4e31359cca9fbcf4b00f2671dba205
-      split: test
       type: mteb/amazon_counterfactual
+      config: en
+      split: test
+      revision: e8379541af4e31359cca9fbcf4b00f2671dba205
     metrics:
     - type: accuracy
       value: 65.44776119402985
@@ -44,14 +50,14 @@ model-index:
       value: 68.76825531256598
     - type: main_score
       value: 65.44776119402985
-    task:
+  - task:
       type: Classification
-  - dataset:
-      config: default
+    dataset:
       name: MTEB AmazonPolarityClassification (default)
-      revision: e2d317d38cd51312af73b3d32a06d1a08b442046
-      split: test
       type: mteb/amazon_polarity
+      config: default
+      split: test
+      revision: e2d317d38cd51312af73b3d32a06d1a08b442046
     metrics:
     - type: accuracy
       value: 70.8279
@@ -65,14 +71,14 @@ model-index:
       value: 70.5783166369514
     - type: main_score
       value: 70.8279
-    task:
+  - task:
       type: Classification
-  - dataset:
-      config: en
+    dataset:
       name: MTEB AmazonReviewsClassification (en)
-      revision: 1399c76144fd37290681b995c656ef9b2e06e26d
-      split: test
       type: mteb/amazon_reviews_multi
+      config: en
+      split: test
+      revision: 1399c76144fd37290681b995c656ef9b2e06e26d
     metrics:
     - type: accuracy
       value: 32.996
@@ -82,14 +88,14 @@ model-index:
       value: 32.31726739771067
     - type: main_score
       value: 32.996
-    task:
-      type: Classification
-  - dataset:
-      config: default
+  - task:
+      type: Retrieval
+    dataset:
       name: MTEB ArguAna (default)
-      revision: c22ab2a51041ffd869aaddef7af8d8215647e41a
-      split: test
       type: mteb/arguana
+      config: default
+      split: test
+      revision: c22ab2a51041ffd869aaddef7af8d8215647e41a
     metrics:
     - type: main_score
       value: 32.622
@@ -373,14 +379,14 @@ model-index:
       value: 31.791999999999998
     - type: recall_at_5
       value: 40.114
-    task:
-      type: Retrieval
-  - dataset:
-      config: default
+  - task:
+      type: Clustering
+    dataset:
       name: MTEB ArxivClusteringP2P (default)
-      revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d
-      split: test
       type: mteb/arxiv-clustering-p2p
+      config: default
+      split: test
+      revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d
     metrics:
     - type: main_score
       value: 29.870127302809124
@@ -388,14 +394,14 @@ model-index:
       value: 29.870127302809124
     - type: v_measure_std
       value: 14.791231720290682
-    task:
+  - task:
       type: Clustering
-  - dataset:
-      config: default
+    dataset:
       name: MTEB ArxivClusteringS2S (default)
-      revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53
-      split: test
       type: mteb/arxiv-clustering-s2s
+      config: default
+      split: test
+      revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53
     metrics:
     - type: main_score
       value: 20.120157976895523
@@ -403,14 +409,14 @@ model-index:
       value: 20.120157976895523
     - type: v_measure_std
       value: 15.985610307944178
-    task:
-      type: Clustering
-  - dataset:
-      config: default
+  - task:
+      type: Reranking
+    dataset:
       name: MTEB AskUbuntuDupQuestions (default)
-      revision: 2000358ca161889fa9c082cb41daa8dcfb161a54
-      split: test
       type: mteb/askubuntudupquestions-reranking
+      config: default
+      split: test
+      revision: 2000358ca161889fa9c082cb41daa8dcfb161a54
     metrics:
     - type: main_score
       value: 52.90637925416103
@@ -430,14 +436,14 @@ model-index:
       value: 24.008690838618477
     - type: nAUC_mrr_std
       value: 4.127979271888478
-    task:
-      type: Reranking
-  - dataset:
-      config: default
+  - task:
+      type: STS
+    dataset:
       name: MTEB BIOSSES (default)
-      revision: d3fb88f8f02e40887cd149695127462bbcf29b4a
-      split: test
       type: mteb/biosses-sts
+      config: default
+      split: test
+      revision: d3fb88f8f02e40887cd149695127462bbcf29b4a
     metrics:
     - type: cosine_pearson
       value: 69.51991057082712
@@ -457,14 +463,14 @@ model-index:
       value: 69.51991057082712
     - type: spearman
       value: 64.10808725228159
-    task:
-      type: STS
-  - dataset:
-      config: default
+  - task:
+      type: Classification
+    dataset:
       name: MTEB Banking77Classification (default)
-      revision: 0fd18e25b25c072e09e0d92ab615fda904d66300
-      split: test
       type: mteb/banking77
+      config: default
+      split: test
+      revision: 0fd18e25b25c072e09e0d92ab615fda904d66300
     metrics:
     - type: accuracy
       value: 65.17207792207793
@@ -474,14 +480,14 @@ model-index:
|
|
474 |
value: 63.62144343754335
|
475 |
- type: main_score
|
476 |
value: 65.17207792207793
|
477 |
-
|
478 |
-
type:
|
479 |
-
|
480 |
-
config: default
|
481 |
name: MTEB BiorxivClusteringP2P (default)
|
482 |
-
revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40
|
483 |
-
split: test
|
484 |
type: mteb/biorxiv-clustering-p2p
|
|
|
|
|
|
|
485 |
metrics:
|
486 |
- type: main_score
|
487 |
value: 25.780291675770933
|
@@ -489,14 +495,14 @@ model-index:
|
|
489 |
value: 25.780291675770933
|
490 |
- type: v_measure_std
|
491 |
value: 0.5140442536046052
|
492 |
-
|
493 |
type: Clustering
|
494 |
-
|
495 |
-
config: default
|
496 |
name: MTEB BiorxivClusteringS2S (default)
|
497 |
-
revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908
|
498 |
-
split: test
|
499 |
type: mteb/biorxiv-clustering-s2s
|
|
|
|
|
|
|
500 |
metrics:
|
501 |
- type: main_score
|
502 |
value: 14.938305313404305
|
@@ -504,14 +510,14 @@ model-index:
|
|
504 |
value: 14.938305313404305
|
505 |
- type: v_measure_std
|
506 |
value: 0.6925176157191298
|
507 |
-
|
508 |
-
type:
|
509 |
-
|
510 |
-
config: default
|
511 |
name: MTEB CQADupstackAndroidRetrieval (default)
|
512 |
-
revision: f46a197baaae43b4f621051089b82a364682dfeb
|
513 |
-
split: test
|
514 |
type: mteb/cqadupstack-android
|
|
|
|
|
|
|
515 |
metrics:
|
516 |
- type: main_score
|
517 |
value: 25.330000000000002
|
@@ -795,14 +801,14 @@ model-index:
|
|
795 |
value: 21.951999999999998
|
796 |
- type: recall_at_5
|
797 |
value: 26.866
|
798 |
-
|
799 |
type: Retrieval
|
800 |
-
|
801 |
-
config: default
|
802 |
name: MTEB CQADupstackEnglishRetrieval (default)
|
803 |
-
revision: ad9991cb51e31e31e430383c75ffb2885547b5f0
|
804 |
-
split: test
|
805 |
type: mteb/cqadupstack-english
|
|
|
|
|
|
|
806 |
metrics:
|
807 |
- type: main_score
|
808 |
value: 19.03
|
@@ -1086,14 +1092,14 @@ model-index:
|
|
1086 |
value: 17.95
|
1087 |
- type: recall_at_5
|
1088 |
value: 20.605
|
1089 |
-
|
1090 |
type: Retrieval
|
1091 |
-
|
1092 |
-
config: default
|
1093 |
name: MTEB CQADupstackGamingRetrieval (default)
|
1094 |
-
revision: 4885aa143210c98657558c04aaf3dc47cfb54340
|
1095 |
-
split: test
|
1096 |
type: mteb/cqadupstack-gaming
|
|
|
|
|
|
|
1097 |
metrics:
|
1098 |
- type: main_score
|
1099 |
value: 29.583
|
@@ -1377,14 +1383,14 @@ model-index:
|
|
1377 |
value: 28.872999999999998
|
1378 |
- type: recall_at_5
|
1379 |
value: 33.771
|
1380 |
-
|
1381 |
type: Retrieval
|
1382 |
-
|
1383 |
-
config: default
|
1384 |
name: MTEB CQADupstackGisRetrieval (default)
|
1385 |
-
revision: 5003b3064772da1887988e05400cf3806fe491f2
|
1386 |
-
split: test
|
1387 |
type: mteb/cqadupstack-gis
|
|
|
|
|
|
|
1388 |
metrics:
|
1389 |
- type: main_score
|
1390 |
value: 14.183000000000002
|
@@ -1668,14 +1674,14 @@ model-index:
|
|
1668 |
value: 13.221
|
1669 |
- type: recall_at_5
|
1670 |
value: 14.895
|
1671 |
-
|
1672 |
type: Retrieval
|
1673 |
-
|
1674 |
-
config: default
|
1675 |
name: MTEB CQADupstackMathematicaRetrieval (default)
|
1676 |
-
revision: 90fceea13679c63fe563ded68f3b6f06e50061de
|
1677 |
-
split: test
|
1678 |
type: mteb/cqadupstack-mathematica
|
|
|
|
|
|
|
1679 |
metrics:
|
1680 |
- type: main_score
|
1681 |
value: 8.94
|
@@ -1959,14 +1965,14 @@ model-index:
|
|
1959 |
value: 8.260000000000002
|
1960 |
- type: recall_at_5
|
1961 |
value: 10.82
|
1962 |
-
|
1963 |
type: Retrieval
|
1964 |
-
|
1965 |
-
config: default
|
1966 |
name: MTEB CQADupstackPhysicsRetrieval (default)
|
1967 |
-
revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4
|
1968 |
-
split: test
|
1969 |
type: mteb/cqadupstack-physics
|
|
|
|
|
|
|
1970 |
metrics:
|
1971 |
- type: main_score
|
1972 |
value: 20.294999999999998
|
@@ -2250,14 +2256,14 @@ model-index:
|
|
2250 |
value: 19.167
|
2251 |
- type: recall_at_5
|
2252 |
value: 22.281000000000002
|
2253 |
-
|
2254 |
type: Retrieval
|
2255 |
-
|
2256 |
-
config: default
|
2257 |
name: MTEB CQADupstackProgrammersRetrieval (default)
|
2258 |
-
revision: 6184bc1440d2dbc7612be22b50686b8826d22b32
|
2259 |
-
split: test
|
2260 |
type: mteb/cqadupstack-programmers
|
|
|
|
|
|
|
2261 |
metrics:
|
2262 |
- type: main_score
|
2263 |
value: 14.094999999999999
|
@@ -2541,27 +2547,27 @@ model-index:
|
|
2541 |
value: 12.546
|
2542 |
- type: recall_at_5
|
2543 |
value: 15.453
|
2544 |
-
|
2545 |
type: Retrieval
|
2546 |
-
|
2547 |
-
config: default
|
2548 |
name: MTEB CQADupstackRetrieval (default)
|
2549 |
-
revision: CQADupstackRetrieval_is_a_combined_dataset
|
2550 |
-
split: test
|
2551 |
type: CQADupstackRetrieval_is_a_combined_dataset
|
|
|
|
|
|
|
2552 |
metrics:
|
2553 |
- type: main_score
|
2554 |
value: 16.292583333333337
|
2555 |
- type: ndcg_at_10
|
2556 |
value: 16.292583333333337
|
2557 |
-
|
2558 |
type: Retrieval
|
2559 |
-
|
2560 |
-
config: default
|
2561 |
name: MTEB CQADupstackStatsRetrieval (default)
|
2562 |
-
revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a
|
2563 |
-
split: test
|
2564 |
type: mteb/cqadupstack-stats
|
|
|
|
|
|
|
2565 |
metrics:
|
2566 |
- type: main_score
|
2567 |
value: 11.084
|
@@ -2845,14 +2851,14 @@ model-index:
|
|
2845 |
value: 9.052
|
2846 |
- type: recall_at_5
|
2847 |
value: 12.891
|
2848 |
-
|
2849 |
type: Retrieval
|
2850 |
-
|
2851 |
-
config: default
|
2852 |
name: MTEB CQADupstackTexRetrieval (default)
|
2853 |
-
revision: 46989137a86843e03a6195de44b09deda022eec7
|
2854 |
-
split: test
|
2855 |
type: mteb/cqadupstack-tex
|
|
|
|
|
|
|
2856 |
metrics:
|
2857 |
- type: main_score
|
2858 |
value: 9.0
|
@@ -3136,14 +3142,14 @@ model-index:
|
|
3136 |
value: 7.965999999999999
|
3137 |
- type: recall_at_5
|
3138 |
value: 9.795
|
3139 |
-
|
3140 |
type: Retrieval
|
3141 |
-
|
3142 |
-
config: default
|
3143 |
name: MTEB CQADupstackUnixRetrieval (default)
|
3144 |
-
revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53
|
3145 |
-
split: test
|
3146 |
type: mteb/cqadupstack-unix
|
|
|
|
|
|
|
3147 |
metrics:
|
3148 |
- type: main_score
|
3149 |
value: 14.642
|
@@ -3427,14 +3433,14 @@ model-index:
|
|
3427 |
value: 13.555
|
3428 |
- type: recall_at_5
|
3429 |
value: 16.259
|
3430 |
-
|
3431 |
type: Retrieval
|
3432 |
-
|
3433 |
-
config: default
|
3434 |
name: MTEB CQADupstackWebmastersRetrieval (default)
|
3435 |
-
revision: 160c094312a0e1facb97e55eeddb698c0abe3571
|
3436 |
-
split: test
|
3437 |
type: mteb/cqadupstack-webmasters
|
|
|
|
|
|
|
3438 |
metrics:
|
3439 |
- type: main_score
|
3440 |
value: 17.971999999999998
|
@@ -3718,14 +3724,14 @@ model-index:
|
|
3718 |
value: 15.906
|
3719 |
- type: recall_at_5
|
3720 |
value: 20.16
|
3721 |
-
|
3722 |
type: Retrieval
|
3723 |
-
|
3724 |
-
config: default
|
3725 |
name: MTEB CQADupstackWordpressRetrieval (default)
|
3726 |
-
revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4
|
3727 |
-
split: test
|
3728 |
type: mteb/cqadupstack-wordpress
|
|
|
|
|
|
|
3729 |
metrics:
|
3730 |
- type: main_score
|
3731 |
value: 11.357000000000001
|
@@ -4009,14 +4015,14 @@ model-index:
|
|
4009 |
value: 10.789
|
4010 |
- type: recall_at_5
|
4011 |
value: 14.116000000000001
|
4012 |
-
|
4013 |
type: Retrieval
|
4014 |
-
|
4015 |
-
config: default
|
4016 |
name: MTEB ClimateFEVER (default)
|
4017 |
-
revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380
|
4018 |
-
split: test
|
4019 |
type: mteb/climate-fever
|
|
|
|
|
|
|
4020 |
metrics:
|
4021 |
- type: main_score
|
4022 |
value: 8.665000000000001
|
@@ -4300,14 +4306,14 @@ model-index:
|
|
4300 |
value: 5.862
|
4301 |
- type: recall_at_5
|
4302 |
value: 7.595000000000001
|
4303 |
-
|
4304 |
type: Retrieval
|
4305 |
-
|
4306 |
-
config: default
|
4307 |
name: MTEB DBPedia (default)
|
4308 |
-
revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659
|
4309 |
-
split: test
|
4310 |
type: mteb/dbpedia
|
|
|
|
|
|
|
4311 |
metrics:
|
4312 |
- type: main_score
|
4313 |
value: 18.317
|
@@ -4591,14 +4597,14 @@ model-index:
|
|
4591 |
value: 6.6530000000000005
|
4592 |
- type: recall_at_5
|
4593 |
value: 7.93
|
4594 |
-
|
4595 |
-
type:
|
4596 |
-
|
4597 |
-
config: default
|
4598 |
name: MTEB EmotionClassification (default)
|
4599 |
-
revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37
|
4600 |
-
split: test
|
4601 |
type: mteb/emotion
|
|
|
|
|
|
|
4602 |
metrics:
|
4603 |
- type: accuracy
|
4604 |
value: 42.035
|
@@ -4608,14 +4614,14 @@ model-index:
|
|
4608 |
value: 44.414257303205105
|
4609 |
- type: main_score
|
4610 |
value: 42.035
|
4611 |
-
|
4612 |
-
type:
|
4613 |
-
|
4614 |
-
config: default
|
4615 |
name: MTEB FEVER (default)
|
4616 |
-
revision: bea83ef9e8fb933d90a2f1d5515737465d613e12
|
4617 |
-
split: test
|
4618 |
type: mteb/fever
|
|
|
|
|
|
|
4619 |
metrics:
|
4620 |
- type: main_score
|
4621 |
value: 18.761
|
@@ -4899,14 +4905,14 @@ model-index:
|
|
4899 |
value: 17.79
|
4900 |
- type: recall_at_5
|
4901 |
value: 21.956
|
4902 |
-
|
4903 |
type: Retrieval
|
4904 |
-
|
4905 |
-
config: default
|
4906 |
name: MTEB FiQA2018 (default)
|
4907 |
-
revision: 27a168819829fe9bcd655c2df245fb19452e8e06
|
4908 |
-
split: test
|
4909 |
type: mteb/fiqa
|
|
|
|
|
|
|
4910 |
metrics:
|
4911 |
- type: main_score
|
4912 |
value: 10.219000000000001
|
@@ -5190,14 +5196,14 @@ model-index:
|
|
5190 |
value: 7.8549999999999995
|
5191 |
- type: recall_at_5
|
5192 |
value: 9.861
|
5193 |
-
|
5194 |
type: Retrieval
|
5195 |
-
|
5196 |
-
config: default
|
5197 |
name: MTEB HotpotQA (default)
|
5198 |
-
revision: ab518f4d6fcca38d87c25209f94beba119d02014
|
5199 |
-
split: test
|
5200 |
type: mteb/hotpotqa
|
|
|
|
|
|
|
5201 |
metrics:
|
5202 |
- type: main_score
|
5203 |
value: 21.614
|
@@ -5481,14 +5487,14 @@ model-index:
|
|
5481 |
value: 17.873
|
5482 |
- type: recall_at_5
|
5483 |
value: 20.608
|
5484 |
-
|
5485 |
-
type:
|
5486 |
-
|
5487 |
-
config: default
|
5488 |
name: MTEB ImdbClassification (default)
|
5489 |
-
revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7
|
5490 |
-
split: test
|
5491 |
type: mteb/imdb
|
|
|
|
|
|
|
5492 |
metrics:
|
5493 |
- type: accuracy
|
5494 |
value: 70.35639999999998
|
@@ -5502,14 +5508,14 @@ model-index:
|
|
5502 |
value: 70.18257490051944
|
5503 |
- type: main_score
|
5504 |
value: 70.35639999999998
|
5505 |
-
|
5506 |
-
type:
|
5507 |
-
|
5508 |
-
config: default
|
5509 |
name: MTEB MSMARCO (default)
|
5510 |
-
revision: c5a29a104738b98a9e76336939199e264163d4a0
|
5511 |
-
split: test
|
5512 |
type: mteb/msmarco
|
|
|
|
|
|
|
5513 |
metrics:
|
5514 |
- type: main_score
|
5515 |
value: 29.474
|
@@ -5793,14 +5799,14 @@ model-index:
|
|
5793 |
value: 2.2800000000000002
|
5794 |
- type: recall_at_5
|
5795 |
value: 3.94
|
5796 |
-
|
5797 |
-
type:
|
5798 |
-
|
5799 |
-
config: en
|
5800 |
name: MTEB MTOPDomainClassification (en)
|
5801 |
-
revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf
|
5802 |
-
split: test
|
5803 |
type: mteb/mtop_domain
|
|
|
|
|
|
|
5804 |
metrics:
|
5805 |
- type: accuracy
|
5806 |
value: 79.27268581851345
|
@@ -5810,14 +5816,14 @@ model-index:
|
|
5810 |
value: 79.14088602852584
|
5811 |
- type: main_score
|
5812 |
value: 79.27268581851345
|
5813 |
-
|
5814 |
type: Classification
|
5815 |
-
|
5816 |
-
config: en
|
5817 |
name: MTEB MTOPIntentClassification (en)
|
5818 |
-
revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba
|
5819 |
-
split: test
|
5820 |
type: mteb/mtop_intent
|
|
|
|
|
|
|
5821 |
metrics:
|
5822 |
- type: accuracy
|
5823 |
value: 42.49886000911992
|
@@ -5827,14 +5833,14 @@ model-index:
|
|
5827 |
value: 46.29236281595424
|
5828 |
- type: main_score
|
5829 |
value: 42.49886000911992
|
5830 |
-
|
5831 |
type: Classification
|
5832 |
-
|
5833 |
-
config: en
|
5834 |
name: MTEB MassiveIntentClassification (en)
|
5835 |
-
revision: 4672e20407010da34463acc759c162ca9734bca6
|
5836 |
-
split: test
|
5837 |
type: mteb/amazon_massive_intent
|
|
|
|
|
|
|
5838 |
metrics:
|
5839 |
- type: accuracy
|
5840 |
value: 54.065232010759914
|
@@ -5844,14 +5850,14 @@ model-index:
|
|
5844 |
value: 52.69815077422998
|
5845 |
- type: main_score
|
5846 |
value: 54.065232010759914
|
5847 |
-
|
5848 |
type: Classification
|
5849 |
-
|
5850 |
-
config: en
|
5851 |
name: MTEB MassiveScenarioClassification (en)
|
5852 |
-
revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8
|
5853 |
-
split: test
|
5854 |
type: mteb/amazon_massive_scenario
|
|
|
|
|
|
|
5855 |
metrics:
|
5856 |
- type: accuracy
|
5857 |
value: 59.596503026227296
|
@@ -5861,14 +5867,14 @@ model-index:
|
|
5861 |
value: 59.23698301210568
|
5862 |
- type: main_score
|
5863 |
value: 59.596503026227296
|
5864 |
-
|
5865 |
-
type:
|
5866 |
-
|
5867 |
-
config: default
|
5868 |
name: MTEB MedrxivClusteringP2P (default)
|
5869 |
-
revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73
|
5870 |
-
split: test
|
5871 |
type: mteb/medrxiv-clustering-p2p
|
|
|
|
|
|
|
5872 |
metrics:
|
5873 |
- type: main_score
|
5874 |
value: 25.59161751046095
|
@@ -5876,14 +5882,14 @@ model-index:
|
|
5876 |
value: 25.59161751046095
|
5877 |
- type: v_measure_std
|
5878 |
value: 1.4816189134361553
|
5879 |
-
|
5880 |
type: Clustering
|
5881 |
-
|
5882 |
-
config: default
|
5883 |
name: MTEB MedrxivClusteringS2S (default)
|
5884 |
-
revision: 35191c8c0dca72d8ff3efcd72aa802307d469663
|
5885 |
-
split: test
|
5886 |
type: mteb/medrxiv-clustering-s2s
|
|
|
|
|
|
|
5887 |
metrics:
|
5888 |
- type: main_score
|
5889 |
value: 21.396391045777328
|
@@ -5891,14 +5897,14 @@ model-index:
|
|
5891 |
value: 21.396391045777328
|
5892 |
- type: v_measure_std
|
5893 |
value: 1.6103207158789596
|
5894 |
-
|
5895 |
-
type:
|
5896 |
-
|
5897 |
-
config: default
|
5898 |
name: MTEB MindSmallReranking (default)
|
5899 |
-
revision: 59042f120c80e8afa9cdbb224f67076cec0fc9a7
|
5900 |
-
split: test
|
5901 |
type: mteb/mind_small
|
|
|
|
|
|
|
5902 |
metrics:
|
5903 |
- type: main_score
|
5904 |
value: 28.017817065141404
|
@@ -5918,14 +5924,14 @@ model-index:
|
|
5918 |
value: -19.527290469919414
|
5919 |
- type: nAUC_mrr_std
|
5920 |
value: -6.772185014428633
|
5921 |
-
|
5922 |
-
type:
|
5923 |
-
|
5924 |
-
config: default
|
5925 |
name: MTEB NFCorpus (default)
|
5926 |
-
revision: ec0fa4fe99da2ff19ca1214b7966684033a58814
|
5927 |
-
split: test
|
5928 |
type: mteb/nfcorpus
|
|
|
|
|
|
|
5929 |
metrics:
|
5930 |
- type: main_score
|
5931 |
value: 18.958
|
@@ -6209,14 +6215,14 @@ model-index:
|
|
6209 |
value: 5.005
|
6210 |
- type: recall_at_5
|
6211 |
value: 6.3950000000000005
|
6212 |
-
|
6213 |
type: Retrieval
|
6214 |
-
|
6215 |
-
config: default
|
6216 |
name: MTEB NQ (default)
|
6217 |
-
revision: b774495ed302d8c44a3a7ea25c90dbce03968f31
|
6218 |
-
split: test
|
6219 |
type: mteb/nq
|
|
|
|
|
|
|
6220 |
metrics:
|
6221 |
- type: main_score
|
6222 |
value: 13.048000000000002
|
@@ -6500,14 +6506,14 @@ model-index:
|
|
6500 |
value: 11.584999999999999
|
6501 |
- type: recall_at_5
|
6502 |
value: 15.662
|
6503 |
-
|
6504 |
type: Retrieval
|
6505 |
-
|
6506 |
-
config: default
|
6507 |
name: MTEB QuoraRetrieval (default)
|
6508 |
-
revision: e4e08e0b7dbe3c8700f0daef558ff32256715259
|
6509 |
-
split: test
|
6510 |
type: mteb/quora
|
|
|
|
|
|
|
6511 |
metrics:
|
6512 |
- type: main_score
|
6513 |
value: 75.889
|
@@ -6791,14 +6797,14 @@ model-index:
|
|
6791 |
value: 74.507
|
6792 |
- type: recall_at_5
|
6793 |
value: 79.487
|
6794 |
-
|
6795 |
-
type:
|
6796 |
-
|
6797 |
-
config: default
|
6798 |
name: MTEB RedditClustering (default)
|
6799 |
-
revision: 24640382cdbf8abc73003fb0fa6d111a705499eb
|
6800 |
-
split: test
|
6801 |
type: mteb/reddit-clustering
|
|
|
|
|
|
|
6802 |
metrics:
|
6803 |
- type: main_score
|
6804 |
value: 29.134297978095674
|
@@ -6806,14 +6812,14 @@ model-index:
|
|
6806 |
value: 29.134297978095674
|
6807 |
- type: v_measure_std
|
6808 |
value: 3.9934034124121185
|
6809 |
-
|
6810 |
type: Clustering
|
6811 |
-
|
6812 |
-
config: default
|
6813 |
name: MTEB RedditClusteringP2P (default)
|
6814 |
-
revision: 385e3cb46b4cfa89021f56c4380204149d0efe33
|
6815 |
-
split: test
|
6816 |
type: mteb/reddit-clustering-p2p
|
|
|
|
|
|
|
6817 |
metrics:
|
6818 |
- type: main_score
|
6819 |
value: 39.215421675518
|
@@ -6821,14 +6827,14 @@ model-index:
|
|
6821 |
value: 39.215421675518
|
6822 |
- type: v_measure_std
|
6823 |
value: 10.607286582764162
|
6824 |
-
|
6825 |
-
type:
|
6826 |
-
|
6827 |
-
config: default
|
6828 |
name: MTEB SCIDOCS (default)
|
6829 |
-
revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88
|
6830 |
-
split: test
|
6831 |
type: mteb/scidocs
|
|
|
|
|
|
|
6832 |
metrics:
|
6833 |
- type: main_score
|
6834 |
value: 8.163
|
@@ -7112,14 +7118,14 @@ model-index:
|
|
7112 |
value: 4.35
|
7113 |
- type: recall_at_5
|
7114 |
value: 5.765
|
7115 |
-
|
7116 |
-
type:
|
7117 |
-
|
7118 |
-
config: default
|
7119 |
name: MTEB SICK-R (default)
|
7120 |
-
revision: 20a6d6f312dd54037fe07a32d58e5e168867909d
|
7121 |
-
split: test
|
7122 |
type: mteb/sickr-sts
|
|
|
|
|
|
|
7123 |
metrics:
|
7124 |
- type: cosine_pearson
|
7125 |
value: 74.06014749723313
|
@@ -7139,14 +7145,14 @@ model-index:
|
|
7139 |
value: 74.06014749723313
|
7140 |
- type: spearman
|
7141 |
value: 62.85583348143348
|
7142 |
-
|
7143 |
type: STS
|
7144 |
-
|
7145 |
-
config: default
|
7146 |
name: MTEB STS12 (default)
|
7147 |
-
revision: a0d554a64d88156834ff5ae9920b964011b16384
|
7148 |
-
split: test
|
7149 |
type: mteb/sts12-sts
|
|
|
|
|
|
|
7150 |
metrics:
|
7151 |
- type: cosine_pearson
|
7152 |
value: 71.71587397454503
|
@@ -7166,14 +7172,14 @@ model-index:
|
|
7166 |
value: 71.71587397454503
|
7167 |
- type: spearman
|
7168 |
value: 62.07913034464432
|
7169 |
-
|
7170 |
type: STS
|
7171 |
-
|
7172 |
-
config: default
|
7173 |
name: MTEB STS13 (default)
|
7174 |
-
revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca
|
7175 |
-
split: test
|
7176 |
type: mteb/sts13-sts
|
|
|
|
|
|
|
7177 |
metrics:
|
7178 |
- type: cosine_pearson
|
7179 |
value: 74.00146491973214
|
@@ -7193,14 +7199,14 @@ model-index:
|
|
7193 |
value: 74.00146491973214
|
7194 |
- type: spearman
|
7195 |
value: 75.73113726697468
|
7196 |
-
|
7197 |
type: STS
|
7198 |
-
|
7199 |
-
config: default
|
7200 |
name: MTEB STS14 (default)
|
7201 |
-
revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375
|
7202 |
-
split: test
|
7203 |
type: mteb/sts14-sts
|
|
|
|
|
|
|
7204 |
metrics:
|
7205 |
- type: cosine_pearson
|
7206 |
value: 73.18799052289306
|
@@ -7220,14 +7226,14 @@ model-index:
|
|
7220 |
value: 73.18799052289306
|
7221 |
- type: spearman
|
7222 |
value: 69.27997439795548
|
7223 |
-
|
7224 |
type: STS
|
7225 |
-
|
7226 |
-
config: default
|
7227 |
name: MTEB STS15 (default)
|
7228 |
-
revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3
|
7229 |
-
split: test
|
7230 |
type: mteb/sts15-sts
|
|
|
|
|
|
|
7231 |
metrics:
|
7232 |
- type: cosine_pearson
|
7233 |
value: 75.05240168700195
|
@@ -7247,14 +7253,14 @@ model-index:
|
|
7247 |
value: 75.05240168700195
|
7248 |
- type: spearman
|
7249 |
value: 76.32976845993336
|
7250 |
-
|
7251 |
type: STS
|
7252 |
-
|
7253 |
-
config: default
|
7254 |
name: MTEB STS16 (default)
|
7255 |
-
revision: 4d8694f8f0e0100860b497b999b3dbed754a0513
|
7256 |
-
split: test
|
7257 |
type: mteb/sts16-sts
|
|
|
|
|
|
|
7258 |
metrics:
|
7259 |
- type: cosine_pearson
|
7260 |
value: 71.35240308275529
|
@@ -7274,14 +7280,14 @@ model-index:
|
|
7274 |
value: 71.35240308275529
|
7275 |
- type: spearman
|
7276 |
value: 73.46659216141927
|
7277 |
-
|
7278 |
type: STS
|
7279 |
-
|
7280 |
-
config: en-en
|
7281 |
name: MTEB STS17 (en-en)
|
7282 |
-
revision: faeb762787bd10488a50c8b5be4a3b82e411949c
|
7283 |
-
split: test
|
7284 |
type: mteb/sts17-crosslingual-sts
|
|
|
|
|
|
|
7285 |
metrics:
|
7286 |
- type: cosine_pearson
|
7287 |
value: 80.32585308865436
|
@@ -7301,14 +7307,14 @@ model-index:
|
|
7301 |
value: 80.32585308865436
|
7302 |
- type: spearman
|
7303 |
value: 82.08042618874391
|
7304 |
-
|
7305 |
type: STS
|
7306 |
-
|
7307 |
-
config: fr-en
|
7308 |
name: MTEB STS17 (fr-en)
|
7309 |
-
revision: faeb762787bd10488a50c8b5be4a3b82e411949c
|
7310 |
-
split: test
|
7311 |
type: mteb/sts17-crosslingual-sts
|
|
|
|
|
|
|
7312 |
metrics:
|
7313 |
- type: cosine_pearson
|
7314 |
value: 26.492433454505004
|
@@ -7328,14 +7334,14 @@ model-index:
|
|
7328 |
value: 26.492433454505004
|
7329 |
- type: spearman
|
7330 |
value: 25.26192630209604
|
7331 |
-
|
7332 |
type: STS
|
7333 |
-
|
7334 |
-
config: en-ar
|
7335 |
name: MTEB STS17 (en-ar)
|
7336 |
-
revision: faeb762787bd10488a50c8b5be4a3b82e411949c
|
7337 |
-
split: test
|
7338 |
type: mteb/sts17-crosslingual-sts
|
|
|
|
|
|
|
7339 |
metrics:
|
7340 |
- type: cosine_pearson
|
7341 |
value: 4.849502004066215
|
@@ -7355,14 +7361,14 @@ model-index:
|
|
7355 |
value: 4.849502004066215
|
7356 |
- type: spearman
|
7357 |
value: 2.4221360201347566
|
7358 |
-
|
7359 |
type: STS
|
7360 |
-
|
7361 |
-
config: it-en
|
7362 |
name: MTEB STS17 (it-en)
|
7363 |
-
revision: faeb762787bd10488a50c8b5be4a3b82e411949c
|
7364 |
-
split: test
|
7365 |
type: mteb/sts17-crosslingual-sts
|
|
|
|
|
|
|
7366 |
metrics:
|
7367 |
- type: cosine_pearson
|
7368 |
value: 17.67430795163699
|
@@ -7382,14 +7388,14 @@ model-index:
|
|
7382 |
value: 17.67430795163699
|
7383 |
- type: spearman
|
7384 |
value: 14.138028269188412
|
7385 |
-
|
7386 |
type: STS
|
7387 |
-
|
7388 |
-
config: en-tr
|
7389 |
name: MTEB STS17 (en-tr)
|
7390 |
-
revision: faeb762787bd10488a50c8b5be4a3b82e411949c
|
7391 |
-
split: test
|
7392 |
type: mteb/sts17-crosslingual-sts
|
|
|
|
|
|
|
7393 |
metrics:
|
7394 |
- type: cosine_pearson
|
7395 |
value: 11.032677618214326
|
@@ -7409,14 +7415,14 @@ model-index:
|
|
7409 |
value: 11.032677618214326
|
7410 |
- type: spearman
|
7411 |
value: 8.819837594034183
|
7412 |
-
|
7413 |
type: STS
|
7414 |
-
|
7415 |
-
config: nl-en
|
7416 |
name: MTEB STS17 (nl-en)
|
7417 |
-
revision: faeb762787bd10488a50c8b5be4a3b82e411949c
|
7418 |
-
split: test
|
7419 |
type: mteb/sts17-crosslingual-sts
|
|
|
|
|
|
|
7420 |
metrics:
|
7421 |
- type: cosine_pearson
|
7422 |
value: 21.77242194085935
|
@@ -7436,14 +7442,14 @@ model-index:
|
|
7436 |
value: 21.77242194085935
|
7437 |
- type: spearman
|
7438 |
value: 19.564246863458028
|
7439 |
-
|
7440 |
type: STS
|
7441 |
-
|
7442 |
-
config: en-de
|
7443 |
name: MTEB STS17 (en-de)
|
7444 |
-
revision: faeb762787bd10488a50c8b5be4a3b82e411949c
|
7445 |
-
split: test
|
7446 |
type: mteb/sts17-crosslingual-sts
|
|
|
|
|
|
|
7447 |
metrics:
|
7448 |
- type: cosine_pearson
|
7449 |
value: 24.34392722914247
|
@@ -7463,14 +7469,14 @@ model-index:
|
|
7463 |
value: 24.34392722914247
|
7464 |
- type: spearman
|
7465 |
value: 22.516912091222096
|
7466 |
-
|
7467 |
type: STS
|
7468 |
-
|
7469 |
-
config: es-en
|
7470 |
name: MTEB STS17 (es-en)
|
7471 |
-
revision: faeb762787bd10488a50c8b5be4a3b82e411949c
|
7472 |
-
split: test
|
7473 |
type: mteb/sts17-crosslingual-sts
|
|
|
|
|
|
|
7474 |
metrics:
|
7475 |
- type: cosine_pearson
|
7476 |
value: 11.486165309912764
|
@@ -7490,14 +7496,14 @@ model-index:
|
|
7490 |
value: 11.486165309912764
|
7491 |
- type: spearman
|
7492 |
value: 10.139614392782256
|
7493 |
-
|
7494 |
type: STS
|
7495 |
-
|
7496 |
-
config: en
|
7497 |
name: MTEB STS22 (en)
|
7498 |
-
revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
|
7499 |
-
split: test
|
7500 |
type: mteb/sts22-crosslingual-sts
|
|
|
|
|
|
|
7501 |
metrics:
|
7502 |
- type: cosine_pearson
|
7503 |
value: 56.1393113780294
|
@@ -7517,14 +7523,14 @@ model-index:
|
|
7517 |
value: 56.1393113780294
|
7518 |
- type: spearman
|
7519 |
value: 62.64707232707212
|
7520 |
-
|
7521 |
type: STS
|
7522 |
-
|
7523 |
-
config: zh-en
|
7524 |
name: MTEB STS22 (zh-en)
|
7525 |
-
revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
|
7526 |
-
split: test
|
7527 |
type: mteb/sts22-crosslingual-sts
|
|
|
|
|
|
|
7528 |
metrics:
|
7529 |
- type: cosine_pearson
|
7530 |
value: 9.554093605202135
|
@@ -7544,14 +7550,14 @@ model-index:
|
|
7544 |
value: 9.554093605202135
|
7545 |
- type: spearman
|
7546 |
value: 11.788855140937605
|
7547 |
-
|
7548 |
type: STS
|
7549 |
-
|
7550 |
-
config: de-en
|
7551 |
name: MTEB STS22 (de-en)
|
7552 |
-
revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
|
7553 |
-
split: test
|
7554 |
type: mteb/sts22-crosslingual-sts
|
|
|
|
|
|
|
7555 |
metrics:
|
7556 |
- type: cosine_pearson
|
7557 |
value: 23.699968394697848
|
@@ -7571,14 +7577,14 @@ model-index:
|
|
7571 |
value: 23.699968394697848
|
7572 |
- type: spearman
|
7573 |
value: 25.635685273215014
|
7574 |
-
|
7575 |
type: STS
|
7576 |
-
|
7577 |
-
config: es-en
|
7578 |
name: MTEB STS22 (es-en)
|
7579 |
-
revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
|
7580 |
-
split: test
|
7581 |
type: mteb/sts22-crosslingual-sts
|
|
|
|
|
|
|
7582 |
metrics:
|
7583 |
- type: cosine_pearson
|
7584 |
value: 9.344287684864119
|
@@ -7598,14 +7604,14 @@ model-index:
|
|
7598 |
value: 9.344287684864119
|
7599 |
- type: spearman
|
7600 |
value: 10.042423712385212
|
7601 |
-
|
7602 |
type: STS
|
7603 |
-
|
7604 |
-
config: pl-en
|
7605 |
name: MTEB STS22 (pl-en)
|
7606 |
-
revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
|
7607 |
-
split: test
|
7608 |
type: mteb/sts22-crosslingual-sts
|
|
|
|
|
|
|
7609 |
metrics:
|
7610 |
- type: cosine_pearson
|
7611 |
value: 8.001041267374578
|
@@ -7625,14 +7631,14 @@ model-index:
|
|
7625 |
value: 8.001041267374578
|
7626 |
- type: spearman
|
7627 |
value: 15.127881072012025
|
7628 |
-
|
7629 |
type: STS
|
7630 |
-
|
7631 |
-
config: default
|
7632 |
name: MTEB STSBenchmark (default)
|
7633 |
-
revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831
|
7634 |
-
split: test
|
7635 |
type: mteb/stsbenchmark-sts
|
|
|
|
|
|
|
7636 |
metrics:
|
7637 |
- type: cosine_pearson
|
7638 |
value: 73.57677681061787
|
@@ -7652,14 +7658,14 @@ model-index:
|
|
7652 |
value: 73.57677681061787
|
7653 |
- type: spearman
|
7654 |
value: 72.80800903257308
|
7655 |
-
|
7656 |
-
type:
|
7657 |
-
|
7658 |
-
config: default
|
7659 |
name: MTEB SciDocsRR (default)
|
7660 |
-
revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab
|
7661 |
-
split: test
|
7662 |
type: mteb/scidocs-reranking
|
|
|
|
|
|
|
7663 |
metrics:
|
7664 |
- type: main_score
|
7665 |
value: 66.13944998572143
|
@@ -7679,14 +7685,14 @@ model-index:
|
|
7679 |
value: 69.96013858740152
|
7680 |
- type: nAUC_mrr_std
|
7681 |
value: 62.072046098925156
|
7682 |
-
|
7683 |
-
type:
|
7684 |
-
|
7685 |
-
config: default
|
7686 |
name: MTEB SciFact (default)
|
7687 |
-
revision: 0228b52cf27578f30900b9e5271d331663a030d7
|
7688 |
-
split: test
|
7689 |
type: mteb/scifact
|
|
|
|
|
|
|
7690 |
metrics:
|
7691 |
- type: main_score
|
7692 |
value: 36.55
|
@@ -7970,14 +7976,14 @@ model-index:
|
|
7970 |
value: 36.306
|
7971 |
- type: recall_at_5
|
7972 |
value: 43.389
|
7973 |
-
|
7974 |
-
type:
|
7975 |
-
|
7976 |
-
config: default
|
7977 |
name: MTEB SprintDuplicateQuestions (default)
|
7978 |
-
revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46
|
7979 |
-
split: test
|
7980 |
type: mteb/sprintduplicatequestions-pairclassification
|
|
|
|
|
|
|
7981 |
metrics:
|
7982 |
- type: cosine_accuracy
|
7983 |
value: 99.68415841584158
|
@@ -8061,14 +8067,14 @@ model-index:
|
|
8061 |
value: 86.65231431646933
|
8062 |
- type: similarity_recall
|
8063 |
value: 80.5
|
8064 |
-
|
8065 |
-
type:
|
8066 |
-
|
8067 |
-
config: default
|
8068 |
name: MTEB StackExchangeClustering (default)
|
8069 |
-
revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259
|
8070 |
-
split: test
|
8071 |
type: mteb/stackexchange-clustering
|
|
|
|
|
|
|
8072 |
metrics:
|
8073 |
- type: main_score
|
8074 |
value: 37.82716668016299
|
@@ -8076,14 +8082,14 @@ model-index:
|
|
8076 |
value: 37.82716668016299
|
8077 |
- type: v_measure_std
|
8078 |
value: 3.9071651545475055
|
8079 |
-
|
8080 |
type: Clustering
|
8081 |
-
|
8082 |
-
config: default
|
8083 |
name: MTEB StackExchangeClusteringP2P (default)
|
8084 |
-
revision: 815ca46b2622cec33ccafc3735d572c266efdb44
|
8085 |
-
split: test
|
8086 |
type: mteb/stackexchange-clustering-p2p
|
|
|
|
|
|
|
8087 |
metrics:
|
8088 |
- type: main_score
|
8089 |
value: 31.549916824347523
|
@@ -8091,14 +8097,14 @@ model-index:
|
|
8091 |
value: 31.549916824347523
|
8092 |
- type: v_measure_std
|
8093 |
value: 1.649284454526032
|
8094 |
-
|
8095 |
-
type:
|
8096 |
-
|
8097 |
-
config: default
|
8098 |
name: MTEB StackOverflowDupQuestions (default)
|
8099 |
-
revision: e185fbe320c72810689fc5848eb6114e1ef5ec69
|
8100 |
-
split: test
|
8101 |
type: mteb/stackoverflowdupquestions-reranking
|
|
|
|
|
|
|
8102 |
metrics:
|
8103 |
- type: main_score
|
8104 |
value: 40.201162273119
|
@@ -8118,14 +8124,14 @@ model-index:
|
|
8118 |
value: 16.03297395659567
|
8119 |
- type: nAUC_mrr_std
|
8120 |
value: 4.5441260195062885
|
8121 |
-
|
8122 |
-
type:
|
8123 |
-
|
8124 |
-
config: default
|
8125 |
name: MTEB SummEval (default)
|
8126 |
-
revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c
|
8127 |
-
split: test
|
8128 |
type: mteb/summeval
|
|
|
|
|
|
|
8129 |
metrics:
|
8130 |
- type: cosine_pearson
|
8131 |
value: 30.982384340344282
|
@@ -8141,14 +8147,14 @@ model-index:
|
|
8141 |
value: 30.982384340344282
|
8142 |
- type: spearman
|
8143 |
value: 31.512077655680574
|
8144 |
-
|
8145 |
-
type:
|
8146 |
-
|
8147 |
-
config: default
|
8148 |
name: MTEB TRECCOVID (default)
|
8149 |
-
revision: bb9466bac8153a0349341eb1b22e06409e78ef4e
|
8150 |
-
split: test
|
8151 |
type: mteb/trec-covid
|
|
|
|
|
|
|
8152 |
metrics:
|
8153 |
- type: main_score
|
8154 |
value: 40.475
|
@@ -8432,14 +8438,14 @@ model-index:
|
|
8432 |
value: 0.337
|
8433 |
- type: recall_at_5
|
8434 |
value: 0.5329999999999999
|
8435 |
-
|
8436 |
type: Retrieval
|
8437 |
-
|
8438 |
-
config: default
|
8439 |
name: MTEB Touche2020 (default)
|
8440 |
-
revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f
|
8441 |
-
split: test
|
8442 |
type: mteb/touche2020
|
|
|
|
|
|
|
8443 |
metrics:
|
8444 |
- type: main_score
|
8445 |
value: 12.869
|
@@ -8723,14 +8729,14 @@ model-index:
|
|
8723 |
value: 2.495
|
8724 |
- type: recall_at_5
|
8725 |
value: 5.4719999999999995
|
8726 |
-
|
8727 |
-
type:
|
8728 |
-
|
8729 |
-
config: default
|
8730 |
name: MTEB ToxicConversationsClassification (default)
|
8731 |
-
revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de
|
8732 |
-
split: test
|
8733 |
type: mteb/toxic_conversations_50k
|
|
|
|
|
|
|
8734 |
metrics:
|
8735 |
- type: accuracy
|
8736 |
value: 66.591796875
|
@@ -8744,14 +8750,14 @@ model-index:
|
|
8744 |
value: 74.03661967545759
|
8745 |
- type: main_score
|
8746 |
value: 66.591796875
|
8747 |
-
|
8748 |
type: Classification
|
8749 |
-
|
8750 |
-
config: default
|
8751 |
name: MTEB TweetSentimentExtractionClassification (default)
|
8752 |
-
revision: d604517c81ca91fe16a244d1248fc021f9ecee7a
|
8753 |
-
split: test
|
8754 |
type: mteb/tweet_sentiment_extraction
|
|
|
|
|
|
|
8755 |
metrics:
|
8756 |
- type: accuracy
|
8757 |
value: 52.524052065648
|
@@ -8761,14 +8767,14 @@ model-index:
|
|
8761 |
value: 51.956916785617736
|
8762 |
- type: main_score
|
8763 |
value: 52.524052065648
|
8764 |
-
|
8765 |
-
type:
|
8766 |
-
|
8767 |
-
config: default
|
8768 |
name: MTEB TwentyNewsgroupsClustering (default)
|
8769 |
-
revision: 6125ec4e24fa026cec8a478383ee943acfbd5449
|
8770 |
-
split: test
|
8771 |
type: mteb/twentynewsgroups-clustering
|
|
|
|
|
|
|
8772 |
metrics:
|
8773 |
- type: main_score
|
8774 |
value: 27.072966157648477
|
@@ -8776,14 +8782,14 @@ model-index:
|
|
8776 |
value: 27.072966157648477
|
8777 |
- type: v_measure_std
|
8778 |
value: 1.563199572918265
|
8779 |
-
|
8780 |
-
type:
|
8781 |
-
|
8782 |
-
config: default
|
8783 |
name: MTEB TwitterSemEval2015 (default)
|
8784 |
-
revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1
|
8785 |
-
split: test
|
8786 |
type: mteb/twittersemeval2015-pairclassification
|
|
|
|
|
|
|
8787 |
metrics:
|
8788 |
- type: cosine_accuracy
|
8789 |
value: 80.6818859152411
|
@@ -8867,14 +8873,14 @@ model-index:
|
|
8867 |
value: 45.14719848053181
|
8868 |
- type: similarity_recall
|
8869 |
value: 62.71767810026385
|
8870 |
-
|
8871 |
type: PairClassification
|
8872 |
-
|
8873 |
-
config: default
|
8874 |
name: MTEB TwitterURLCorpus (default)
|
8875 |
-
revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf
|
8876 |
-
split: test
|
8877 |
type: mteb/twitterurlcorpus-pairclassification
|
|
|
|
|
|
|
8878 |
metrics:
|
8879 |
- type: cosine_accuracy
|
8880 |
value: 85.89474909768309
|
@@ -8958,13 +8964,6 @@ model-index:
|
|
8958 |
value: 69.57530076822728
|
8959 |
- type: similarity_recall
|
8960 |
value: 73.91438250692947
|
8961 |
-
task:
|
8962 |
-
type: PairClassification
|
8963 |
-
model_name: potion-base-2M
|
8964 |
-
tags:
|
8965 |
-
- embeddings
|
8966 |
-
- static-embeddings
|
8967 |
-
- mteb
|
8968 |
---
|
8969 |
|
8970 |
# potion-base-2M Model Card
|
|
|
1 |
---
|
2 |
library_name: model2vec
|
3 |
license: mit
|
4 |
+
tags:
|
5 |
+
- embeddings
|
6 |
+
- static-embeddings
|
7 |
+
- mteb
|
8 |
model-index:
|
9 |
- name: potion-base-2M
|
10 |
results:
|
11 |
+
- task:
|
12 |
+
type: Classification
|
13 |
+
dataset:
|
14 |
name: MTEB AmazonCounterfactualClassification (en-ext)
|
|
|
|
|
15 |
type: mteb/amazon_counterfactual
|
16 |
+
config: en-ext
|
17 |
+
split: test
|
18 |
+
revision: e8379541af4e31359cca9fbcf4b00f2671dba205
|
19 |
metrics:
|
20 |
- type: accuracy
|
21 |
value: 64.09295352323838
|
|
|
29 |
value: 71.02083973787023
|
30 |
- type: main_score
|
31 |
value: 64.09295352323838
|
32 |
+
- task:
|
33 |
type: Classification
|
34 |
+
dataset:
|
|
|
35 |
name: MTEB AmazonCounterfactualClassification (en)
|
|
|
|
|
36 |
type: mteb/amazon_counterfactual
|
37 |
+
config: en
|
38 |
+
split: test
|
39 |
+
revision: e8379541af4e31359cca9fbcf4b00f2671dba205
|
40 |
metrics:
|
41 |
- type: accuracy
|
42 |
value: 65.44776119402985
|
|
|
50 |
value: 68.76825531256598
|
51 |
- type: main_score
|
52 |
value: 65.44776119402985
|
53 |
+
- task:
|
54 |
type: Classification
|
55 |
+
dataset:
|
|
|
56 |
name: MTEB AmazonPolarityClassification (default)
|
|
|
|
|
57 |
type: mteb/amazon_polarity
|
58 |
+
config: default
|
59 |
+
split: test
|
60 |
+
revision: e2d317d38cd51312af73b3d32a06d1a08b442046
|
61 |
metrics:
|
62 |
- type: accuracy
|
63 |
value: 70.8279
|
|
|
71 |
value: 70.5783166369514
|
72 |
- type: main_score
|
73 |
value: 70.8279
|
74 |
+
- task:
|
75 |
type: Classification
|
76 |
+
dataset:
|
|
|
77 |
name: MTEB AmazonReviewsClassification (en)
|
|
|
|
|
78 |
type: mteb/amazon_reviews_multi
|
79 |
+
config: en
|
80 |
+
split: test
|
81 |
+
revision: 1399c76144fd37290681b995c656ef9b2e06e26d
|
82 |
metrics:
|
83 |
- type: accuracy
|
84 |
value: 32.996
|
|
|
88 |
value: 32.31726739771067
|
89 |
- type: main_score
|
90 |
value: 32.996
|
91 |
+
- task:
|
92 |
+
type: Retrieval
|
93 |
+
dataset:
|
|
|
94 |
name: MTEB ArguAna (default)
|
|
|
|
|
95 |
type: mteb/arguana
|
96 |
+
config: default
|
97 |
+
split: test
|
98 |
+
revision: c22ab2a51041ffd869aaddef7af8d8215647e41a
|
99 |
metrics:
|
100 |
- type: main_score
|
101 |
value: 32.622
|
|
|
379 |
value: 31.791999999999998
|
380 |
- type: recall_at_5
|
381 |
value: 40.114
|
382 |
+
- task:
|
383 |
+
type: Clustering
|
384 |
+
dataset:
|
|
|
385 |
name: MTEB ArxivClusteringP2P (default)
|
|
|
|
|
386 |
type: mteb/arxiv-clustering-p2p
|
387 |
+
config: default
|
388 |
+
split: test
|
389 |
+
revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d
|
390 |
metrics:
|
391 |
- type: main_score
|
392 |
value: 29.870127302809124
|
|
|
394 |
value: 29.870127302809124
|
395 |
- type: v_measure_std
|
396 |
value: 14.791231720290682
|
397 |
+
- task:
|
398 |
type: Clustering
|
399 |
+
dataset:
|
|
|
400 |
name: MTEB ArxivClusteringS2S (default)
|
|
|
|
|
401 |
type: mteb/arxiv-clustering-s2s
|
402 |
+
config: default
|
403 |
+
split: test
|
404 |
+
revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53
|
405 |
metrics:
|
406 |
- type: main_score
|
407 |
value: 20.120157976895523
|
|
|
409 |
value: 20.120157976895523
|
410 |
- type: v_measure_std
|
411 |
value: 15.985610307944178
|
412 |
+
- task:
|
413 |
+
type: Reranking
|
414 |
+
dataset:
|
|
|
415 |
name: MTEB AskUbuntuDupQuestions (default)
|
|
|
|
|
416 |
type: mteb/askubuntudupquestions-reranking
|
417 |
+
config: default
|
418 |
+
split: test
|
419 |
+
revision: 2000358ca161889fa9c082cb41daa8dcfb161a54
|
420 |
metrics:
|
421 |
- type: main_score
|
422 |
value: 52.90637925416103
|
|
|
436 |
value: 24.008690838618477
|
437 |
- type: nAUC_mrr_std
|
438 |
value: 4.127979271888478
|
439 |
+
- task:
|
440 |
+
type: STS
|
441 |
+
dataset:
|
|
|
442 |
name: MTEB BIOSSES (default)
|
|
|
|
|
443 |
type: mteb/biosses-sts
|
444 |
+
config: default
|
445 |
+
split: test
|
446 |
+
revision: d3fb88f8f02e40887cd149695127462bbcf29b4a
|
447 |
metrics:
|
448 |
- type: cosine_pearson
|
449 |
value: 69.51991057082712
|
|
|
463 |
value: 69.51991057082712
|
464 |
- type: spearman
|
465 |
value: 64.10808725228159
|
466 |
+
- task:
|
467 |
+
type: Classification
|
468 |
+
dataset:
|
|
|
469 |
name: MTEB Banking77Classification (default)
|
|
|
|
|
470 |
type: mteb/banking77
|
471 |
+
config: default
|
472 |
+
split: test
|
473 |
+
revision: 0fd18e25b25c072e09e0d92ab615fda904d66300
|
474 |
metrics:
|
475 |
- type: accuracy
|
476 |
value: 65.17207792207793
|
|
|
480 |
value: 63.62144343754335
|
481 |
- type: main_score
|
482 |
value: 65.17207792207793
|
483 |
+
- task:
|
484 |
+
type: Clustering
|
485 |
+
dataset:
|
|
|
486 |
name: MTEB BiorxivClusteringP2P (default)
|
|
|
|
|
487 |
type: mteb/biorxiv-clustering-p2p
|
488 |
+
config: default
|
489 |
+
split: test
|
490 |
+
revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40
|
491 |
metrics:
|
492 |
- type: main_score
|
493 |
value: 25.780291675770933
|
|
|
495 |
value: 25.780291675770933
|
496 |
- type: v_measure_std
|
497 |
value: 0.5140442536046052
|
498 |
+
- task:
|
499 |
type: Clustering
|
500 |
+
dataset:
|
|
|
501 |
name: MTEB BiorxivClusteringS2S (default)
|
|
|
|
|
502 |
type: mteb/biorxiv-clustering-s2s
|
503 |
+
config: default
|
504 |
+
split: test
|
505 |
+
revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908
|
506 |
metrics:
|
507 |
- type: main_score
|
508 |
value: 14.938305313404305
|
|
|
510 |
value: 14.938305313404305
|
511 |
- type: v_measure_std
|
512 |
value: 0.6925176157191298
|
513 |
+
- task:
|
514 |
+
type: Retrieval
|
515 |
+
dataset:
|
|
|
516 |
name: MTEB CQADupstackAndroidRetrieval (default)
|
|
|
|
|
517 |
type: mteb/cqadupstack-android
|
518 |
+
config: default
|
519 |
+
split: test
|
520 |
+
revision: f46a197baaae43b4f621051089b82a364682dfeb
|
521 |
metrics:
|
522 |
- type: main_score
|
523 |
value: 25.330000000000002
|
|
|
801 |
value: 21.951999999999998
|
802 |
- type: recall_at_5
|
803 |
value: 26.866
|
804 |
+
- task:
|
805 |
type: Retrieval
|
806 |
+
dataset:
|
|
|
807 |
name: MTEB CQADupstackEnglishRetrieval (default)
|
|
|
|
|
808 |
type: mteb/cqadupstack-english
|
809 |
+
config: default
|
810 |
+
split: test
|
811 |
+
revision: ad9991cb51e31e31e430383c75ffb2885547b5f0
|
812 |
metrics:
|
813 |
- type: main_score
|
814 |
value: 19.03
|
|
|
1092 |
value: 17.95
|
1093 |
- type: recall_at_5
|
1094 |
value: 20.605
|
1095 |
+
- task:
|
1096 |
type: Retrieval
|
1097 |
+
dataset:
|
|
|
1098 |
name: MTEB CQADupstackGamingRetrieval (default)
|
|
|
|
|
1099 |
type: mteb/cqadupstack-gaming
|
1100 |
+
config: default
|
1101 |
+
split: test
|
1102 |
+
revision: 4885aa143210c98657558c04aaf3dc47cfb54340
|
1103 |
metrics:
|
1104 |
- type: main_score
|
1105 |
value: 29.583
|
|
|
1383 |
value: 28.872999999999998
|
1384 |
- type: recall_at_5
|
1385 |
value: 33.771
|
1386 |
+
- task:
|
1387 |
type: Retrieval
|
1388 |
+
dataset:
|
|
|
1389 |
name: MTEB CQADupstackGisRetrieval (default)
|
|
|
|
|
1390 |
type: mteb/cqadupstack-gis
|
1391 |
+
config: default
|
1392 |
+
split: test
|
1393 |
+
revision: 5003b3064772da1887988e05400cf3806fe491f2
|
1394 |
metrics:
|
1395 |
- type: main_score
|
1396 |
value: 14.183000000000002
|
|
|
1674 |
value: 13.221
|
1675 |
- type: recall_at_5
|
1676 |
value: 14.895
|
1677 |
+
- task:
|
1678 |
type: Retrieval
|
1679 |
+
dataset:
|
|
|
1680 |
name: MTEB CQADupstackMathematicaRetrieval (default)
|
|
|
|
|
1681 |
type: mteb/cqadupstack-mathematica
|
1682 |
+
config: default
|
1683 |
+
split: test
|
1684 |
+
revision: 90fceea13679c63fe563ded68f3b6f06e50061de
|
1685 |
metrics:
|
1686 |
- type: main_score
|
1687 |
value: 8.94
|
|
|
1965 |
value: 8.260000000000002
|
1966 |
- type: recall_at_5
|
1967 |
value: 10.82
|
1968 |
+
- task:
|
1969 |
type: Retrieval
|
1970 |
+
dataset:
|
|
|
1971 |
name: MTEB CQADupstackPhysicsRetrieval (default)
|
|
|
|
|
1972 |
type: mteb/cqadupstack-physics
|
1973 |
+
config: default
|
1974 |
+
split: test
|
1975 |
+
revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4
|
1976 |
metrics:
|
1977 |
- type: main_score
|
1978 |
value: 20.294999999999998
|
|
|
2256 |
value: 19.167
|
2257 |
- type: recall_at_5
|
2258 |
value: 22.281000000000002
|
2259 |
+
- task:
|
2260 |
type: Retrieval
|
2261 |
+
dataset:
|
|
|
2262 |
name: MTEB CQADupstackProgrammersRetrieval (default)
|
|
|
|
|
2263 |
type: mteb/cqadupstack-programmers
|
2264 |
+
config: default
|
2265 |
+
split: test
|
2266 |
+
revision: 6184bc1440d2dbc7612be22b50686b8826d22b32
|
2267 |
metrics:
|
2268 |
- type: main_score
|
2269 |
value: 14.094999999999999
|
|
|
2547 |
value: 12.546
|
2548 |
- type: recall_at_5
|
2549 |
value: 15.453
|
2550 |
+
- task:
|
2551 |
type: Retrieval
|
2552 |
+
dataset:
|
|
|
2553 |
name: MTEB CQADupstackRetrieval (default)
|
|
|
|
|
2554 |
type: CQADupstackRetrieval_is_a_combined_dataset
|
2555 |
+
config: default
|
2556 |
+
split: test
|
2557 |
+
revision: CQADupstackRetrieval_is_a_combined_dataset
|
2558 |
metrics:
|
2559 |
- type: main_score
|
2560 |
value: 16.292583333333337
|
2561 |
- type: ndcg_at_10
|
2562 |
value: 16.292583333333337
|
2563 |
+
- task:
|
2564 |
type: Retrieval
|
2565 |
+
dataset:
|
|
|
2566 |
name: MTEB CQADupstackStatsRetrieval (default)
|
|
|
|
|
2567 |
type: mteb/cqadupstack-stats
|
2568 |
+
config: default
|
2569 |
+
split: test
|
2570 |
+
revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a
|
2571 |
metrics:
|
2572 |
- type: main_score
|
2573 |
value: 11.084
|
|
|
2851 |
value: 9.052
|
2852 |
- type: recall_at_5
|
2853 |
value: 12.891
|
2854 |
+
- task:
|
2855 |
type: Retrieval
|
2856 |
+
dataset:
|
|
|
2857 |
name: MTEB CQADupstackTexRetrieval (default)
|
|
|
|
|
2858 |
type: mteb/cqadupstack-tex
|
2859 |
+
config: default
|
2860 |
+
split: test
|
2861 |
+
revision: 46989137a86843e03a6195de44b09deda022eec7
|
2862 |
metrics:
|
2863 |
- type: main_score
|
2864 |
value: 9.0
|
|
|
3142 |
value: 7.965999999999999
|
3143 |
- type: recall_at_5
|
3144 |
value: 9.795
|
3145 |
+
- task:
|
3146 |
type: Retrieval
|
3147 |
+
dataset:
|
|
|
3148 |
name: MTEB CQADupstackUnixRetrieval (default)
|
|
|
|
|
3149 |
type: mteb/cqadupstack-unix
|
3150 |
+
config: default
|
3151 |
+
split: test
|
3152 |
+
revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53
|
3153 |
metrics:
|
3154 |
- type: main_score
|
3155 |
value: 14.642
|
|
|
3433 |
value: 13.555
|
3434 |
- type: recall_at_5
|
3435 |
value: 16.259
|
3436 |
+
- task:
|
3437 |
type: Retrieval
|
3438 |
+
dataset:
|
|
|
3439 |
name: MTEB CQADupstackWebmastersRetrieval (default)
|
|
|
|
|
3440 |
type: mteb/cqadupstack-webmasters
|
3441 |
+
config: default
|
3442 |
+
split: test
|
3443 |
+
revision: 160c094312a0e1facb97e55eeddb698c0abe3571
|
3444 |
metrics:
|
3445 |
- type: main_score
|
3446 |
value: 17.971999999999998
|
|
|
3724 |
value: 15.906
|
3725 |
- type: recall_at_5
|
3726 |
value: 20.16
|
3727 |
+
- task:
|
3728 |
type: Retrieval
|
3729 |
+
dataset:
|
|
|
3730 |
name: MTEB CQADupstackWordpressRetrieval (default)
|
|
|
|
|
3731 |
type: mteb/cqadupstack-wordpress
|
3732 |
+
config: default
|
3733 |
+
split: test
|
3734 |
+
revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4
|
3735 |
metrics:
|
3736 |
- type: main_score
|
3737 |
value: 11.357000000000001
|
|
|
4015 |
value: 10.789
|
4016 |
- type: recall_at_5
|
4017 |
value: 14.116000000000001
|
4018 |
+
- task:
|
4019 |
type: Retrieval
|
4020 |
+
dataset:
|
|
|
4021 |
name: MTEB ClimateFEVER (default)
|
|
|
|
|
4022 |
type: mteb/climate-fever
|
4023 |
+
config: default
|
4024 |
+
split: test
|
4025 |
+
revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380
|
4026 |
metrics:
|
4027 |
- type: main_score
|
4028 |
value: 8.665000000000001
|
|
|
4306 |
value: 5.862
|
4307 |
- type: recall_at_5
|
4308 |
value: 7.595000000000001
|
4309 |
+
- task:
|
4310 |
type: Retrieval
|
4311 |
+
dataset:
|
|
|
4312 |
name: MTEB DBPedia (default)
|
|
|
|
|
4313 |
type: mteb/dbpedia
|
4314 |
+
config: default
|
4315 |
+
split: test
|
4316 |
+
revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659
|
4317 |
metrics:
|
4318 |
- type: main_score
|
4319 |
value: 18.317
|
|
|
4597 |
value: 6.6530000000000005
|
4598 |
- type: recall_at_5
|
4599 |
value: 7.93
|
4600 |
+
- task:
|
4601 |
+
type: Classification
|
4602 |
+
dataset:
|
|
|
4603 |
name: MTEB EmotionClassification (default)
|
|
|
|
|
4604 |
type: mteb/emotion
|
4605 |
+
config: default
|
4606 |
+
split: test
|
4607 |
+
revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37
|
4608 |
metrics:
|
4609 |
- type: accuracy
|
4610 |
value: 42.035
|
|
|
4614 |
value: 44.414257303205105
|
4615 |
- type: main_score
|
4616 |
value: 42.035
|
4617 |
+
- task:
|
4618 |
+
type: Retrieval
|
4619 |
+
dataset:
|
|
|
4620 |
name: MTEB FEVER (default)
|
|
|
|
|
4621 |
type: mteb/fever
|
4622 |
+
config: default
|
4623 |
+
split: test
|
4624 |
+
revision: bea83ef9e8fb933d90a2f1d5515737465d613e12
|
4625 |
metrics:
|
4626 |
- type: main_score
|
4627 |
value: 18.761
|
|
|
4905 |
value: 17.79
|
4906 |
- type: recall_at_5
|
4907 |
value: 21.956
|
4908 |
+
- task:
|
4909 |
type: Retrieval
|
4910 |
+
dataset:
|
|
|
4911 |
name: MTEB FiQA2018 (default)
|
|
|
|
|
4912 |
type: mteb/fiqa
|
4913 |
+
config: default
|
4914 |
+
split: test
|
4915 |
+
revision: 27a168819829fe9bcd655c2df245fb19452e8e06
|
4916 |
metrics:
|
4917 |
- type: main_score
|
4918 |
value: 10.219000000000001
|
|
|
5196 |
value: 7.8549999999999995
|
5197 |
- type: recall_at_5
|
5198 |
value: 9.861
|
5199 |
+
- task:
|
5200 |
type: Retrieval
|
5201 |
+
dataset:
|
|
|
5202 |
name: MTEB HotpotQA (default)
|
|
|
|
|
5203 |
type: mteb/hotpotqa
|
5204 |
+
config: default
|
5205 |
+
split: test
|
5206 |
+
revision: ab518f4d6fcca38d87c25209f94beba119d02014
|
5207 |
metrics:
|
5208 |
- type: main_score
|
5209 |
value: 21.614
|
|
|
5487 |
value: 17.873
|
5488 |
- type: recall_at_5
|
5489 |
value: 20.608
|
5490 |
+
- task:
|
5491 |
+
type: Classification
|
5492 |
+
dataset:
|
|
|
5493 |
name: MTEB ImdbClassification (default)
|
|
|
|
|
5494 |
type: mteb/imdb
|
5495 |
+
config: default
|
5496 |
+
split: test
|
5497 |
+
revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7
|
5498 |
metrics:
|
5499 |
- type: accuracy
|
5500 |
value: 70.35639999999998
|
|
|
5508 |
value: 70.18257490051944
|
5509 |
- type: main_score
|
5510 |
value: 70.35639999999998
|
5511 |
+
- task:
|
5512 |
+
type: Retrieval
|
5513 |
+
dataset:
|
|
|
5514 |
name: MTEB MSMARCO (default)
|
|
|
|
|
5515 |
type: mteb/msmarco
|
5516 |
+
config: default
|
5517 |
+
split: test
|
5518 |
+
revision: c5a29a104738b98a9e76336939199e264163d4a0
|
5519 |
metrics:
|
5520 |
- type: main_score
|
5521 |
value: 29.474
|
|
|
5799 |
value: 2.2800000000000002
|
5800 |
- type: recall_at_5
|
5801 |
value: 3.94
|
5802 |
+
- task:
|
5803 |
+
type: Classification
|
5804 |
+
dataset:
|
|
|
5805 |
name: MTEB MTOPDomainClassification (en)
|
|
|
|
|
5806 |
type: mteb/mtop_domain
|
5807 |
+
config: en
|
5808 |
+
split: test
|
5809 |
+
revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf
|
5810 |
metrics:
|
5811 |
- type: accuracy
|
5812 |
value: 79.27268581851345
|
|
|
5816 |
value: 79.14088602852584
|
5817 |
- type: main_score
|
5818 |
value: 79.27268581851345
|
5819 |
+
- task:
|
5820 |
type: Classification
|
5821 |
+
dataset:
|
|
|
5822 |
name: MTEB MTOPIntentClassification (en)
|
|
|
|
|
5823 |
type: mteb/mtop_intent
|
5824 |
+
config: en
|
5825 |
+
split: test
|
5826 |
+
revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba
|
5827 |
metrics:
|
5828 |
- type: accuracy
|
5829 |
value: 42.49886000911992
|
|
|
5833 |
value: 46.29236281595424
|
5834 |
- type: main_score
|
5835 |
value: 42.49886000911992
|
5836 |
+
- task:
|
5837 |
type: Classification
|
5838 |
+
dataset:
|
|
|
5839 |
name: MTEB MassiveIntentClassification (en)
|
|
|
|
|
5840 |
type: mteb/amazon_massive_intent
|
5841 |
+
config: en
|
5842 |
+
split: test
|
5843 |
+
revision: 4672e20407010da34463acc759c162ca9734bca6
|
5844 |
metrics:
|
5845 |
- type: accuracy
|
5846 |
value: 54.065232010759914
|
|
|
5850 |
value: 52.69815077422998
|
5851 |
- type: main_score
|
5852 |
value: 54.065232010759914
|
5853 |
+
- task:
|
5854 |
type: Classification
|
5855 |
+
dataset:
|
|
|
5856 |
name: MTEB MassiveScenarioClassification (en)
|
|
|
|
|
5857 |
type: mteb/amazon_massive_scenario
|
5858 |
+
config: en
|
5859 |
+
split: test
|
5860 |
+
revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8
|
5861 |
metrics:
|
5862 |
- type: accuracy
|
5863 |
value: 59.596503026227296
|
|
|
      value: 59.23698301210568
    - type: main_score
      value: 59.596503026227296
  - task:
      type: Clustering
    dataset:
      name: MTEB MedrxivClusteringP2P (default)
      type: mteb/medrxiv-clustering-p2p
      config: default
      split: test
      revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73
    metrics:
    - type: main_score
      value: 25.59161751046095
    - type: v_measure
      value: 25.59161751046095
    - type: v_measure_std
      value: 1.4816189134361553
  - task:
      type: Clustering
    dataset:
      name: MTEB MedrxivClusteringS2S (default)
      type: mteb/medrxiv-clustering-s2s
      config: default
      split: test
      revision: 35191c8c0dca72d8ff3efcd72aa802307d469663
    metrics:
    - type: main_score
      value: 21.396391045777328
    - type: v_measure
      value: 21.396391045777328
    - type: v_measure_std
      value: 1.6103207158789596
  - task:
      type: Reranking
    dataset:
      name: MTEB MindSmallReranking (default)
      type: mteb/mind_small
      config: default
      split: test
      revision: 59042f120c80e8afa9cdbb224f67076cec0fc9a7
    metrics:
    - type: main_score
      value: 28.017817065141404
    - type: nAUC_mrr_max
      value: -19.527290469919414
    - type: nAUC_mrr_std
      value: -6.772185014428633
  - task:
      type: Retrieval
    dataset:
      name: MTEB NFCorpus (default)
      type: mteb/nfcorpus
      config: default
      split: test
      revision: ec0fa4fe99da2ff19ca1214b7966684033a58814
    metrics:
    - type: main_score
      value: 18.958
    - type: recall_at_3
      value: 5.005
    - type: recall_at_5
      value: 6.3950000000000005
  - task:
      type: Retrieval
    dataset:
      name: MTEB NQ (default)
      type: mteb/nq
      config: default
      split: test
      revision: b774495ed302d8c44a3a7ea25c90dbce03968f31
    metrics:
    - type: main_score
      value: 13.048000000000002
    - type: recall_at_3
      value: 11.584999999999999
    - type: recall_at_5
      value: 15.662
  - task:
      type: Retrieval
    dataset:
      name: MTEB QuoraRetrieval (default)
      type: mteb/quora
      config: default
      split: test
      revision: e4e08e0b7dbe3c8700f0daef558ff32256715259
    metrics:
    - type: main_score
      value: 75.889
    - type: recall_at_3
      value: 74.507
    - type: recall_at_5
      value: 79.487
  - task:
      type: Clustering
    dataset:
      name: MTEB RedditClustering (default)
      type: mteb/reddit-clustering
      config: default
      split: test
      revision: 24640382cdbf8abc73003fb0fa6d111a705499eb
    metrics:
    - type: main_score
      value: 29.134297978095674
    - type: v_measure
      value: 29.134297978095674
    - type: v_measure_std
      value: 3.9934034124121185
  - task:
      type: Clustering
    dataset:
      name: MTEB RedditClusteringP2P (default)
      type: mteb/reddit-clustering-p2p
      config: default
      split: test
      revision: 385e3cb46b4cfa89021f56c4380204149d0efe33
    metrics:
    - type: main_score
      value: 39.215421675518
    - type: v_measure
      value: 39.215421675518
    - type: v_measure_std
      value: 10.607286582764162
  - task:
      type: Retrieval
    dataset:
      name: MTEB SCIDOCS (default)
      type: mteb/scidocs
      config: default
      split: test
      revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88
    metrics:
    - type: main_score
      value: 8.163
    - type: recall_at_3
      value: 4.35
    - type: recall_at_5
      value: 5.765
  - task:
      type: STS
    dataset:
      name: MTEB SICK-R (default)
      type: mteb/sickr-sts
      config: default
      split: test
      revision: 20a6d6f312dd54037fe07a32d58e5e168867909d
    metrics:
    - type: cosine_pearson
      value: 74.06014749723313
    - type: pearson
      value: 74.06014749723313
    - type: spearman
      value: 62.85583348143348
  - task:
      type: STS
    dataset:
      name: MTEB STS12 (default)
      type: mteb/sts12-sts
      config: default
      split: test
      revision: a0d554a64d88156834ff5ae9920b964011b16384
    metrics:
    - type: cosine_pearson
      value: 71.71587397454503
    - type: pearson
      value: 71.71587397454503
    - type: spearman
      value: 62.07913034464432
  - task:
      type: STS
    dataset:
      name: MTEB STS13 (default)
      type: mteb/sts13-sts
      config: default
      split: test
      revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca
    metrics:
    - type: cosine_pearson
      value: 74.00146491973214
    - type: pearson
      value: 74.00146491973214
    - type: spearman
      value: 75.73113726697468
  - task:
      type: STS
    dataset:
      name: MTEB STS14 (default)
      type: mteb/sts14-sts
      config: default
      split: test
      revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375
    metrics:
    - type: cosine_pearson
      value: 73.18799052289306
    - type: pearson
      value: 73.18799052289306
    - type: spearman
      value: 69.27997439795548
  - task:
      type: STS
    dataset:
      name: MTEB STS15 (default)
      type: mteb/sts15-sts
      config: default
      split: test
      revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3
    metrics:
    - type: cosine_pearson
      value: 75.05240168700195
    - type: pearson
      value: 75.05240168700195
    - type: spearman
      value: 76.32976845993336
  - task:
      type: STS
    dataset:
      name: MTEB STS16 (default)
      type: mteb/sts16-sts
      config: default
      split: test
      revision: 4d8694f8f0e0100860b497b999b3dbed754a0513
    metrics:
    - type: cosine_pearson
      value: 71.35240308275529
    - type: pearson
      value: 71.35240308275529
    - type: spearman
      value: 73.46659216141927
  - task:
      type: STS
    dataset:
      name: MTEB STS17 (en-en)
      type: mteb/sts17-crosslingual-sts
      config: en-en
      split: test
      revision: faeb762787bd10488a50c8b5be4a3b82e411949c
    metrics:
    - type: cosine_pearson
      value: 80.32585308865436
    - type: pearson
      value: 80.32585308865436
    - type: spearman
      value: 82.08042618874391
  - task:
      type: STS
    dataset:
      name: MTEB STS17 (fr-en)
      type: mteb/sts17-crosslingual-sts
      config: fr-en
      split: test
      revision: faeb762787bd10488a50c8b5be4a3b82e411949c
    metrics:
    - type: cosine_pearson
      value: 26.492433454505004
    - type: pearson
      value: 26.492433454505004
    - type: spearman
      value: 25.26192630209604
  - task:
      type: STS
    dataset:
      name: MTEB STS17 (en-ar)
      type: mteb/sts17-crosslingual-sts
      config: en-ar
      split: test
      revision: faeb762787bd10488a50c8b5be4a3b82e411949c
    metrics:
    - type: cosine_pearson
      value: 4.849502004066215
    - type: pearson
      value: 4.849502004066215
    - type: spearman
      value: 2.4221360201347566
  - task:
      type: STS
    dataset:
      name: MTEB STS17 (it-en)
      type: mteb/sts17-crosslingual-sts
      config: it-en
      split: test
      revision: faeb762787bd10488a50c8b5be4a3b82e411949c
    metrics:
    - type: cosine_pearson
      value: 17.67430795163699
    - type: pearson
      value: 17.67430795163699
    - type: spearman
      value: 14.138028269188412
  - task:
      type: STS
    dataset:
      name: MTEB STS17 (en-tr)
      type: mteb/sts17-crosslingual-sts
      config: en-tr
      split: test
      revision: faeb762787bd10488a50c8b5be4a3b82e411949c
    metrics:
    - type: cosine_pearson
      value: 11.032677618214326
    - type: pearson
      value: 11.032677618214326
    - type: spearman
      value: 8.819837594034183
  - task:
      type: STS
    dataset:
      name: MTEB STS17 (nl-en)
      type: mteb/sts17-crosslingual-sts
      config: nl-en
      split: test
      revision: faeb762787bd10488a50c8b5be4a3b82e411949c
    metrics:
    - type: cosine_pearson
      value: 21.77242194085935
    - type: pearson
      value: 21.77242194085935
    - type: spearman
      value: 19.564246863458028
  - task:
      type: STS
    dataset:
      name: MTEB STS17 (en-de)
      type: mteb/sts17-crosslingual-sts
      config: en-de
      split: test
      revision: faeb762787bd10488a50c8b5be4a3b82e411949c
    metrics:
    - type: cosine_pearson
      value: 24.34392722914247
    - type: pearson
      value: 24.34392722914247
    - type: spearman
      value: 22.516912091222096
  - task:
      type: STS
    dataset:
      name: MTEB STS17 (es-en)
      type: mteb/sts17-crosslingual-sts
      config: es-en
      split: test
      revision: faeb762787bd10488a50c8b5be4a3b82e411949c
    metrics:
    - type: cosine_pearson
      value: 11.486165309912764
    - type: pearson
      value: 11.486165309912764
    - type: spearman
      value: 10.139614392782256
  - task:
      type: STS
    dataset:
      name: MTEB STS22 (en)
      type: mteb/sts22-crosslingual-sts
      config: en
      split: test
      revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
    metrics:
    - type: cosine_pearson
      value: 56.1393113780294
    - type: pearson
      value: 56.1393113780294
    - type: spearman
      value: 62.64707232707212
  - task:
      type: STS
    dataset:
      name: MTEB STS22 (zh-en)
      type: mteb/sts22-crosslingual-sts
      config: zh-en
      split: test
      revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
    metrics:
    - type: cosine_pearson
      value: 9.554093605202135
    - type: pearson
      value: 9.554093605202135
    - type: spearman
      value: 11.788855140937605
  - task:
      type: STS
    dataset:
      name: MTEB STS22 (de-en)
      type: mteb/sts22-crosslingual-sts
      config: de-en
      split: test
      revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
    metrics:
    - type: cosine_pearson
      value: 23.699968394697848
    - type: pearson
      value: 23.699968394697848
    - type: spearman
      value: 25.635685273215014
  - task:
      type: STS
    dataset:
      name: MTEB STS22 (es-en)
      type: mteb/sts22-crosslingual-sts
      config: es-en
      split: test
      revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
    metrics:
    - type: cosine_pearson
      value: 9.344287684864119
    - type: pearson
      value: 9.344287684864119
    - type: spearman
      value: 10.042423712385212
  - task:
      type: STS
    dataset:
      name: MTEB STS22 (pl-en)
      type: mteb/sts22-crosslingual-sts
      config: pl-en
      split: test
      revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
    metrics:
    - type: cosine_pearson
      value: 8.001041267374578
    - type: pearson
      value: 8.001041267374578
    - type: spearman
      value: 15.127881072012025
  - task:
      type: STS
    dataset:
      name: MTEB STSBenchmark (default)
      type: mteb/stsbenchmark-sts
      config: default
      split: test
      revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831
    metrics:
    - type: cosine_pearson
      value: 73.57677681061787
    - type: pearson
      value: 73.57677681061787
    - type: spearman
      value: 72.80800903257308
  - task:
      type: Reranking
    dataset:
      name: MTEB SciDocsRR (default)
      type: mteb/scidocs-reranking
      config: default
      split: test
      revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab
    metrics:
    - type: main_score
      value: 66.13944998572143
    - type: nAUC_mrr_max
      value: 69.96013858740152
    - type: nAUC_mrr_std
      value: 62.072046098925156
  - task:
      type: Retrieval
    dataset:
      name: MTEB SciFact (default)
      type: mteb/scifact
      config: default
      split: test
      revision: 0228b52cf27578f30900b9e5271d331663a030d7
    metrics:
    - type: main_score
      value: 36.55
    - type: recall_at_3
      value: 36.306
    - type: recall_at_5
      value: 43.389
  - task:
      type: PairClassification
    dataset:
      name: MTEB SprintDuplicateQuestions (default)
      type: mteb/sprintduplicatequestions-pairclassification
      config: default
      split: test
      revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46
    metrics:
    - type: cosine_accuracy
      value: 99.68415841584158
    - type: similarity_precision
      value: 86.65231431646933
    - type: similarity_recall
      value: 80.5
  - task:
      type: Clustering
    dataset:
      name: MTEB StackExchangeClustering (default)
      type: mteb/stackexchange-clustering
      config: default
      split: test
      revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259
    metrics:
    - type: main_score
      value: 37.82716668016299
    - type: v_measure
      value: 37.82716668016299
    - type: v_measure_std
      value: 3.9071651545475055
  - task:
      type: Clustering
    dataset:
      name: MTEB StackExchangeClusteringP2P (default)
      type: mteb/stackexchange-clustering-p2p
      config: default
      split: test
      revision: 815ca46b2622cec33ccafc3735d572c266efdb44
    metrics:
    - type: main_score
      value: 31.549916824347523
    - type: v_measure
      value: 31.549916824347523
    - type: v_measure_std
      value: 1.649284454526032
  - task:
      type: Reranking
    dataset:
      name: MTEB StackOverflowDupQuestions (default)
      type: mteb/stackoverflowdupquestions-reranking
      config: default
      split: test
      revision: e185fbe320c72810689fc5848eb6114e1ef5ec69
    metrics:
    - type: main_score
      value: 40.201162273119
    - type: nAUC_mrr_max
      value: 16.03297395659567
    - type: nAUC_mrr_std
      value: 4.5441260195062885
  - task:
      type: Summarization
    dataset:
      name: MTEB SummEval (default)
      type: mteb/summeval
      config: default
      split: test
      revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c
    metrics:
    - type: cosine_pearson
      value: 30.982384340344282
    - type: pearson
      value: 30.982384340344282
    - type: spearman
      value: 31.512077655680574
  - task:
      type: Retrieval
    dataset:
      name: MTEB TRECCOVID (default)
      type: mteb/trec-covid
      config: default
      split: test
      revision: bb9466bac8153a0349341eb1b22e06409e78ef4e
    metrics:
    - type: main_score
      value: 40.475
    - type: recall_at_3
      value: 0.337
    - type: recall_at_5
      value: 0.5329999999999999
  - task:
      type: Retrieval
    dataset:
      name: MTEB Touche2020 (default)
      type: mteb/touche2020
      config: default
      split: test
      revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f
    metrics:
    - type: main_score
      value: 12.869
    - type: recall_at_3
      value: 2.495
    - type: recall_at_5
      value: 5.4719999999999995
  - task:
      type: Classification
    dataset:
      name: MTEB ToxicConversationsClassification (default)
      type: mteb/toxic_conversations_50k
      config: default
      split: test
      revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de
    metrics:
    - type: accuracy
      value: 66.591796875
    - type: f1_weighted
      value: 74.03661967545759
    - type: main_score
      value: 66.591796875
  - task:
      type: Classification
    dataset:
      name: MTEB TweetSentimentExtractionClassification (default)
      type: mteb/tweet_sentiment_extraction
      config: default
      split: test
      revision: d604517c81ca91fe16a244d1248fc021f9ecee7a
    metrics:
    - type: accuracy
      value: 52.524052065648
    - type: f1_weighted
      value: 51.956916785617736
    - type: main_score
      value: 52.524052065648
  - task:
      type: Clustering
    dataset:
      name: MTEB TwentyNewsgroupsClustering (default)
      type: mteb/twentynewsgroups-clustering
      config: default
      split: test
      revision: 6125ec4e24fa026cec8a478383ee943acfbd5449
    metrics:
    - type: main_score
      value: 27.072966157648477
    - type: v_measure
      value: 27.072966157648477
    - type: v_measure_std
      value: 1.563199572918265
  - task:
      type: PairClassification
    dataset:
      name: MTEB TwitterSemEval2015 (default)
      type: mteb/twittersemeval2015-pairclassification
      config: default
      split: test
      revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1
    metrics:
    - type: cosine_accuracy
      value: 80.6818859152411
    - type: similarity_precision
      value: 45.14719848053181
    - type: similarity_recall
      value: 62.71767810026385
  - task:
      type: PairClassification
    dataset:
      name: MTEB TwitterURLCorpus (default)
      type: mteb/twitterurlcorpus-pairclassification
      config: default
      split: test
      revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf
    metrics:
    - type: cosine_accuracy
      value: 85.89474909768309
    - type: similarity_precision
      value: 69.57530076822728
    - type: similarity_recall
      value: 73.91438250692947
---

# potion-base-2M Model Card
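
The metadata above records MTEB results computed from this model's sentence embeddings. As a quick orientation, the sketch below shows how such embeddings can be produced with model2vec's `StaticModel` API; the `minishlab/potion-base-2M` hub path and the example sentences are illustrative assumptions rather than part of this card.

```python
import numpy as np
from model2vec import StaticModel

# Load the static embedding model (hub path assumed for illustration).
model = StaticModel.from_pretrained("minishlab/potion-base-2M")

sentences = [
    "A man is eating food.",
    "A man is eating a piece of bread.",
]
embeddings = model.encode(sentences)  # numpy array of shape (2, embedding_dim)

# Cosine similarity between the two sentences, i.e. the similarity behind the
# cosine_* metrics reported in the metadata.
a, b = embeddings
print(float(a @ b / (np.linalg.norm(a) * np.linalg.norm(b))))
```

MTEB's STS tasks correlate such cosine similarities with human judgements, which is where values like `cosine_pearson` and `spearman` in the metadata come from.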