model,dataset,metric,value
all-MiniLM-L6-v2,AmazonCounterfactualClassification,accuracy,0.641492537313433
all-MiniLM-L6-v2,AmazonPolarityClassification,accuracy,0.62582975
all-MiniLM-L6-v2,AmazonReviewsClassification,accuracy,0.31786
all-MiniLM-L6-v2,Banking77Classification,accuracy,0.7975000000000001
all-MiniLM-L6-v2,EmotionClassification,accuracy,0.3843
all-MiniLM-L6-v2,ImdbClassification,accuracy,0.606612
all-MiniLM-L6-v2,MassiveIntentClassification,accuracy,0.6740416946872899
all-MiniLM-L6-v2,MassiveScenarioClassification,accuracy,0.7575655682582381
all-MiniLM-L6-v2,MTOPDomainClassification,accuracy,0.9156178750569997
all-MiniLM-L6-v2,MTOPIntentClassification,accuracy,0.6218194254445966
all-MiniLM-L6-v2,ToxicConversationsClassification,accuracy,0.669918
all-MiniLM-L6-v2,TweetSentimentExtractionClassification,accuracy,0.554102999434069
all-MiniLM-L6-v2,ArxivClusteringP2P,v_measure,0.46545950790501556
all-MiniLM-L6-v2,ArxivClusteringS2S,v_measure,0.3785709823840442
all-MiniLM-L6-v2,BiorxivClusteringP2P,v_measure,0.38483019141351227
all-MiniLM-L6-v2,BiorxivClusteringS2S,v_measure,0.33170209943399803
all-MiniLM-L6-v2,MedrxivClusteringP2P,v_measure,0.3440873490143895
all-MiniLM-L6-v2,MedrxivClusteringS2S,v_measure,0.3229220750053091
all-MiniLM-L6-v2,RedditClustering,v_measure,0.5066969274980475
all-MiniLM-L6-v2,RedditClusteringP2P,v_measure,0.8150529020581544
all-MiniLM-L6-v2,StackExchangeClustering,v_measure,0.5336142166203601
all-MiniLM-L6-v2,StackExchangeClusteringP2P,v_measure,0.3800182562780098
all-MiniLM-L6-v2,TwentyNewsgroupsClustering,v_measure,0.4686027142764777
all-MiniLM-L6-v2,SprintDuplicateQuestions,ap,0.9455063045792446
all-MiniLM-L6-v2,TwitterSemEval2015,ap,0.6785802440228593
all-MiniLM-L6-v2,TwitterURLCorpus,ap,0.847016595145111
all-MiniLM-L6-v2,AskUbuntuDupQuestions,map,0.634768168123733
all-MiniLM-L6-v2,MindSmallReranking,map,0.30798042020200267
all-MiniLM-L6-v2,SciDocsRR,map,0.8711941318470207
all-MiniLM-L6-v2,StackOverflowDupQuestions,map,0.5076213438431608
all-MiniLM-L6-v2,ArguAna,ndcg_at_10,0.50167
all-MiniLM-L6-v2,ClimateFEVER,ndcg_at_10,0.20272
all-MiniLM-L6-v2,CQADupstackRetrieval,ndcg_at_10,0.4132475
all-MiniLM-L6-v2,DBPedia,ndcg_at_10,0.3233
all-MiniLM-L6-v2,FEVER,ndcg_at_10,0.51934
all-MiniLM-L6-v2,FiQA2018,ndcg_at_10,0.36867
all-MiniLM-L6-v2,HotpotQA,ndcg_at_10,0.46513
all-MiniLM-L6-v2,MSMARCO,ndcg_at_10,0.63685
all-MiniLM-L6-v2,NFCorpus,ndcg_at_10,0.31594
all-MiniLM-L6-v2,NQ,ndcg_at_10,0.43869
all-MiniLM-L6-v2,QuoraRetrieval,ndcg_at_10,0.87556
all-MiniLM-L6-v2,SCIDOCS,ndcg_at_10,0.21641
all-MiniLM-L6-v2,SciFact,ndcg_at_10,0.64508
all-MiniLM-L6-v2,Touche2020,ndcg_at_10,0.16904
all-MiniLM-L6-v2,TRECCOVID,ndcg_at_10,0.47246
all-MiniLM-L6-v2,BIOSSES,cosine_spearman,0.8164041444909368
all-MiniLM-L6-v2,SICK-R,cosine_spearman,0.7758245130495687
all-MiniLM-L6-v2,STS12,cosine_spearman,0.7236900735029991
all-MiniLM-L6-v2,STS13,cosine_spearman,0.8060316722220763
all-MiniLM-L6-v2,STS14,cosine_spearman,0.7558912800301661
all-MiniLM-L6-v2,STS15,cosine_spearman,0.8538966051883823
all-MiniLM-L6-v2,STS16,cosine_spearman,0.7898945343973262
all-MiniLM-L6-v2,STS17,cosine_spearman,0.8758779089494525
all-MiniLM-L6-v2,STS22,cosine_spearman,0.6721465212910986
all-MiniLM-L6-v2,STSBenchmark,cosine_spearman,0.8203246731235654
all-MiniLM-L6-v2,SummEval,cosine_spearman,0.3080875767124449