model,dataset,metric,value
contriever-base-msmarco,AmazonCounterfactualClassification,accuracy,0.7219402985074627
contriever-base-msmarco,AmazonPolarityClassification,accuracy,0.6863262499999999
contriever-base-msmarco,AmazonReviewsClassification,accuracy,0.37422
contriever-base-msmarco,Banking77Classification,accuracy,0.8001623376623377
contriever-base-msmarco,EmotionClassification,accuracy,0.44770000000000004
contriever-base-msmarco,ImdbClassification,accuracy,0.670388
contriever-base-msmarco,MassiveIntentClassification,accuracy,0.6777740416946872
contriever-base-msmarco,MassiveScenarioClassification,accuracy,0.7599865501008743
contriever-base-msmarco,MTOPDomainClassification,accuracy,0.9317829457364342
contriever-base-msmarco,MTOPIntentClassification,accuracy,0.6931372549019608
contriever-base-msmarco,ToxicConversationsClassification,accuracy,0.6776540000000001
contriever-base-msmarco,TweetSentimentExtractionClassification,accuracy,0.5610356536502547
contriever-base-msmarco,ArxivClusteringP2P,v_measure,0.42609650289899825
contriever-base-msmarco,ArxivClusteringS2S,v_measure,0.32317533856057756
contriever-base-msmarco,BiorxivClusteringP2P,v_measure,0.34974621074365847
contriever-base-msmarco,BiorxivClusteringS2S,v_measure,0.29083876572182377
contriever-base-msmarco,MedrxivClusteringP2P,v_measure,0.3118675108904548
contriever-base-msmarco,MedrxivClusteringS2S,v_measure,0.2727180592101203
contriever-base-msmarco,RedditClustering,v_measure,0.5489383973987073
contriever-base-msmarco,RedditClusteringP2P,v_measure,0.5757965400787036
contriever-base-msmarco,StackExchangeClustering,v_measure,0.6314901527677189
contriever-base-msmarco,StackExchangeClusteringP2P,v_measure,0.3225478381926452
contriever-base-msmarco,TwentyNewsgroupsClustering,v_measure,0.46822120585974714
contriever-base-msmarco,SprintDuplicateQuestions,ap,0.9554507015913917
contriever-base-msmarco,TwitterSemEval2015,ap,0.6684972822513366
contriever-base-msmarco,TwitterURLCorpus,ap,0.8520869965638479
contriever-base-msmarco,AskUbuntuDupQuestions,map,0.5668809109301288
contriever-base-msmarco,MindSmallReranking,map,0.3157849515126634
contriever-base-msmarco,SciDocsRR,map,0.7650730255518788
contriever-base-msmarco,StackOverflowDupQuestions,map,0.4778138013068855
contriever-base-msmarco,ArguAna,ndcg_at_10,0.4832
contriever-base-msmarco,ClimateFEVER,ndcg_at_10,0.24788
contriever-base-msmarco,CQADupstackRetrieval,ndcg_at_10,0.3366891666666667
contriever-base-msmarco,DBPedia,ndcg_at_10,0.38103
contriever-base-msmarco,FEVER,ndcg_at_10,0.59294
contriever-base-msmarco,FiQA2018,ndcg_at_10,0.2742
contriever-base-msmarco,HotpotQA,ndcg_at_10,0.56808
contriever-base-msmarco,MSMARCO,ndcg_at_10,0.62494
contriever-base-msmarco,NFCorpus,ndcg_at_10,0.31315
contriever-base-msmarco,NQ,ndcg_at_10,0.41833
contriever-base-msmarco,QuoraRetrieval,ndcg_at_10,0.86719
contriever-base-msmarco,SCIDOCS,ndcg_at_10,0.17125
contriever-base-msmarco,SciFact,ndcg_at_10,0.6551
contriever-base-msmarco,Touche2020,ndcg_at_10,0.15789
contriever-base-msmarco,TRECCOVID,ndcg_at_10,0.44769
contriever-base-msmarco,BIOSSES,cosine_spearman,0.8331816352111794
contriever-base-msmarco,SICK-R,cosine_spearman,0.702003740199912
contriever-base-msmarco,STS12,cosine_spearman,0.643399816144592
contriever-base-msmarco,STS13,cosine_spearman,0.8003351945178154
contriever-base-msmarco,STS14,cosine_spearman,0.7450891052510561
contriever-base-msmarco,STS15,cosine_spearman,0.8329896673390621
contriever-base-msmarco,STS16,cosine_spearman,0.7967145670824619
contriever-base-msmarco,STS17,cosine_spearman,0.863170912160602
contriever-base-msmarco,STS22,cosine_spearman,0.6463838183557634
contriever-base-msmarco,STSBenchmark,cosine_spearman,0.7880678352766406
contriever-base-msmarco,SummEval,cosine_spearman,0.30361250428863945