results/SGPT-5.8B-weightedmean-msmarco-specb-bitfit_results.csv
model,dataset,metric,value
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,AmazonCounterfactualClassification,accuracy,0.6922388059701492
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,AmazonPolarityClassification,accuracy,0.7126109999999999
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,AmazonReviewsClassification,accuracy,0.39191999999999994
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,Banking77Classification,accuracy,0.8449350649350649
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,EmotionClassification,accuracy,0.49655000000000005
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,ImdbClassification,accuracy,0.666376
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,MassiveIntentClassification,accuracy,0.703866845998655
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,MassiveScenarioClassification,accuracy,0.7627774041694687
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,MTOPDomainClassification,accuracy,0.9346557227542178
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,MTOPIntentClassification,accuracy,0.7242134062927497
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,ToxicConversationsClassification,accuracy,0.67709
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,TweetSentimentExtractionClassification,accuracy,0.5685059422750425
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,ArxivClusteringP2P,v_measure,0.4559037428592033
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,ArxivClusteringS2S,v_measure,0.3886371701986363
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,BiorxivClusteringP2P,v_measure,0.3655145972298938
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,BiorxivClusteringS2S,v_measure,0.3369901851846774
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,MedrxivClusteringP2P,v_measure,0.3151174592577334
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,MedrxivClusteringS2S,v_measure,0.28764235987575365
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,RedditClustering,v_measure,0.4045148482612238
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,RedditClusteringP2P,v_measure,0.5574949067303913
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,StackExchangeClustering,v_measure,0.5920812266121527
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,StackExchangeClusteringP2P,v_measure,0.33954248554638056
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,TwentyNewsgroupsClustering,v_measure,0.3946372298609047
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,SprintDuplicateQuestions,ap,0.9384024096781063
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,TwitterSemEval2015,ap,0.6686677647503386
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,TwitterURLCorpus,ap,0.8528960532524223
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,AskUbuntuDupQuestions,map,0.6162556869142777
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,MindSmallReranking,map,
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,SciDocsRR,map,0.8078825425914723
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,StackOverflowDupQuestions,map,0.5152800990025549
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,ArguAna,ndcg_at_10,0.52203
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,ClimateFEVER,ndcg_at_10,
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,CQADupstackRetrieval,ndcg_at_10,0.3940316666666666
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,DBPedia,ndcg_at_10,
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,FEVER,ndcg_at_10,
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,FiQA2018,ndcg_at_10,0.37463
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,HotpotQA,ndcg_at_10,
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,MSMARCO,ndcg_at_10,
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,NFCorpus,ndcg_at_10,0.36161
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,NQ,ndcg_at_10,
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,QuoraRetrieval,ndcg_at_10,0.87899
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,SCIDOCS,ndcg_at_10,0.19899
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,SciFact,ndcg_at_10,0.74485
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,Touche2020,ndcg_at_10,
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,TRECCOVID,ndcg_at_10,0.77592
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,BIOSSES,cosine_spearman,0.862501580394454
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,SICK-R,cosine_spearman,0.862501580394454
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,STS12,cosine_spearman,0.675020551515597
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,STS13,cosine_spearman,0.7916333501772059
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,STS14,cosine_spearman,0.7446048326701329
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,STS15,cosine_spearman,0.8447194637929274
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,STS16,cosine_spearman,0.8096089203722137
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,STS17,cosine_spearman,0.8778202647220289
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,STS22,cosine_spearman,0.6934889515492327
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,STSBenchmark,cosine_spearman,0.8138977341532744
SGPT-5.8B-weightedmean-msmarco-specb-bitfit,SummEval,cosine_spearman,0.24747448399760644