lodestone-base-4096-v1 / mteb_results / CQADupstackStatsRetrieval.json
{
  "dataset_revision": null,
  "mteb_dataset_name": "CQADupstackStatsRetrieval",
  "mteb_version": "1.1.0",
  "test": {
    "evaluation_time": 250.76,
    "map_at_1": 0.17582,
    "map_at_10": 0.22803,
    "map_at_100": 0.23503,
    "map_at_1000": 0.236,
    "map_at_3": 0.21375,
    "map_at_5": 0.22052,
    "mrr_at_1": 0.20399,
    "mrr_at_10": 0.2537,
    "mrr_at_100": 0.26016,
    "mrr_at_1000": 0.26091,
    "mrr_at_3": 0.23952,
    "mrr_at_5": 0.24619,
    "ndcg_at_1": 0.20399,
    "ndcg_at_10": 0.25964,
    "ndcg_at_100": 0.29607,
    "ndcg_at_1000": 0.32349,
    "ndcg_at_3": 0.23177,
    "ndcg_at_5": 0.24276,
    "precision_at_1": 0.20399,
    "precision_at_10": 0.04018,
    "precision_at_100": 0.00629,
    "precision_at_1000": 0.00093,
    "precision_at_3": 0.09969,
    "precision_at_5": 0.06748,
    "recall_at_1": 0.17582,
    "recall_at_10": 0.3335,
    "recall_at_100": 0.50219,
    "recall_at_1000": 0.71061,
    "recall_at_3": 0.2562,
    "recall_at_5": 0.28291
  }
}
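
A result file in this layout can be reproduced and inspected with the mteb Python package (version 1.1.0, as recorded above) together with sentence-transformers. The sketch below is illustrative only: the Hugging Face Hub model id and the trust_remote_code flag are assumptions, not taken from this file.

# Minimal sketch: re-run the CQADupstackStatsRetrieval benchmark and read the scores back.
import json

from mteb import MTEB
from sentence_transformers import SentenceTransformer

# Assumed Hub id for the lodestone-base-4096-v1 model; adjust to the actual repository.
model = SentenceTransformer("Hum-Works/lodestone-base-4096-v1", trust_remote_code=True)

evaluation = MTEB(tasks=["CQADupstackStatsRetrieval"])
# Writes mteb_results/CQADupstackStatsRetrieval.json, matching the file shown here.
evaluation.run(model, output_folder="mteb_results")

with open("mteb_results/CQADupstackStatsRetrieval.json") as f:
    scores = json.load(f)["test"]
print(scores["ndcg_at_10"], scores["recall_at_100"])  # 0.25964, 0.50219 in this file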