Sentence Similarity
sentence-transformers
PyTorch
English
bert
feature-extraction
mteb
custom_code
Eval Results
6 papers
lodestone-base-4096-v1 / mteb_results / CQADupstackPhysicsRetrieval.json
dylanAtHum's picture
Add CQADupstack Benchmarks
c25c4c3 unverified
{
"dataset_revision": null,
"mteb_dataset_name": "CQADupstackPhysicsRetrieval",
"mteb_version": "1.1.0",
"test": {
"evaluation_time": 181.93,
"map_at_1": 0.22764,
"map_at_10": 0.31744,
"map_at_100": 0.33037,
"map_at_1000": 0.33156,
"map_at_3": 0.29015,
"map_at_5": 0.30434,
"mrr_at_1": 0.28296,
"mrr_at_10": 0.3703,
"mrr_at_100": 0.37902,
"mrr_at_1000": 0.37966,
"mrr_at_3": 0.34568,
"mrr_at_5": 0.35786,
"ndcg_at_1": 0.28296,
"ndcg_at_10": 0.37289,
"ndcg_at_100": 0.42787,
"ndcg_at_1000": 0.45382,
"ndcg_at_3": 0.32598,
"ndcg_at_5": 0.34521,
"precision_at_1": 0.28296,
"precision_at_10": 0.06901,
"precision_at_100": 0.01135,
"precision_at_1000": 0.00152,
"precision_at_3": 0.15367,
"precision_at_5": 0.1103,
"recall_at_1": 0.22764,
"recall_at_10": 0.48807,
"recall_at_100": 0.71859,
"recall_at_1000": 0.89606,
"recall_at_3": 0.35594,
"recall_at_5": 0.40541
}
}