lodestone-base-4096-v1/mteb_results/CQADupstackTexRetrieval.json
{
  "dataset_revision": null,
  "mteb_dataset_name": "CQADupstackTexRetrieval",
  "mteb_version": "1.1.0",
  "test": {
    "evaluation_time": 533.51,
    "map_at_1": 0.11071,
    "map_at_10": 0.16202,
    "map_at_100": 0.17112,
    "map_at_1000": 0.17238,
    "map_at_3": 0.14508,
    "map_at_5": 0.15441,
    "mrr_at_1": 0.13833,
    "mrr_at_10": 0.19235,
    "mrr_at_100": 0.20109,
    "mrr_at_1000": 0.20196,
    "mrr_at_3": 0.17515,
    "mrr_at_5": 0.18505,
    "ndcg_at_1": 0.13833,
    "ndcg_at_10": 0.19643,
    "ndcg_at_100": 0.24298,
    "ndcg_at_1000": 0.27614,
    "ndcg_at_3": 0.16528,
    "ndcg_at_5": 0.17991,
    "precision_at_1": 0.13833,
    "precision_at_10": 0.03699,
    "precision_at_100": 0.00713,
    "precision_at_1000": 0.00116,
    "precision_at_3": 0.07903,
    "precision_at_5": 0.05891,
    "recall_at_1": 0.11071,
    "recall_at_10": 0.27019,
    "recall_at_100": 0.48404,
    "recall_at_1000": 0.72641,
    "recall_at_3": 0.18336,
    "recall_at_5": 0.21991
  }
}