{
    "mteb_version": "0.0.2",
    "test": {
        "evaluation_time": 2931.62,
        "map_at_1": 0.14984,
        "map_at_10": 0.23457,
        "map_at_100": 0.24723,
        "map_at_1000": 0.24846,
        "map_at_3": 0.20873,
        "map_at_5": 0.22357,
        "mrr_at_1": 0.18159,
        "mrr_at_10": 0.27431,
        "mrr_at_100": 0.28449,
        "mrr_at_1000": 0.2852,
        "mrr_at_3": 0.24979,
        "mrr_at_5": 0.26447,
        "ndcg_at_1": 0.18159,
        "ndcg_at_10": 0.28628,
        "ndcg_at_100": 0.34741,
        "ndcg_at_1000": 0.37516,
        "ndcg_at_3": 0.23902,
        "ndcg_at_5": 0.26294,
        "precision_at_1": 0.18159,
        "precision_at_10": 0.05485,
        "precision_at_100": 0.00985,
        "precision_at_1000": 0.00136,
        "precision_at_3": 0.11774,
        "precision_at_5": 0.08731,
        "recall_at_1": 0.14984,
        "recall_at_10": 0.40198,
        "recall_at_100": 0.67115,
        "recall_at_1000": 0.86497,
        "recall_at_3": 0.27639,
        "recall_at_5": 0.33595
    },
    "mteb_dataset_name": "CQADupstackMathematicaRetrieval",
    "dataset_revision": "2b9f5791698b5be7bc5e10535c8690f20043c3db"
}