SGPT-1.3B-weightedmean-msmarco-specb-bitfit / evaluation / mteb / CQADupstackMathematicaRetrieval.json
{
"dataset_version": null,
"mteb_version": "0.0.2",
"test": {
"evaluation_time": 686.89,
"map_at_1": 0.1124,
"map_at_10": 0.18503,
"map_at_100": 0.19553,
"map_at_1000": 0.1969,
"map_at_3": 0.1615,
"map_at_5": 0.17254,
"mrr_at_1": 0.13806,
"mrr_at_10": 0.21939,
"mrr_at_100": 0.22827,
"mrr_at_1000": 0.22911,
"mrr_at_3": 0.1932,
"mrr_at_5": 0.20558,
"ndcg_at_1": 0.13806,
"ndcg_at_10": 0.23383,
"ndcg_at_100": 0.28834,
"ndcg_at_1000": 0.32175,
"ndcg_at_3": 0.18652,
"ndcg_at_5": 0.20505,
"precision_at_1": 0.13806,
"precision_at_10": 0.04714,
"precision_at_100": 0.00864,
"precision_at_1000": 0.0013,
"precision_at_3": 0.09328,
"precision_at_5": 0.06841,
"recall_at_1": 0.1124,
"recall_at_10": 0.34854,
"recall_at_100": 0.59503,
"recall_at_1000": 0.8325,
"recall_at_3": 0.2202,
"recall_at_5": 0.26715
}
}