{
  "dataset_version": null,
  "mteb_version": "0.0.2",
  "test": {
    "evaluation_time": 2522.93,
    "map_at_1": 0.22167,
    "map_at_10": 0.29544,
    "map_at_100": 0.30579,
    "map_at_1000": 0.3067,
    "map_at_3": 0.26982,
    "map_at_5": 0.28474,
    "mrr_at_1": 0.24068,
    "mrr_at_10": 0.31237,
    "mrr_at_100": 0.32222,
    "mrr_at_1000": 0.32292,
    "mrr_at_3": 0.28776,
    "mrr_at_5": 0.30234,
    "ndcg_at_1": 0.24068,
    "ndcg_at_10": 0.33973,
    "ndcg_at_100": 0.39135,
    "ndcg_at_1000": 0.41444,
    "ndcg_at_3": 0.29018,
    "ndcg_at_5": 0.31559,
    "precision_at_1": 0.24068,
    "precision_at_10": 0.05299,
    "precision_at_100": 0.00823,
    "precision_at_1000": 0.00106,
    "precision_at_3": 0.12166,
    "precision_at_5": 0.08768,
    "recall_at_1": 0.22167,
    "recall_at_10": 0.46115,
    "recall_at_100": 0.69867,
    "recall_at_1000": 0.87234,
    "recall_at_3": 0.32798,
    "recall_at_5": 0.38951
  }
}