{
  "dataset_version": null,
  "mteb_version": "0.0.2",
  "test": {
    "evaluation_time": 30738.76,
    "map_at_1": 0.10367,
    "map_at_10": 0.18954,
    "map_at_100": 0.20675,
    "map_at_1000": 0.20868,
    "map_at_3": 0.15486,
    "map_at_5": 0.17347,
    "mrr_at_1": 0.23257,
    "mrr_at_10": 0.35419,
    "mrr_at_100": 0.36361,
    "mrr_at_1000": 0.36403,
    "mrr_at_3": 0.31748,
    "mrr_at_5": 0.34077,
    "ndcg_at_1": 0.23257,
    "ndcg_at_10": 0.2711,
    "ndcg_at_100": 0.33981,
    "ndcg_at_1000": 0.37444,
    "ndcg_at_3": 0.21472,
    "ndcg_at_5": 0.23769,
    "precision_at_1": 0.23257,
    "precision_at_10": 0.08704,
    "precision_at_100": 0.01606,
    "precision_at_1000": 0.00225,
    "precision_at_3": 0.16287,
    "precision_at_5": 0.13068,
    "recall_at_1": 0.10367,
    "recall_at_10": 0.33706,
    "recall_at_100": 0.57375,
    "recall_at_1000": 0.7679,
    "recall_at_3": 0.2018,
    "recall_at_5": 0.26215
  }
}
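
For reference, a minimal sketch of reading individual scores out of this result file, assuming it has been saved locally as results.json (the filename and path are assumptions, not part of the file itself):

import json

# Load the MTEB result JSON shown above (the path is an assumption).
with open("results.json") as f:
    results = json.load(f)

scores = results["test"]
# nDCG@10 is the metric MTEB reports as the headline score for retrieval tasks.
print("nDCG@10:", scores["ndcg_at_10"])                    # 0.2711
print("Recall@100:", scores["recall_at_100"])              # 0.57375
print("Evaluation time (s):", scores["evaluation_time"])   # 30738.76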