{
  "dataset_version": null,
  "mteb_version": "0.0.2",
  "test": {
    "evaluation_time": 254.16,
    "map_at_1": 0.2596,
    "map_at_10": 0.41619,
    "map_at_100": 0.42673,
    "map_at_1000": 0.42684,
    "map_at_3": 0.36569,
    "map_at_5": 0.39397,
    "mrr_at_1": 0.26316,
    "mrr_at_10": 0.41772,
    "mrr_at_100": 0.4282,
    "mrr_at_1000": 0.4283,
    "mrr_at_3": 0.36724,
    "mrr_at_5": 0.39529,
    "ndcg_at_1": 0.2596,
    "ndcg_at_10": 0.50491,
    "ndcg_at_100": 0.54865,
    "ndcg_at_1000": 0.55107,
    "ndcg_at_3": 0.40053,
    "ndcg_at_5": 0.45134,
    "precision_at_1": 0.2596,
    "precision_at_10": 0.07895,
    "precision_at_100": 0.00978,
    "precision_at_1000": 0.001,
    "precision_at_3": 0.16714,
    "precision_at_5": 0.12489,
    "recall_at_1": 0.2596,
    "recall_at_10": 0.78947,
    "recall_at_100": 0.97795,
    "recall_at_1000": 0.99644,
    "recall_at_3": 0.50142,
    "recall_at_5": 0.62447
  }
}