{
  "dataset_version": null,
  "mteb_version": "0.0.2",
  "test": {
    "evaluation_time": 168.77,
    "map_at_1": 0.05604,
    "map_at_10": 0.1238,
    "map_at_100": 0.15791,
    "map_at_1000": 0.17327,
    "map_at_3": 0.0915,
    "map_at_5": 0.10599,
    "mrr_at_1": 0.45201,
    "mrr_at_10": 0.53374,
    "mrr_at_100": 0.54089,
    "mrr_at_1000": 0.54123,
    "mrr_at_3": 0.51445,
    "mrr_at_5": 0.5259,
    "ndcg_at_1": 0.42879,
    "ndcg_at_10": 0.33891,
    "ndcg_at_100": 0.31392,
    "ndcg_at_1000": 0.4036,
    "ndcg_at_3": 0.39076,
    "ndcg_at_5": 0.37047,
    "precision_at_1": 0.44582,
    "precision_at_10": 0.25294,
    "precision_at_100": 0.08285,
    "precision_at_1000": 0.02148,
    "precision_at_3": 0.3612,
    "precision_at_5": 0.3195,
    "recall_at_1": 0.05604,
    "recall_at_10": 0.16239,
    "recall_at_100": 0.3216,
    "recall_at_1000": 0.64513,
    "recall_at_3": 0.10406,
    "recall_at_5": 0.12685
  }
}