results/e5-mistral-7b-instruct/SyntecRetrieval.json
{
  "dataset_revision": "77f7e271bf4a92b24fce5119f3486b583ca016ff",
  "mteb_dataset_name": "SyntecRetrieval",
  "mteb_version": "1.1.2.dev0",
  "test": {
    "evaluation_time": 45.64,
    "map_at_1": 0.33,
    "map_at_10": 0.48236,
    "map_at_100": 0.49096,
    "map_at_1000": 0.49096,
    "map_at_3": 0.44167,
    "map_at_5": 0.46667,
    "mrr_at_1": 0.33,
    "mrr_at_10": 0.48236,
    "mrr_at_100": 0.49096,
    "mrr_at_1000": 0.49096,
    "mrr_at_3": 0.44167,
    "mrr_at_5": 0.46667,
    "ndcg_at_1": 0.33,
    "ndcg_at_10": 0.55901,
    "ndcg_at_100": 0.60177,
    "ndcg_at_1000": 0.60177,
    "ndcg_at_3": 0.47726,
    "ndcg_at_5": 0.52244,
    "precision_at_1": 0.33,
    "precision_at_10": 0.08,
    "precision_at_100": 0.01,
    "precision_at_1000": 0.001,
    "precision_at_3": 0.19333,
    "precision_at_5": 0.138,
    "recall_at_1": 0.33,
    "recall_at_10": 0.8,
    "recall_at_100": 1.0,
    "recall_at_1000": 1.0,
    "recall_at_3": 0.58,
    "recall_at_5": 0.69
  }
}
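For reference, a minimal sketch of reading this result file back in Python. The local path is an assumption taken from the repository listing above; adjust it to wherever your checkout places the file. The key names match the JSON shown here.

```python
import json

# Load the MTEB result file for e5-mistral-7b-instruct on SyntecRetrieval.
# Path is hypothetical and mirrors the repository layout shown above.
with open("results/e5-mistral-7b-instruct/SyntecRetrieval.json") as f:
    results = json.load(f)

test_scores = results["test"]
print(results["mteb_dataset_name"], "@", results["dataset_revision"])
print("nDCG@10:  ", test_scores["ndcg_at_10"])    # 0.55901
print("MAP@10:   ", test_scores["map_at_10"])     # 0.48236
print("Recall@10:", test_scores["recall_at_10"])  # 0.8
```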