{
"dataset_revision": "c99d599f0a6ab9b85b065da6f9d94f9cf731679f",
"mteb_dataset_name": "XPQARetrieval",
"mteb_version": "1.1.3.dev0",
"test": {
"evaluation_time": 3.37,
"fr": {
"map_at_1": 0.10465,
"map_at_10": 0.15817,
"map_at_100": 0.16852,
"map_at_1000": 0.17067,
"map_at_3": 0.1397,
"map_at_5": 0.14979,
"mrr_at_1": 0.18158,
"mrr_at_10": 0.23793,
"mrr_at_100": 0.24574,
"mrr_at_1000": 0.24686,
"mrr_at_3": 0.22252,
"mrr_at_5": 0.23126,
"ndcg_at_1": 0.18158,
"ndcg_at_10": 0.19922,
"ndcg_at_100": 0.24954,
"ndcg_at_1000": 0.30658,
"ndcg_at_3": 0.1758,
"ndcg_at_5": 0.18213,
"precision_at_1": 0.18158,
"precision_at_10": 0.04967,
"precision_at_100": 0.00957,
"precision_at_1000": 0.00179,
"precision_at_3": 0.1117,
"precision_at_5": 0.08091,
"recall_at_1": 0.10465,
"recall_at_10": 0.2407,
"recall_at_100": 0.44709,
"recall_at_1000": 0.84379,
"recall_at_3": 0.16709,
"recall_at_5": 0.19902
}
}
}