{
"dataset_revision": "c99d599f0a6ab9b85b065da6f9d94f9cf731679f",
"mteb_dataset_name": "XPQARetrieval",
"mteb_version": "1.1.3.dev0",
"test": {
"evaluation_time": 106.11,
"fr": {
"map_at_1": 0.24152,
"map_at_10": 0.36048,
"map_at_100": 0.37348,
"map_at_1000": 0.37509,
"map_at_3": 0.32388,
"map_at_5": 0.34646,
"mrr_at_1": 0.40721,
"mrr_at_10": 0.4599,
"mrr_at_100": 0.46691,
"mrr_at_1000": 0.46761,
"mrr_at_3": 0.44459,
"mrr_at_5": 0.4522,
"ndcg_at_1": 0.40721,
"ndcg_at_10": 0.41288,
"ndcg_at_100": 0.46455,
"ndcg_at_1000": 0.50036,
"ndcg_at_3": 0.38075,
"ndcg_at_5": 0.38843,
"precision_at_1": 0.40721,
"precision_at_10": 0.0976,
"precision_at_100": 0.0143,
"precision_at_1000": 0.00193,
"precision_at_3": 0.23409,
"precision_at_5": 0.16742,
"recall_at_1": 0.24152,
"recall_at_10": 0.4656,
"recall_at_100": 0.67036,
"recall_at_1000": 0.91544,
"recall_at_3": 0.35472,
"recall_at_5": 0.40538
}
}
}