{
    "dataset_revision": "c99d599f0a6ab9b85b065da6f9d94f9cf731679f",
    "mteb_dataset_name": "XPQARetrieval",
    "mteb_version": "1.1.3.dev0",
    "test": {
        "evaluation_time": 47.2,
        "fr": {
            "map_at_1": 0.27361,
            "map_at_10": 0.39755,
            "map_at_100": 0.41354,
            "map_at_1000": 0.41522,
            "map_at_3": 0.3637,
            "map_at_5": 0.38421,
            "mrr_at_1": 0.4259,
            "mrr_at_10": 0.48981,
            "mrr_at_100": 0.49937,
            "mrr_at_1000": 0.49993,
            "mrr_at_3": 0.4713,
            "mrr_at_5": 0.48158,
            "ndcg_at_1": 0.4259,
            "ndcg_at_10": 0.45227,
            "ndcg_at_100": 0.51813,
            "ndcg_at_1000": 0.55056,
            "ndcg_at_3": 0.41705,
            "ndcg_at_5": 0.42712,
            "precision_at_1": 0.4259,
            "precision_at_10": 0.10107,
            "precision_at_100": 0.01571,
            "precision_at_1000": 0.00205,
            "precision_at_3": 0.24611,
            "precision_at_5": 0.1725,
            "recall_at_1": 0.27361,
            "recall_at_10": 0.51578,
            "recall_at_100": 0.78053,
            "recall_at_1000": 0.9911,
            "recall_at_3": 0.40296,
            "recall_at_5": 0.45503
        }
    }
}