{
"dataset_revision": "392ba3f5bcc8c51f578786c1fc3dae648662cb9b",
"mteb_dataset_name": "AlloprofRetrieval",
"mteb_version": "1.1.2.dev0",
"test": {
"evaluation_time": 23.04,
"map_at_1": 0.00475,
"map_at_10": 0.01021,
"map_at_100": 0.01348,
"map_at_1000": 0.01546,
"map_at_3": 0.00792,
"map_at_5": 0.00891,
"mrr_at_1": 0.00475,
"mrr_at_10": 0.01021,
"mrr_at_100": 0.01348,
"mrr_at_1000": 0.01546,
"mrr_at_3": 0.00792,
"mrr_at_5": 0.00891,
"ndcg_at_1": 0.00475,
"ndcg_at_10": 0.01386,
"ndcg_at_100": 0.03667,
"ndcg_at_1000": 0.11065,
"ndcg_at_3": 0.00888,
"ndcg_at_5": 0.01067,
"precision_at_1": 0.00475,
"precision_at_10": 0.00259,
"precision_at_100": 0.0015,
"precision_at_1000": 0.00078,
"precision_at_3": 0.00389,
"precision_at_5": 0.0032,
"recall_at_1": 0.00475,
"recall_at_10": 0.02591,
"recall_at_100": 0.14983,
"recall_at_1000": 0.77936,
"recall_at_3": 0.01166,
"recall_at_5": 0.01598
}
}