results/results/all-MiniLM-L6-v2/AlloprofRetrieval.json
{
"dataset_revision": "392ba3f5bcc8c51f578786c1fc3dae648662cb9b",
"mteb_dataset_name": "AlloprofRetrieval",
"mteb_version": "1.1.2.dev0",
"test": {
"evaluation_time": 12.87,
"map_at_1": 0.16235,
"map_at_10": 0.24027,
"map_at_100": 0.25064,
"map_at_1000": 0.25164,
"map_at_3": 0.21661,
"map_at_5": 0.22952,
"mrr_at_1": 0.16235,
"mrr_at_10": 0.24031,
"mrr_at_100": 0.25067,
"mrr_at_1000": 0.25168,
"mrr_at_3": 0.21654,
"mrr_at_5": 0.22956,
"ndcg_at_1": 0.16235,
"ndcg_at_10": 0.28413,
"ndcg_at_100": 0.33814,
"ndcg_at_1000": 0.36853,
"ndcg_at_3": 0.23459,
"ndcg_at_5": 0.25796,
"precision_at_1": 0.16235,
"precision_at_10": 0.04249,
"precision_at_100": 0.00686,
"precision_at_1000": 0.00093,
"precision_at_3": 0.09557,
"precision_at_5": 0.06874,
"recall_at_1": 0.16235,
"recall_at_10": 0.42487,
"recall_at_100": 0.6861,
"recall_at_1000": 0.93394,
"recall_at_3": 0.2867,
"recall_at_5": 0.3437
}
}
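
A results file of this shape can be regenerated with the mteb library. The snippet below is a minimal sketch, assuming an mteb 1.1.x-era install and sentence-transformers; the output_folder path is illustrative, not the exact path used to produce this file.

```python
# Sketch: reproduce AlloprofRetrieval scores for all-MiniLM-L6-v2 with mteb 1.1.x.
from mteb import MTEB
from sentence_transformers import SentenceTransformer

# Model whose results are stored in the JSON above.
model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")

# Run only the French AlloprofRetrieval task and write one JSON file per task
# into the output folder (folder name is an assumption for this sketch).
evaluation = MTEB(tasks=["AlloprofRetrieval"], task_langs=["fr"])
evaluation.run(model, output_folder="results/all-MiniLM-L6-v2")
```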