results/all-MiniLM-L12-v2/AlloprofRetrieval.json
{
  "dataset_revision": "392ba3f5bcc8c51f578786c1fc3dae648662cb9b",
  "mteb_dataset_name": "AlloprofRetrieval",
  "mteb_version": "1.1.2.dev0",
  "test": {
    "evaluation_time": 12.79,
    "map_at_1": 0.20035,
    "map_at_10": 0.28604,
    "map_at_100": 0.29545,
    "map_at_1000": 0.29629,
    "map_at_3": 0.26137,
    "map_at_5": 0.27566,
    "mrr_at_1": 0.20078,
    "mrr_at_10": 0.28633,
    "mrr_at_100": 0.29574,
    "mrr_at_1000": 0.29658,
    "mrr_at_3": 0.26166,
    "mrr_at_5": 0.27595,
    "ndcg_at_1": 0.20035,
    "ndcg_at_10": 0.33196,
    "ndcg_at_100": 0.38089,
    "ndcg_at_1000": 0.40734,
    "ndcg_at_3": 0.28129,
    "ndcg_at_5": 0.30706,
    "precision_at_1": 0.20035,
    "precision_at_10": 0.0478,
    "precision_at_100": 0.00714,
    "precision_at_1000": 0.00093,
    "precision_at_3": 0.11298,
    "precision_at_5": 0.08031,
    "recall_at_1": 0.20035,
    "recall_at_10": 0.47798,
    "recall_at_100": 0.71416,
    "recall_at_1000": 0.93135,
    "recall_at_3": 0.33895,
    "recall_at_5": 0.40155
  }
}