{
  "dataset_revision": "392ba3f5bcc8c51f578786c1fc3dae648662cb9b",
  "mteb_dataset_name": "AlloprofRetrieval",
  "mteb_version": "1.1.2.dev0",
  "test": {
    "evaluation_time": 36.47,
    "map_at_1": 0.00691,
    "map_at_10": 0.01172,
    "map_at_100": 0.01623,
    "map_at_1000": 0.01849,
    "map_at_3": 0.00871,
    "map_at_5": 0.01002,
    "mrr_at_1": 0.00691,
    "mrr_at_10": 0.01172,
    "mrr_at_100": 0.01623,
    "mrr_at_1000": 0.01849,
    "mrr_at_3": 0.00871,
    "mrr_at_5": 0.01002,
    "ndcg_at_1": 0.00691,
    "ndcg_at_10": 0.01604,
    "ndcg_at_100": 0.0476,
    "ndcg_at_1000": 0.12397,
    "ndcg_at_3": 0.00945,
    "ndcg_at_5": 0.01189,
    "precision_at_1": 0.00691,
    "precision_at_10": 0.00307,
    "precision_at_100": 0.00202,
    "precision_at_1000": 0.00084,
    "precision_at_3": 0.00389,
    "precision_at_5": 0.00354,
    "recall_at_1": 0.00691,
    "recall_at_10": 0.03066,
    "recall_at_100": 0.20207,
    "recall_at_1000": 0.83722,
    "recall_at_3": 0.01166,
    "recall_at_5": 0.0177
  }
}