results/LASER2/AlloprofRetrieval.json
{
    "dataset_revision": "392ba3f5bcc8c51f578786c1fc3dae648662cb9b",
    "mteb_dataset_name": "AlloprofRetrieval",
    "mteb_version": "1.1.2.dev0",
    "test": {
        "evaluation_time": 8.94,
        "map_at_1": 0.01295,
        "map_at_10": 0.02362,
        "map_at_100": 0.02869,
        "map_at_1000": 0.03051,
        "map_at_3": 0.01914,
        "map_at_5": 0.02115,
        "mrr_at_1": 0.01295,
        "mrr_at_10": 0.02362,
        "mrr_at_100": 0.02869,
        "mrr_at_1000": 0.03051,
        "mrr_at_3": 0.01914,
        "mrr_at_5": 0.02115,
        "ndcg_at_1": 0.01295,
        "ndcg_at_10": 0.03097,
        "ndcg_at_100": 0.0641,
        "ndcg_at_1000": 0.13011,
        "ndcg_at_3": 0.02131,
        "ndcg_at_5": 0.0249,
        "precision_at_1": 0.01295,
        "precision_at_10": 0.00553,
        "precision_at_100": 0.0023,
        "precision_at_1000": 0.00079,
        "precision_at_3": 0.00921,
        "precision_at_5": 0.00725,
        "recall_at_1": 0.01295,
        "recall_at_10": 0.05527,
        "recall_at_100": 0.23014,
        "recall_at_1000": 0.788,
        "recall_at_3": 0.02763,
        "recall_at_5": 0.03627
    }
}
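
The scores above were produced with the MTEB harness ("mteb_version": "1.1.2.dev0"). Below is a minimal sketch of how a result file of this shape can be generated and read back, assuming the MTEB 1.x Python API; the SentenceTransformer model is a stand-in (the LASER2 encoder used for these scores is loaded through its own tooling), and the output path "results/my-model" is hypothetical.

import json

from mteb import MTEB
from sentence_transformers import SentenceTransformer

# Stand-in encoder (assumption): any object exposing an encode() method works;
# the numbers in the file above come from LASER2, not from this model.
model = SentenceTransformer("paraphrase-multilingual-MiniLM-L12-v2")

# Run the French Alloprof retrieval task; MTEB writes one JSON file per task
# into output_folder, with the same structure as the file above.
evaluation = MTEB(tasks=["AlloprofRetrieval"])
evaluation.run(model, output_folder="results/my-model")  # hypothetical path

# Read the scores back, e.g. to compare nDCG@10 across runs.
with open("results/my-model/AlloprofRetrieval.json") as f:
    scores = json.load(f)
print(scores["test"]["ndcg_at_10"])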