results/e5-mistral-7b-instruct/AlloprofRetrieval.json
{
    "dataset_revision": "392ba3f5bcc8c51f578786c1fc3dae648662cb9b",
    "mteb_dataset_name": "AlloprofRetrieval",
    "mteb_version": "1.1.2.dev0",
    "test": {
        "evaluation_time": 4147.23,
        "map_at_1": 0.09283,
        "map_at_10": 0.13739,
        "map_at_100": 0.14513,
        "map_at_1000": 0.14635,
        "map_at_3": 0.12147,
        "map_at_5": 0.12996,
        "mrr_at_1": 0.09283,
        "mrr_at_10": 0.13739,
        "mrr_at_100": 0.14513,
        "mrr_at_1000": 0.14635,
        "mrr_at_3": 0.12147,
        "mrr_at_5": 0.12996,
        "ndcg_at_1": 0.09283,
        "ndcg_at_10": 0.16464,
        "ndcg_at_100": 0.20786,
        "ndcg_at_1000": 0.24711,
        "ndcg_at_3": 0.13117,
        "ndcg_at_5": 0.14655,
        "precision_at_1": 0.09283,
        "precision_at_10": 0.0253,
        "precision_at_100": 0.00468,
        "precision_at_1000": 0.00079,
        "precision_at_3": 0.05311,
        "precision_at_5": 0.03938,
        "recall_at_1": 0.09283,
        "recall_at_10": 0.25302,
        "recall_at_100": 0.46848,
        "recall_at_1000": 0.79188,
        "recall_at_3": 0.15933,
        "recall_at_5": 0.19689
    }
}
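A minimal sketch of how a result file with this structure could be read, assuming a local copy saved as AlloprofRetrieval.json; the file name and the choice of nDCG@10 as the headline metric are assumptions, while the keys match the JSON above.

import json

# Load the MTEB result file (path assumed; adjust to your local copy).
with open("AlloprofRetrieval.json", "r", encoding="utf-8") as f:
    result = json.load(f)

test_scores = result["test"]

# Print the dataset name and a few retrieval metrics from the "test" split.
print(f'{result["mteb_dataset_name"]} (revision {result["dataset_revision"]})')
print(f'  ndcg@10:   {test_scores["ndcg_at_10"]:.5f}')
print(f'  map@10:    {test_scores["map_at_10"]:.5f}')
print(f'  recall@10: {test_scores["recall_at_10"]:.5f}')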