results/LLM2Vec-Meta-Llama-3-unsupervised/CQADupstackGamingRetrieval.json
{
  "dataset_revision": null,
  "mteb_dataset_name": "CQADupstackGamingRetrieval",
  "mteb_version": "1.0.1.dev0",
  "test": {
    "evaluation_time": 292.59,
    "map_at_1": 0.31863,
    "map_at_10": 0.43733,
    "map_at_100": 0.45005,
    "map_at_1000": 0.45074,
    "map_at_3": 0.40593,
    "map_at_5": 0.42272,
    "mrr_at_1": 0.37555,
    "mrr_at_10": 0.47533,
    "mrr_at_100": 0.48432,
    "mrr_at_1000": 0.4847,
    "mrr_at_3": 0.44901,
    "mrr_at_5": 0.46274,
    "ndcg_at_1": 0.37555,
    "ndcg_at_10": 0.49789,
    "ndcg_at_100": 0.5506,
    "ndcg_at_1000": 0.56434,
    "ndcg_at_3": 0.44238,
    "ndcg_at_5": 0.46698,
    "precision_at_1": 0.37555,
    "precision_at_10": 0.08257,
    "precision_at_100": 0.01189,
    "precision_at_1000": 0.00136,
    "precision_at_3": 0.2023,
    "precision_at_5": 0.13868,
    "recall_at_1": 0.31863,
    "recall_at_10": 0.64188,
    "recall_at_100": 0.87026,
    "recall_at_1000": 0.96761,
    "recall_at_3": 0.48986,
    "recall_at_5": 0.55177
  }
}