{
"dataset_revision": null,
"mteb_dataset_name": "MSMARCO-PL",
"mteb_version": "1.1.1",
"validation": {
"evaluation_time": 11672.11,
"map_at_1": 0.01934,
"map_at_10": 0.03529,
"map_at_100": 0.03962,
"map_at_1000": 0.04036,
"map_at_3": 0.02918,
"map_at_5": 0.03248,
"mrr_at_1": 0.01963,
"mrr_at_10": 0.03596,
"mrr_at_100": 0.04035,
"mrr_at_1000": 0.04109,
"mrr_at_3": 0.02968,
"mrr_at_5": 0.03306,
"ndcg_at_1": 0.01948,
"ndcg_at_10": 0.04565,
"ndcg_at_100": 0.07165,
"ndcg_at_1000": 0.09613,
"ndcg_at_3": 0.03274,
"ndcg_at_5": 0.03868,
"precision_at_1": 0.01948,
"precision_at_10": 0.00807,
"precision_at_100": 0.00219,
"precision_at_1000": 0.00043,
"precision_at_3": 0.01452,
"precision_at_5": 0.01163,
"recall_at_1": 0.01934,
"recall_at_10": 0.07851,
"recall_at_100": 0.21026,
"recall_at_1000": 0.41104,
"recall_at_3": 0.0426,
"recall_at_5": 0.05683
},
"test": {
"evaluation_time": 6923.69,
"map_at_1": 0.00415,
"map_at_10": 0.01431,
"map_at_100": 0.0285,
"map_at_1000": 0.04222,
"map_at_3": 0.0092,
"map_at_5": 0.01143,
"mrr_at_1": 0.13953,
"mrr_at_10": 0.2188,
"mrr_at_100": 0.22549,
"mrr_at_1000": 0.22672,
"mrr_at_3": 0.20543,
"mrr_at_5": 0.21589,
"ndcg_at_1": 0.0969,
"ndcg_at_10": 0.09134,
"ndcg_at_100": 0.09299,
"ndcg_at_1000": 0.15831,
"ndcg_at_3": 0.10353,
"ndcg_at_5": 0.09863,
"precision_at_1": 0.13953,
"precision_at_10": 0.1093,
"precision_at_100": 0.06302,
"precision_at_1000": 0.02191,
"precision_at_3": 0.13178,
"precision_at_5": 0.13023,
"recall_at_1": 0.00415,
"recall_at_10": 0.0191,
"recall_at_100": 0.0766,
"recall_at_1000": 0.23672,
"recall_at_3": 0.01051,
"recall_at_5": 0.014
}
}