{
  "dataset_revision": null,
  "mteb_dataset_name": "Quora-PL",
  "mteb_version": "1.1.1",
  "test": {
    "evaluation_time": 275.24,
    "map_at_1": 0.54755,
    "map_at_10": 0.66975,
    "map_at_100": 0.67879,
    "map_at_1000": 0.67925,
    "map_at_3": 0.63898,
    "map_at_5": 0.65717,
    "mrr_at_1": 0.6311,
    "mrr_at_10": 0.71035,
    "mrr_at_100": 0.71422,
    "mrr_at_1000": 0.71437,
    "mrr_at_3": 0.69215,
    "mrr_at_5": 0.70309,
    "ndcg_at_1": 0.6309,
    "ndcg_at_10": 0.71953,
    "ndcg_at_100": 0.74821,
    "ndcg_at_1000": 0.75514,
    "ndcg_at_3": 0.67721,
    "ndcg_at_5": 0.69828,
    "precision_at_1": 0.6309,
    "precision_at_10": 0.11165,
    "precision_at_100": 0.01399,
    "precision_at_1000": 0.00151,
    "precision_at_3": 0.29617,
    "precision_at_5": 0.1982,
    "recall_at_1": 0.54755,
    "recall_at_10": 0.82218,
    "recall_at_100": 0.93648,
    "recall_at_1000": 0.98125,
    "recall_at_3": 0.70367,
    "recall_at_5": 0.76007
  },
  "validation": {
    "evaluation_time": 161.13,
    "map_at_1": 0.54782,
    "map_at_10": 0.67188,
    "map_at_100": 0.68099,
    "map_at_1000": 0.6814,
    "map_at_3": 0.6424,
    "map_at_5": 0.66002,
    "mrr_at_1": 0.6312,
    "mrr_at_10": 0.71235,
    "mrr_at_100": 0.71699,
    "mrr_at_1000": 0.71714,
    "mrr_at_3": 0.6965,
    "mrr_at_5": 0.70638,
    "ndcg_at_1": 0.6314,
    "ndcg_at_10": 0.72011,
    "ndcg_at_100": 0.7508,
    "ndcg_at_1000": 0.75759,
    "ndcg_at_3": 0.68235,
    "ndcg_at_5": 0.70153,
    "precision_at_1": 0.6314,
    "precision_at_10": 0.11094,
    "precision_at_100": 0.0137,
    "precision_at_1000": 0.00148,
    "precision_at_3": 0.29907,
    "precision_at_5": 0.1994,
    "recall_at_1": 0.54782,
    "recall_at_10": 0.81918,
    "recall_at_100": 0.94108,
    "recall_at_1000": 0.98545,
    "recall_at_3": 0.71277,
    "recall_at_5": 0.76534
  }
}