{
  "dataset_revision": null,
  "mteb_dataset_name": "PSC",
  "mteb_version": "1.1.1",
  "test": {
    "cos_sim": {
      "accuracy": 0.9406307977736549,
      "accuracy_threshold": 0.3957713842391968,
      "ap": 0.9634568218484734,
      "f1": 0.906158357771261,
      "f1_threshold": 0.3957713842391968,
      "precision": 0.8728813559322034,
      "recall": 0.9420731707317073
    },
    "dot": {
      "accuracy": 0.9322820037105751,
      "accuracy_threshold": 0.25806647539138794,
      "ap": 0.9489335370741049,
      "f1": 0.8958630527817404,
      "f1_threshold": 0.2532580494880676,
      "precision": 0.8418230563002681,
      "recall": 0.9573170731707317
    },
    "euclidean": {
      "accuracy": 0.9304267161410018,
      "accuracy_threshold": 0.8625168204307556,
      "ap": 0.9500721498710053,
      "f1": 0.8851454823889741,
      "f1_threshold": 0.8712700009346008,
      "precision": 0.8892307692307693,
      "recall": 0.8810975609756098
    },
    "evaluation_time": 1.85,
    "manhattan": {
      "accuracy": 0.9304267161410018,
      "accuracy_threshold": 15.52189826965332,
      "ap": 0.947601184500943,
      "f1": 0.8840803709428129,
      "f1_threshold": 15.52189826965332,
      "precision": 0.896551724137931,
      "recall": 0.8719512195121951
    },
    "max": {
      "accuracy": 0.9406307977736549,
      "ap": 0.9634568218484734,
      "f1": 0.906158357771261
    }
  }
}