{
  "dataset_revision": null,
  "mteb_dataset_name": "CDSC-E",
  "mteb_version": "1.1.1",
  "test": {
    "cos_sim": {
      "accuracy": 0.88,
      "accuracy_threshold": 0.9367988109588623,
      "ap": 0.7182921565060718,
      "f1": 0.6512702078521939,
      "f1_threshold": 0.7809259295463562,
      "precision": 0.5802469135802469,
      "recall": 0.7421052631578947
    },
    "dot": {
      "accuracy": 0.849,
      "accuracy_threshold": 0.5912673473358154,
      "ap": 0.5785734689123088,
      "f1": 0.581986143187067,
      "f1_threshold": 0.49015504121780396,
      "precision": 0.5185185185185185,
      "recall": 0.6631578947368421
    },
    "euclidean": {
      "accuracy": 0.882,
      "accuracy_threshold": 0.2713625431060791,
      "ap": 0.7200106117212798,
      "f1": 0.6389496717724289,
      "f1_threshold": 0.5501507520675659,
      "precision": 0.5468164794007491,
      "recall": 0.7684210526315789
    },
    "evaluation_time": 0.79,
    "manhattan": {
      "accuracy": 0.877,
      "accuracy_threshold": 4.479024887084961,
      "ap": 0.7092270283235846,
      "f1": 0.6382022471910113,
      "f1_threshold": 9.358081817626953,
      "precision": 0.5568627450980392,
      "recall": 0.7473684210526316
    },
    "max": {
      "accuracy": 0.882,
      "ap": 0.7200106117212798,
      "f1": 0.6512702078521939
    }
  }
}