{
  "dataset_revision": null,
  "mteb_dataset_name": "Ocnli",
  "mteb_version": "1.1.0",
  "validation": {
    "cos_sim": {
      "accuracy": 0.7417433676231727,
      "accuracy_threshold": 0.6340124607086182,
      "ap": 0.7793597319090767,
      "f1": 0.7673772011121409,
      "f1_threshold": 0.587164580821991,
      "precision": 0.6837324525185797,
      "recall": 0.8743400211193242
    },
    "dot": {
      "accuracy": 0.7417433676231727,
      "accuracy_threshold": 0.6340124607086182,
      "ap": 0.7793597319090767,
      "f1": 0.7673772011121409,
      "f1_threshold": 0.5871647000312805,
      "precision": 0.6837324525185797,
      "recall": 0.8743400211193242
    },
    "euclidean": {
      "accuracy": 0.7417433676231727,
      "accuracy_threshold": 0.8555554747581482,
      "ap": 0.7793597319090767,
      "f1": 0.7673772011121409,
      "f1_threshold": 0.9086642265319824,
      "precision": 0.6837324525185797,
      "recall": 0.8743400211193242
    },
    "evaluation_time": 2.11,
    "manhattan": {
      "accuracy": 0.7433676231727125,
      "accuracy_threshold": 22.02208709716797,
      "ap": 0.779552262331702,
      "f1": 0.7707362534948741,
      "f1_threshold": 23.073806762695312,
      "precision": 0.6897414512093412,
      "recall": 0.8732840549102429
    },
    "max": {
      "accuracy": 0.7433676231727125,
      "ap": 0.779552262331702,
      "f1": 0.7707362534948741
    }
  }
}