qMTEB/results/all-MiniLM-L6-v2/SprintDuplicateQuestions.json
{
  "dataset_revision": "d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46",
  "mteb_dataset_name": "SprintDuplicateQuestions",
  "mteb_version": "1.1.0",
  "test": {
    "cos_sim": {
      "accuracy": 0.9978514851485148,
      "accuracy_threshold": 0.71305251121521,
      "ap": 0.9455063045792447,
      "f1": 0.890126582278481,
      "f1_threshold": 0.71305251121521,
      "precision": 0.9015384615384615,
      "recall": 0.879
    },
    "dot": {
      "accuracy": 0.9978514851485148,
      "accuracy_threshold": 0.7130525708198547,
      "ap": 0.9455063045792447,
      "f1": 0.890126582278481,
      "f1_threshold": 0.7130525708198547,
      "precision": 0.9015384615384615,
      "recall": 0.879
    },
    "euclidean": {
      "accuracy": 0.9978514851485148,
      "accuracy_threshold": 0.7575584650039673,
      "ap": 0.9455063045792447,
      "f1": 0.890126582278481,
      "f1_threshold": 0.7575584650039673,
      "precision": 0.9015384615384615,
      "recall": 0.879
    },
    "evaluation_time": 13.21,
    "manhattan": {
      "accuracy": 0.9978415841584158,
      "accuracy_threshold": 11.837488174438477,
      "ap": 0.9454002074215007,
      "f1": 0.8898989898989899,
      "f1_threshold": 11.837488174438477,
      "precision": 0.8989795918367347,
      "recall": 0.881
    },
    "max": {
      "accuracy": 0.9978514851485148,
      "ap": 0.9455063045792447,
      "f1": 0.890126582278481
    }
  }
}
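
For reference, a minimal sketch of how a per-task results file like this is typically produced with the mteb library; the model checkpoint and output folder below are assumptions inferred from the file path above, not confirmed by this repository.

# Minimal sketch, assuming the mteb (v1.x) and sentence-transformers packages.
from mteb import MTEB
from sentence_transformers import SentenceTransformer

# Load the embedding model being evaluated (assumed from the folder name).
model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")

# Run only the SprintDuplicateQuestions pair-classification task;
# MTEB writes one JSON file per task into the given output folder,
# reporting accuracy/AP/F1 for cos_sim, dot, euclidean, and manhattan scores.
evaluation = MTEB(tasks=["SprintDuplicateQuestions"])
evaluation.run(model, output_folder="results/all-MiniLM-L6-v2")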