lodestone-base-4096-v1/mteb_results/SprintDuplicateQuestions.json
{
    "dataset_revision": "d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46",
    "mteb_dataset_name": "SprintDuplicateQuestions",
    "mteb_version": "1.1.0",
    "test": {
        "cos_sim": {
            "accuracy": 0.997039603960396,
            "accuracy_threshold": 0.8038696646690369,
            "ap": 0.9040809844250262,
            "f1": 0.8453181583031557,
            "f1_threshold": 0.8038696646690369,
            "precision": 0.8756698821007503,
            "recall": 0.817
        },
        "dot": {
            "accuracy": 0.997039603960396,
            "accuracy_threshold": 0.8038696050643921,
            "ap": 0.9040809844250263,
            "f1": 0.8453181583031557,
            "f1_threshold": 0.8038696050643921,
            "precision": 0.8756698821007503,
            "recall": 0.817
        },
        "euclidean": {
            "accuracy": 0.997039603960396,
            "accuracy_threshold": 0.6263071894645691,
            "ap": 0.904080982863383,
            "f1": 0.8453181583031557,
            "f1_threshold": 0.6263071894645691,
            "precision": 0.8756698821007503,
            "recall": 0.817
        },
        "evaluation_time": 6.81,
        "manhattan": {
            "accuracy": 0.997,
            "accuracy_threshold": 13.793682098388672,
            "ap": 0.9039771161966652,
            "f1": 0.8432989690721648,
            "f1_threshold": 13.853282928466797,
            "precision": 0.8702127659574468,
            "recall": 0.818
        },
        "max": {
            "accuracy": 0.997039603960396,
            "ap": 0.9040809844250263,
            "f1": 0.8453181583031557
        }
    }
}
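
A minimal sketch of reading this MTEB pair-classification result file in Python, assuming it sits at the relative path shown in the repo layout above; the path and the printed fields are illustrative, not part of the file itself.

import json

# Load the SprintDuplicateQuestions result file (path assumed from the repo layout).
with open("mteb_results/SprintDuplicateQuestions.json") as f:
    results = json.load(f)

# Each similarity function (cos_sim, dot, euclidean, manhattan) reports accuracy,
# average precision (ap), F1, precision, recall, and the thresholds at which
# accuracy and F1 were maximized; "max" holds the best value per metric.
cos_sim = results["test"]["cos_sim"]
print(f"cos_sim AP: {cos_sim['ap']:.4f}, F1: {cos_sim['f1']:.4f}")
print(f"best AP across similarity functions: {results['test']['max']['ap']:.4f}")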