{
"dataset_revision": null,
"mteb_dataset_name": "Ocnli",
"mteb_version": "1.1.1",
"validation": {
"cos_sim": {
"accuracy": 0.8234975636166757,
"accuracy_threshold": 0.7150192856788635,
"ap": 0.8544093090862952,
"f1": 0.8368654509610647,
"f1_threshold": 0.7067376971244812,
"precision": 0.7846580406654344,
"recall": 0.8965153115100317
},
"dot": {
"accuracy": 0.8234975636166757,
"accuracy_threshold": 0.7150193452835083,
"ap": 0.8544093090862952,
"f1": 0.8368654509610647,
"f1_threshold": 0.7067376375198364,
"precision": 0.7846580406654344,
"recall": 0.8965153115100317
},
"euclidean": {
"accuracy": 0.8234975636166757,
"accuracy_threshold": 0.7549578547477722,
"ap": 0.8544093090862952,
"f1": 0.8368654509610647,
"f1_threshold": 0.7658488750457764,
"precision": 0.7846580406654344,
"recall": 0.8965153115100317
},
"evaluation_time": 8.67,
"manhattan": {
"accuracy": 0.8197076340010828,
"accuracy_threshold": 24.7159423828125,
"ap": 0.8483544289352172,
"f1": 0.8334167083541771,
"f1_threshold": 24.7159423828125,
"precision": 0.7918250950570342,
"recall": 0.8796198521647307
},
"max": {
"accuracy": 0.8234975636166757,
"ap": 0.8544093090862952,
"f1": 0.8368654509610647
}
}
}