{
  "dataset_revision": null,
  "mteb_dataset_name": "Cmnli",
  "mteb_version": "1.1.1",
  "validation": {
    "cos_sim": {
      "accuracy": 0.8366806975345761,
      "accuracy_threshold": 0.7407156825065613,
      "ap": 0.9077455179091799,
      "f1": 0.8469479353680431,
      "f1_threshold": 0.7373833060264587,
      "precision": 0.8142394822006472,
      "recall": 0.8823942015431377
    },
    "dot": {
      "accuracy": 0.8366806975345761,
      "accuracy_threshold": 0.740715742111206,
      "ap": 0.9077451632132499,
      "f1": 0.8469479353680431,
      "f1_threshold": 0.7373833060264587,
      "precision": 0.8142394822006472,
      "recall": 0.8823942015431377
    },
    "euclidean": {
      "accuracy": 0.8366806975345761,
      "accuracy_threshold": 0.7201170325279236,
      "ap": 0.9077454017352489,
      "f1": 0.8469479353680431,
      "f1_threshold": 0.7247298955917358,
      "precision": 0.8142394822006472,
      "recall": 0.8823942015431377
    },
    "evaluation_time": 45.19,
    "manhattan": {
      "accuracy": 0.834155141310884,
      "accuracy_threshold": 23.821239471435547,
      "ap": 0.9057095444704344,
      "f1": 0.8451779499270237,
      "f1_threshold": 23.821239471435547,
      "precision": 0.8129589632829374,
      "recall": 0.8800561140986672
    },
    "max": {
      "accuracy": 0.8366806975345761,
      "ap": 0.9077455179091799,
      "f1": 0.8469479353680431
    }
  }
}