{
  "dataset_revision": "66e76a618a34d6d565d5538088562851e6daa7ec",
  "mteb_dataset_name": "Ocnli",
  "mteb_version": "1.1.2",
  "validation": {
    "cos_sim": {
      "accuracy": 0.8110449377368706,
      "accuracy_threshold": 0.5927015784967749,
      "ap": 0.8517712014448026,
      "f1": 0.8300094966761632,
      "f1_threshold": 0.5652001420400894,
      "precision": 0.7540983606557377,
      "recall": 0.9229144667370645
    },
    "dot": {
      "accuracy": 0.8110449377368706,
      "accuracy_threshold": 0.59326171875,
      "ap": 0.8515382639837256,
      "f1": 0.8301707779886148,
      "f1_threshold": 0.56494140625,
      "precision": 0.7536606373815676,
      "recall": 0.9239704329461457
    },
    "euclidean": {
      "accuracy": 0.8110449377368706,
      "accuracy_threshold": 0.90270946410721,
      "ap": 0.8517692794840255,
      "f1": 0.8300094966761632,
      "f1_threshold": 0.9324667371600142,
      "precision": 0.7540983606557377,
      "recall": 0.9229144667370645
    },
    "evaluation_time": 2.07,
    "manhattan": {
      "accuracy": 0.8105035192203573,
      "accuracy_threshold": 30.225037306547165,
      "ap": 0.8514211368570364,
      "f1": 0.8296155671570954,
      "f1_threshold": 31.521927028894424,
      "precision": 0.753448275862069,
      "recall": 0.9229144667370645
    },
    "max": {
      "accuracy": 0.8110449377368706,
      "ap": 0.8517712014448026,
      "f1": 0.8301707779886148
    }
  }
}