{
  "dataset_revision": null,
  "mteb_dataset_name": "Ocnli",
  "mteb_version": "1.1.0",
  "validation": {
    "cos_sim": {
      "accuracy": 0.7097996751488901,
      "accuracy_threshold": 0.6788190007209778,
      "ap": 0.7541204808304739,
      "f1": 0.7397722479966258,
      "f1_threshold": 0.5811378359794617,
      "precision": 0.6158707865168539,
      "recall": 0.9260823653643083
    },
    "dot": {
      "accuracy": 0.7097996751488901,
      "accuracy_threshold": 0.6788190603256226,
      "ap": 0.7541204808304739,
      "f1": 0.7397722479966258,
      "f1_threshold": 0.5811378955841064,
      "precision": 0.6158707865168539,
      "recall": 0.9260823653643083
    },
    "euclidean": {
      "accuracy": 0.7097996751488901,
      "accuracy_threshold": 0.8014748096466064,
      "ap": 0.7541204808304739,
      "f1": 0.7397722479966258,
      "f1_threshold": 0.9152728319168091,
      "precision": 0.6158707865168539,
      "recall": 0.9260823653643083
    },
    "evaluation_time": 2.42,
    "manhattan": {
      "accuracy": 0.7097996751488901,
      "accuracy_threshold": 17.35822296142578,
      "ap": 0.7542641836873754,
      "f1": 0.738585496866607,
      "f1_threshold": 19.5109806060791,
      "precision": 0.6410256410256411,
      "recall": 0.8711721224920802
    },
    "max": {
      "accuracy": 0.7097996751488901,
      "ap": 0.7542641836873754,
      "f1": 0.7397722479966258
    }
  }
}
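
A results file with this shape (one block per similarity function: cos_sim, dot, euclidean, manhattan, plus a max summary) is what the mteb harness writes for a pair-classification task such as Ocnli. The following is a minimal sketch of how such a run could be reproduced; the model name is a placeholder assumption, not the model that produced the numbers above, and it assumes the Ocnli task is registered in the installed mteb version (pinned to 1.1.0 to match "mteb_version" above).

# Sketch only: regenerate an Ocnli results JSON with mteb == 1.1.0.
from mteb import MTEB
from sentence_transformers import SentenceTransformer

# Placeholder model (assumption) -- substitute the embedding model under evaluation.
model = SentenceTransformer("sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2")

# Select the Ocnli pair-classification task by name and run it; the harness
# writes one JSON file per task into the output folder, containing the
# cos_sim / dot / euclidean / manhattan / max metric blocks seen above.
evaluation = MTEB(tasks=["Ocnli"])
evaluation.run(model, output_folder="results")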