{
  "dataset_revision": null,
  "mteb_dataset_name": "Ocnli",
  "mteb_version": "1.0.2",
  "validation": {
    "cos_sim": {
      "accuracy": 0.5836491608012994,
      "accuracy_threshold": 0.8999356031417847,
      "ap": 0.6069876592252514,
      "f1": 0.67984496124031,
      "f1_threshold": 0.8494171500205994,
      "precision": 0.5370483772198408,
      "recall": 0.9260823653643083
    },
    "dot": {
      "accuracy": 0.5836491608012994,
      "accuracy_threshold": 0.8999356031417847,
      "ap": 0.6069876592252516,
      "f1": 0.67984496124031,
      "f1_threshold": 0.8494172096252441,
      "precision": 0.5370483772198408,
      "recall": 0.9260823653643083
    },
    "euclidean": {
      "accuracy": 0.5836491608012994,
      "accuracy_threshold": 0.4473574459552765,
      "ap": 0.6069876592252516,
      "f1": 0.67984496124031,
      "f1_threshold": 0.5487856864929199,
      "precision": 0.5370483772198408,
      "recall": 0.9260823653643083
    },
    "evaluation_time": 10.01,
    "manhattan": {
      "accuracy": 0.5825663237682729,
      "accuracy_threshold": 13.770918846130371,
      "ap": 0.6073987820492961,
      "f1": 0.6816793893129772,
      "f1_threshold": 17.443004608154297,
      "precision": 0.5337716676628811,
      "recall": 0.9429778247096093
    },
    "max": {
      "accuracy": 0.5836491608012994,
      "ap": 0.6073987820492961,
      "f1": 0.6816793893129772
    }
  }
}