{
"dataset_revision": null,
"mteb_dataset_name": "Ocnli",
"mteb_version": "1.0.2",
"validation": {
"cos_sim": {
"accuracy": 0.6978884677855983,
"accuracy_threshold": 0.8529754877090454,
"ap": 0.733197844494933,
"f1": 0.7298951048951049,
"f1_threshold": 0.8174299001693726,
"precision": 0.6226696495152871,
"recall": 0.8817317845828934
},
"dot": {
"accuracy": 0.6978884677855983,
"accuracy_threshold": 0.852975606918335,
"ap": 0.733197844494933,
"f1": 0.7298951048951049,
"f1_threshold": 0.8174299001693726,
"precision": 0.6226696495152871,
"recall": 0.8817317845828934
},
"euclidean": {
"accuracy": 0.6978884677855983,
"accuracy_threshold": 0.5422627925872803,
"ap": 0.733197844494933,
"f1": 0.7298951048951049,
"f1_threshold": 0.6042682528495789,
"precision": 0.6226696495152871,
"recall": 0.8817317845828934
},
"evaluation_time": 3.94,
"manhattan": {
"accuracy": 0.697347049269085,
"accuracy_threshold": 12.204912185668945,
"ap": 0.733601691355213,
"f1": 0.7314734088927637,
"f1_threshold": 13.393484115600586,
"precision": 0.6228656273199703,
"recall": 0.8859556494192186
},
"max": {
"accuracy": 0.6978884677855983,
"ap": 0.733601691355213,
"f1": 0.7314734088927637
}
}
}