{
"dataset_revision": null,
"mteb_dataset_name": "Cmnli",
"mteb_version": "1.0.2",
"validation": {
"cos_sim": {
"accuracy": 0.6388454600120265,
"accuracy_threshold": 0.6196912527084351,
"ap": 0.6926620571292302,
"f1": 0.693971782813168,
"f1_threshold": 0.36149147152900696,
"precision": 0.5470477217578862,
"recall": 0.9487958849660977
},
"dot": {
"accuracy": 0.6287432351172579,
"accuracy_threshold": 285.160400390625,
"ap": 0.6773824074934662,
"f1": 0.6939557568367879,
"f1_threshold": 176.40261840820312,
"precision": 0.5529493407356003,
"recall": 0.9314940378770166
},
"euclidean": {
"accuracy": 0.6395670475045099,
"accuracy_threshold": 19.689075469970703,
"ap": 0.6916679141793004,
"f1": 0.6931132410365637,
"f1_threshold": 23.51457977294922,
"precision": 0.5585753111142898,
"recall": 0.9130231470657002
},
"evaluation_time": 21.65,
"manhattan": {
"accuracy": 0.6395670475045099,
"accuracy_threshold": 493.3773193359375,
"ap": 0.6922912961249245,
"f1": 0.6928233086066672,
"f1_threshold": 595.407958984375,
"precision": 0.5607638888888888,
"recall": 0.906242693476736
},
"max": {
"accuracy": 0.6395670475045099,
"ap": 0.6926620571292302,
"f1": 0.693971782813168
}
}
}