{
"dataset_revision": null,
"mteb_dataset_name": "Ocnli",
"mteb_version": "1.0.2",
"validation": {
"cos_sim": {
"accuracy": 0.6269626421223606,
"accuracy_threshold": 0.8633235692977905,
"ap": 0.6525352977483082,
"f1": 0.6843878697117184,
"f1_threshold": 0.7713218331336975,
"precision": 0.5301624129930395,
"recall": 0.9651531151003168
},
"dot": {
"accuracy": 0.6269626421223606,
"accuracy_threshold": 0.8633235692977905,
"ap": 0.6525355998529411,
"f1": 0.6843878697117184,
"f1_threshold": 0.7713218331336975,
"precision": 0.5301624129930395,
"recall": 0.9651531151003168
},
"euclidean": {
"accuracy": 0.6269626421223606,
"accuracy_threshold": 0.5228314399719238,
"ap": 0.6525352977483083,
"f1": 0.6843878697117184,
"f1_threshold": 0.6762813329696655,
"precision": 0.5301624129930395,
"recall": 0.9651531151003168
},
"evaluation_time": 1.74,
"manhattan": {
"accuracy": 0.6242555495397942,
"accuracy_threshold": 9.380687713623047,
"ap": 0.6505620392387741,
"f1": 0.6840891621829361,
"f1_threshold": 11.845846176147461,
"precision": 0.5377643504531722,
"recall": 0.9398099260823654
},
"max": {
"accuracy": 0.6269626421223606,
"ap": 0.6525355998529411,
"f1": 0.6843878697117184
}
}
}