{
  "dataset_revision": null,
  "mteb_dataset_name": "Cmnli",
  "mteb_version": "1.0.2",
  "validation": {
    "cos_sim": {
      "accuracy": 0.7332531569452796,
      "accuracy_threshold": 0.8417980670928955,
      "ap": 0.81693548551703,
      "f1": 0.754461024731295,
      "f1_threshold": 0.815097987651825,
      "precision": 0.6813041839427064,
      "recall": 0.845218611176058
    },
    "dot": {
      "accuracy": 0.7332531569452796,
      "accuracy_threshold": 0.8417981863021851,
      "ap": 0.8167944185797849,
      "f1": 0.754461024731295,
      "f1_threshold": 0.8150980472564697,
      "precision": 0.6813041839427064,
      "recall": 0.845218611176058
    },
    "euclidean": {
      "accuracy": 0.7332531569452796,
      "accuracy_threshold": 0.5624978542327881,
      "ap": 0.8169354208723735,
      "f1": 0.754461024731295,
      "f1_threshold": 0.6081151366233826,
      "precision": 0.6813041839427064,
      "recall": 0.845218611176058
    },
    "evaluation_time": 13.43,
    "manhattan": {
      "accuracy": 0.7328923631990378,
      "accuracy_threshold": 12.403698921203613,
      "ap": 0.8166917488482668,
      "f1": 0.7551610239471511,
      "f1_threshold": 13.547996520996094,
      "precision": 0.6760303086305673,
      "recall": 0.8552723871872808
    },
    "max": {
      "accuracy": 0.7332531569452796,
      "ap": 0.81693548551703,
      "f1": 0.7551610239471511
    }
  }
}