{
"dataset_revision": null,
"mteb_dataset_name": "Cmnli",
"mteb_version": "1.0.2",
"validation": {
"cos_sim": {
"accuracy": 0.7324113048707156,
"accuracy_threshold": 0.8254185914993286,
"ap": 0.8212077728141313,
"f1": 0.7476847087878165,
"f1_threshold": 0.7963211536407471,
"precision": 0.6677081418856827,
"recall": 0.8494271685761048
},
"dot": {
"accuracy": 0.7324113048707156,
"accuracy_threshold": 0.8254189491271973,
"ap": 0.8212565339848135,
"f1": 0.7476847087878165,
"f1_threshold": 0.7963211536407471,
"precision": 0.6677081418856827,
"recall": 0.8494271685761048
},
"euclidean": {
"accuracy": 0.7324113048707156,
"accuracy_threshold": 0.5908999443054199,
"ap": 0.8212076750086248,
"f1": 0.7476847087878165,
"f1_threshold": 0.6382458209991455,
"precision": 0.6677081418856827,
"recall": 0.8494271685761048
},
"evaluation_time": 22.75,
"manhattan": {
"accuracy": 0.7319302465423932,
"accuracy_threshold": 14.881189346313477,
"ap": 0.8213259787083136,
"f1": 0.7484231206700444,
"f1_threshold": 16.200695037841797,
"precision": 0.6709306637004079,
"recall": 0.8461538461538461
},
"max": {
"accuracy": 0.7324113048707156,
"ap": 0.8213259787083136,
"f1": 0.7484231206700444
}
}
}