{
  "dataset_revision": null,
  "mteb_dataset_name": "Cmnli",
  "mteb_version": "1.0.2",
  "validation": {
    "cos_sim": {
      "accuracy": 0.6800962116656645,
      "accuracy_threshold": 0.8584307432174683,
      "ap": 0.7546263234379924,
      "f1": 0.7141083198013182,
      "f1_threshold": 0.8057594299316406,
      "precision": 0.6036821705426356,
      "recall": 0.8739770867430442
    },
    "dot": {
      "accuracy": 0.6800962116656645,
      "accuracy_threshold": 0.8584308624267578,
      "ap": 0.7542800526326999,
      "f1": 0.7141083198013182,
      "f1_threshold": 0.8057594299316406,
      "precision": 0.6036821705426356,
      "recall": 0.8739770867430442
    },
    "euclidean": {
      "accuracy": 0.6800962116656645,
      "accuracy_threshold": 0.5321075320243835,
      "ap": 0.7546263622354001,
      "f1": 0.7141083198013182,
      "f1_threshold": 0.6232825517654419,
      "precision": 0.6036821705426356,
      "recall": 0.8739770867430442
    },
    "evaluation_time": 6.63,
    "manhattan": {
      "accuracy": 0.6780517137702946,
      "accuracy_threshold": 9.709148406982422,
      "ap": 0.7544360066883088,
      "f1": 0.7139737991266375,
      "f1_threshold": 10.952827453613281,
      "precision": 0.620279358510088,
      "recall": 0.8410100537760112
    },
    "max": {
      "accuracy": 0.6800962116656645,
      "ap": 0.7546263622354001,
      "f1": 0.7141083198013182
    }
  }
}