{
    "dataset_revision": null,
    "mteb_dataset_name": "Cmnli",
    "mteb_version": "1.0.2",
    "validation": {
        "cos_sim": {
            "accuracy": 0.6559230306674684,
            "accuracy_threshold": 0.9149799346923828,
            "ap": 0.7254894349991994,
            "f1": 0.701677884159636,
            "f1_threshold": 0.8867361545562744,
            "precision": 0.5900829081632653,
            "recall": 0.8653261631985036
        },
        "dot": {
            "accuracy": 0.6559230306674684,
            "accuracy_threshold": 0.9149800539016724,
            "ap": 0.7254464861808467,
            "f1": 0.701677884159636,
            "f1_threshold": 0.8867361545562744,
            "precision": 0.5900829081632653,
            "recall": 0.8653261631985036
        },
        "euclidean": {
            "accuracy": 0.6559230306674684,
            "accuracy_threshold": 0.41235920786857605,
            "ap": 0.7254836144604117,
            "f1": 0.701677884159636,
            "f1_threshold": 0.47594934701919556,
            "precision": 0.5900829081632653,
            "recall": 0.8653261631985036
        },
        "evaluation_time": 54.44,
        "manhattan": {
            "accuracy": 0.6566446181599519,
            "accuracy_threshold": 12.795174598693848,
            "ap": 0.7254776417058088,
            "f1": 0.7016500383729855,
            "f1_threshold": 14.699525833129883,
            "precision": 0.5949243533430942,
            "recall": 0.8550385784428337
        },
        "max": {
            "accuracy": 0.6566446181599519,
            "ap": 0.7254894349991994,
            "f1": 0.701677884159636
        }
    }
}