{
    "dataset_revision": null,
    "mteb_dataset_name": "Cmnli",
    "mteb_version": "1.0.2",
    "validation": {
        "cos_sim": {
            "accuracy": 0.7358989777510523,
            "accuracy_threshold": 0.8126825094223022,
            "ap": 0.8216944597985953,
            "f1": 0.756733737889918,
            "f1_threshold": 0.7991254329681396,
            "precision": 0.6946833463643471,
            "recall": 0.8309562777647884
        },
        "dot": {
            "accuracy": 0.7358989777510523,
            "accuracy_threshold": 0.8121147155761719,
            "ap": 0.8218094428696314,
            "f1": 0.756733737889918,
            "f1_threshold": 0.7991256713867188,
            "precision": 0.6946833463643471,
            "recall": 0.8309562777647884
        },
        "euclidean": {
            "accuracy": 0.7358989777510523,
            "accuracy_threshold": 0.6120743155479431,
            "ap": 0.8216944217234582,
            "f1": 0.756733737889918,
            "f1_threshold": 0.6338368654251099,
            "precision": 0.6946833463643471,
            "recall": 0.8309562777647884
        },
        "evaluation_time": 24.32,
        "manhattan": {
            "accuracy": 0.7355381840048106,
            "accuracy_threshold": 15.29022216796875,
            "ap": 0.8212547480868224,
            "f1": 0.7548359966358283,
            "f1_threshold": 16.25013542175293,
            "precision": 0.6857688634192932,
            "recall": 0.839373392564882
        },
        "max": {
            "accuracy": 0.7358989777510523,
            "ap": 0.8218094428696314,
            "f1": 0.756733737889918
        }
    }
}