imenelydiaker
Add French evaluation results
e426f92
{
  "dataset_revision": "8a04d940a42cd40658986fdd8e3da561533a3646",
  "mteb_dataset_name": "PawsX",
  "mteb_version": "1.1.3.dev0",
  "test": {
    "evaluation_time": 241.23,
    "fr": {
      "cos_sim": {
        "accuracy": 0.632,
        "accuracy_threshold": 0.9880824858469037,
        "ap": 0.6365112459841364,
        "f1": 0.6365622723962127,
        "f1_threshold": 0.9413883526942921,
        "precision": 0.4742268041237113,
        "recall": 0.9678848283499446
      },
      "dot": {
        "accuracy": 0.632,
        "accuracy_threshold": 0.9880824752512578,
        "ap": 0.6372170606558628,
        "f1": 0.6365622723962127,
        "f1_threshold": 0.9413883690557052,
        "precision": 0.4742268041237113,
        "recall": 0.9678848283499446
      },
      "euclidean": {
        "accuracy": 0.632,
        "accuracy_threshold": 0.15438593144644908,
        "ap": 0.6365112459841364,
        "f1": 0.6365622723962127,
        "f1_threshold": 0.34237886440237303,
        "precision": 0.4742268041237113,
        "recall": 0.9678848283499446
      },
      "manhattan": {
        "accuracy": 0.6355,
        "accuracy_threshold": 7.654586146420513,
        "ap": 0.6433065142499077,
        "f1": 0.6378615891614793,
        "f1_threshold": 16.080158381567912,
        "precision": 0.4764770240700219,
        "recall": 0.964562569213732
      },
      "max": {
        "accuracy": 0.6355,
        "ap": 0.6433065142499077,
        "f1": 0.6378615891614793
      }
    }
  }
}
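
For context, a results file with this shape is what MTEB's pair-classification evaluator writes out for the PawsX task. The sketch below is a minimal example of how such a file is typically produced with mteb 1.1.x; the model id, output folder, and the task_langs/eval_splits arguments are assumptions for illustration, since the commit does not name the model that generated these scores.

from mteb import MTEB
from sentence_transformers import SentenceTransformer

# Placeholder model id: the commit does not say which model produced these numbers.
model = SentenceTransformer("sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2")

# "PawsX" matches the "mteb_dataset_name" field above; task_langs restricts the
# multilingual task to its French subset, mirroring the "fr" block in the results.
evaluation = MTEB(tasks=["PawsX"], task_langs=["fr"])

# Writes results/PawsX.json with roughly the structure shown above
# (cos_sim / dot / euclidean / manhattan / max metrics per language).
evaluation.run(model, output_folder="results", eval_splits=["test"])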