{
  "dataset_revision": "8a04d940a42cd40658986fdd8e3da561533a3646",
  "mteb_dataset_name": "PawsX",
  "mteb_version": "1.1.3.dev0",
  "test": {
    "evaluation_time": 4.04,
    "fr": {
      "cos_sim": {
        "accuracy": 0.5805,
        "accuracy_threshold": 0.9825798153351122,
        "ap": 0.5338191982712758,
        "f1": 0.6251298026998962,
        "f1_threshold": 0.700663692322502,
        "precision": 0.45468277945619334,
        "recall": 1.0
      },
      "dot": {
        "accuracy": 0.5805,
        "accuracy_threshold": 0.9825797864664809,
        "ap": 0.5321972337878382,
        "f1": 0.6251298026998962,
        "f1_threshold": 0.700663673373858,
        "precision": 0.45468277945619334,
        "recall": 1.0
      },
      "euclidean": {
        "accuracy": 0.5805,
        "accuracy_threshold": 0.1866557191325731,
        "ap": 0.5338191982712758,
        "f1": 0.6251298026998962,
        "f1_threshold": 0.7732727055581536,
        "precision": 0.45468277945619334,
        "recall": 1.0
      },
      "manhattan": {
        "accuracy": 0.5785,
        "accuracy_threshold": 4.0747642012211145,
        "ap": 0.5337942395079072,
        "f1": 0.6251298026998962,
        "f1_threshold": 17.067192658275957,
        "precision": 0.45468277945619334,
        "recall": 1.0
      },
      "max": {
        "accuracy": 0.5805,
        "ap": 0.5338191982712758,
        "f1": 0.6251298026998962
      }
    }
  }
}