{
  "dataset_revision": "8a04d940a42cd40658986fdd8e3da561533a3646",
  "mteb_dataset_name": "PawsX",
  "mteb_version": "1.1.3.dev0",
  "test": {
    "evaluation_time": 58.02,
    "fr": {
      "cos_sim": {
        "accuracy": 0.594,
        "accuracy_threshold": 0.9769771198689359,
        "ap": 0.5731728148121378,
        "f1": 0.6251298026998962,
        "f1_threshold": 0.6564006246453931,
        "precision": 0.45468277945619334,
        "recall": 1.0
      },
      "dot": {
        "accuracy": 0.595,
        "accuracy_threshold": 0.9758832764687353,
        "ap": 0.572888136373088,
        "f1": 0.6251298026998962,
        "f1_threshold": 0.6564062499464616,
        "precision": 0.45468277945619334,
        "recall": 1.0
      },
      "euclidean": {
        "accuracy": 0.594,
        "accuracy_threshold": 0.21421118207290207,
        "ap": 0.5731754344650203,
        "f1": 0.6251298026998962,
        "f1_threshold": 0.8273740306089585,
        "precision": 0.45468277945619334,
        "recall": 1.0
      },
      "manhattan": {
        "accuracy": 0.595,
        "accuracy_threshold": 3.3371051107335,
        "ap": 0.5734822424301145,
        "f1": 0.6251298026998962,
        "f1_threshold": 12.824252609723,
        "precision": 0.45468277945619334,
        "recall": 1.0
      },
      "max": {
        "accuracy": 0.595,
        "ap": 0.5734822424301145,
        "f1": 0.6251298026998962
      }
    }
  }
}