{
  "dataset_revision": "8a04d940a42cd40658986fdd8e3da561533a3646",
  "mteb_dataset_name": "PawsX",
  "mteb_version": "1.1.3.dev0",
  "test": {
    "evaluation_time": 10.3,
    "fr": {
      "cos_sim": {
        "accuracy": 0.573,
        "accuracy_threshold": 0.9939910331786203,
        "ap": 0.5228462246810233,
        "f1": 0.625173852573018,
        "f1_threshold": 0.9358169382887034,
        "precision": 0.45565129244804864,
        "recall": 0.9955703211517165
      },
      "dot": {
        "accuracy": 0.573,
        "accuracy_threshold": 0.9939910467044111,
        "ap": 0.5230016237436258,
        "f1": 0.625173852573018,
        "f1_threshold": 0.9358169903262279,
        "precision": 0.45565129244804864,
        "recall": 0.9955703211517165
      },
      "euclidean": {
        "accuracy": 0.573,
        "accuracy_threshold": 0.10962615135486939,
        "ap": 0.5228462246810233,
        "f1": 0.625173852573018,
        "f1_threshold": 0.35828199936094474,
        "precision": 0.45565129244804864,
        "recall": 0.9955703211517165
      },
      "manhattan": {
        "accuracy": 0.574,
        "accuracy_threshold": 2.574844159855047,
        "ap": 0.5247397300310113,
        "f1": 0.6251298026998962,
        "f1_threshold": 11.817472039712811,
        "precision": 0.45468277945619334,
        "recall": 1.0
      },
      "max": {
        "accuracy": 0.574,
        "ap": 0.5247397300310113,
        "f1": 0.625173852573018
      }
    }
  }
}