{
    "dataset_revision": "8a04d940a42cd40658986fdd8e3da561533a3646",
    "mteb_dataset_name": "PawsX",
    "mteb_version": "1.1.3.dev0",
    "test": {
        "evaluation_time": 140.59,
        "fr": {
            "cos_sim": {
                "accuracy": 0.6155,
                "accuracy_threshold": 0.9889496670450961,
                "ap": 0.6125533530469982,
                "f1": 0.6281588447653429,
                "f1_threshold": 0.8860056874747043,
                "precision": 0.4659882163899304,
                "recall": 0.9634551495016611
            },
            "dot": {
                "accuracy": 0.616,
                "accuracy_threshold": 0.9890297202070327,
                "ap": 0.6121329579640336,
                "f1": 0.6282005048683735,
                "f1_threshold": 0.8849503272465148,
                "precision": 0.4657754010695187,
                "recall": 0.964562569213732
            },
            "euclidean": {
                "accuracy": 0.6155,
                "accuracy_threshold": 0.1486725247045886,
                "ap": 0.6125347384389794,
                "f1": 0.6281588447653429,
                "f1_threshold": 0.4774460653943422,
                "precision": 0.4659882163899304,
                "recall": 0.9634551495016611
            },
            "manhattan": {
                "accuracy": 0.6165,
                "accuracy_threshold": 3.7231502517657327,
                "ap": 0.612279621351029,
                "f1": 0.6282005048683735,
                "f1_threshold": 11.964458079781924,
                "precision": 0.4657754010695187,
                "recall": 0.964562569213732
            },
            "max": {
                "accuracy": 0.6165,
                "ap": 0.6125533530469982,
                "f1": 0.6282005048683735
            }
        }
    }
}