{
  "dataset_revision": "8a04d940a42cd40658986fdd8e3da561533a3646",
  "mteb_dataset_name": "PawsX",
  "mteb_version": "1.1.3.dev0",
  "test": {
    "evaluation_time": 3.96,
    "fr": {
      "cos_sim": {
        "accuracy": 0.578,
        "accuracy_threshold": 0.9884029145948847,
        "ap": 0.5322360899538218,
        "f1": 0.6257839721254355,
        "f1_threshold": 0.8928437828183153,
        "precision": 0.456532791052364,
        "recall": 0.9944629014396457
      },
      "dot": {
        "accuracy": 0.578,
        "accuracy_threshold": 0.9884029309036406,
        "ap": 0.5333062169070473,
        "f1": 0.6257839721254355,
        "f1_threshold": 0.8928437471131099,
        "precision": 0.456532791052364,
        "recall": 0.9944629014396457
      },
      "euclidean": {
        "accuracy": 0.578,
        "accuracy_threshold": 0.15229628932302958,
        "ap": 0.5322360899538218,
        "f1": 0.6257839721254355,
        "f1_threshold": 0.4629172125313885,
        "precision": 0.456532791052364,
        "recall": 0.9944629014396457
      },
      "manhattan": {
        "accuracy": 0.5785,
        "accuracy_threshold": 3.3623000295438032,
        "ap": 0.5323868754892794,
        "f1": 0.6255660048763497,
        "f1_threshold": 10.25588538846192,
        "precision": 0.4563008130081301,
        "recall": 0.9944629014396457
      },
      "max": {
        "accuracy": 0.5785,
        "ap": 0.5333062169070473,
        "f1": 0.6257839721254355
      }
    }
  }
}