{
    "dataset_revision": "8a04d940a42cd40658986fdd8e3da561533a3646",
    "mteb_dataset_name": "PawsX",
    "mteb_version": "1.1.3.dev0",
    "test": {
        "evaluation_time": 4.02,
        "fr": {
            "cos_sim": {
                "accuracy": 0.5795,
                "accuracy_threshold": 0.9825973279022131,
                "ap": 0.5339388951464616,
                "f1": 0.6253979483551468,
                "f1_threshold": 0.8493756889783943,
                "precision": 0.4594594594594595,
                "recall": 0.9789590254706534
            },
            "dot": {
                "accuracy": 0.5795,
                "accuracy_threshold": 0.9825973092060061,
                "ap": 0.5335291186438886,
                "f1": 0.6253979483551468,
                "f1_threshold": 0.849375729122165,
                "precision": 0.4594594594594595,
                "recall": 0.9789590254706534
            },
            "euclidean": {
                "accuracy": 0.5795,
                "accuracy_threshold": 0.18656179766038844,
                "ap": 0.5339388951464616,
                "f1": 0.6253979483551468,
                "f1_threshold": 0.548860940498653,
                "precision": 0.4594594594594595,
                "recall": 0.9789590254706534
            },
            "manhattan": {
                "accuracy": 0.5785,
                "accuracy_threshold": 4.0747650607136165,
                "ap": 0.534051195391792,
                "f1": 0.6253979483551468,
                "f1_threshold": 12.12762373367741,
                "precision": 0.4594594594594595,
                "recall": 0.9789590254706534
            },
            "max": {
                "accuracy": 0.5795,
                "ap": 0.534051195391792,
                "f1": 0.6253979483551468
            }
        }
    }
}