{
    "dataset_revision": "9e9b1f8ef51616073f47f306f7f47dd91663f86a",
    "mteb_dataset_name": "OpusparcusPC",
    "mteb_version": "1.1.2.dev0",
    "test": {
        "evaluation_time": 2.09,
        "fr": {
            "cos_sim": {
                "accuracy": 0.9990069513406157,
                "accuracy_threshold": 0.34038404055817795,
                "ap": 1.0,
                "f1": 0.9995032290114257,
                "f1_threshold": 0.34038404055817795,
                "precision": 1.0,
                "recall": 0.9990069513406157
            },
            "dot": {
                "accuracy": 0.9990069513406157,
                "accuracy_threshold": 0.3403840077003497,
                "ap": 1.0,
                "f1": 0.9995032290114257,
                "f1_threshold": 0.3403840077003497,
                "precision": 1.0,
                "recall": 0.9990069513406157
            },
            "euclidean": {
                "accuracy": 0.9990069513406157,
                "accuracy_threshold": 1.1484630144786152,
                "ap": 1.0,
                "f1": 0.9995032290114257,
                "f1_threshold": 1.1484630144786152,
                "precision": 1.0,
                "recall": 0.9990069513406157
            },
            "manhattan": {
                "accuracy": 0.9990069513406157,
                "accuracy_threshold": 25.30844736110612,
                "ap": 1.0,
                "f1": 0.9995032290114257,
                "f1_threshold": 25.30844736110612,
                "precision": 1.0,
                "recall": 0.9990069513406157
            },
            "max": {
                "accuracy": 0.9990069513406157,
                "ap": 1.0,
                "f1": 0.9995032290114257
            }
        }
    }
}