{
    "dataset_revision": "9e9b1f8ef51616073f47f306f7f47dd91663f86a",
    "mteb_dataset_name": "OpusparcusPC",
    "mteb_version": "1.1.2.dev0",
    "test": {
        "evaluation_time": 1.49,
        "fr": {
            "cos_sim": {
                "accuracy": 0.9990069513406157,
                "accuracy_threshold": 0.2298185778326003,
                "ap": 1.0,
                "f1": 0.9995032290114257,
                "f1_threshold": 0.2298185778326003,
                "precision": 1.0,
                "recall": 0.9990069513406157
            },
            "dot": {
                "accuracy": 0.9990069513406157,
                "accuracy_threshold": 0.22981857357947008,
                "ap": 1.0,
                "f1": 0.9995032290114257,
                "f1_threshold": 0.22981857357947008,
                "precision": 1.0,
                "recall": 0.9990069513406157
            },
            "euclidean": {
                "accuracy": 0.9990069513406157,
                "accuracy_threshold": 1.2409961737275523,
                "ap": 1.0,
                "f1": 0.9995032290114257,
                "f1_threshold": 1.2409961737275523,
                "precision": 1.0,
                "recall": 0.9990069513406157
            },
            "manhattan": {
                "accuracy": 0.9990069513406157,
                "accuracy_threshold": 27.220050818745307,
                "ap": 1.0,
                "f1": 0.9995032290114257,
                "f1_threshold": 27.220050818745307,
                "precision": 1.0,
                "recall": 0.9990069513406157
            },
            "max": {
                "accuracy": 0.9990069513406157,
                "ap": 1.0,
                "f1": 0.9995032290114257
            }
        }
    }
}