{
  "dataset_revision": "9e9b1f8ef51616073f47f306f7f47dd91663f86a",
  "mteb_dataset_name": "OpusparcusPC",
  "mteb_version": "1.1.2.dev0",
  "test": {
    "evaluation_time": 0.99,
    "fr": {
      "cos_sim": {
        "accuracy": 0.9990069513406157,
        "accuracy_threshold": 0.20005719372606418,
        "ap": 1.0,
        "f1": 0.9995032290114257,
        "f1_threshold": 0.20005719372606418,
        "precision": 1.0,
        "recall": 0.9990069513406157
      },
      "dot": {
        "accuracy": 0.9990069513406157,
        "accuracy_threshold": 0.20005719662081198,
        "ap": 1.0,
        "f1": 0.9995032290114257,
        "f1_threshold": 0.20005719662081198,
        "precision": 1.0,
        "recall": 0.9990069513406157
      },
      "euclidean": {
        "accuracy": 0.9990069513406157,
        "accuracy_threshold": 1.264847724189768,
        "ap": 1.0,
        "f1": 0.9995032290114257,
        "f1_threshold": 1.264847724189768,
        "precision": 1.0,
        "recall": 0.9990069513406157
      },
      "manhattan": {
        "accuracy": 0.9990069513406157,
        "accuracy_threshold": 21.905131239303934,
        "ap": 1.0,
        "f1": 0.9995032290114257,
        "f1_threshold": 21.905131239303934,
        "precision": 1.0,
        "recall": 0.9990069513406157
      },
      "max": {
        "accuracy": 0.9990069513406157,
        "ap": 1.0,
        "f1": 0.9995032290114257
      }
    }
  }
}