{
  "dataset_revision": "8a04d940a42cd40658986fdd8e3da561533a3646",
  "mteb_dataset_name": "PawsX",
  "mteb_version": "1.1.3.dev0",
  "test": {
    "evaluation_time": 3.6,
    "fr": {
      "cos_sim": {
        "accuracy": 0.5635,
        "accuracy_threshold": 0.9923265985316547,
        "ap": 0.5076824569885957,
        "f1": 0.6250871080139372,
        "f1_threshold": 0.8856663609548949,
        "precision": 0.4560244026436197,
        "recall": 0.9933554817275747
      },
      "dot": {
        "accuracy": 0.5635,
        "accuracy_threshold": 0.9923266358923903,
        "ap": 0.5071159142291282,
        "f1": 0.6250871080139372,
        "f1_threshold": 0.8856664067288832,
        "precision": 0.4560244026436197,
        "recall": 0.9933554817275747
      },
      "euclidean": {
        "accuracy": 0.5635,
        "accuracy_threshold": 0.12388220501537019,
        "ap": 0.5076824569885957,
        "f1": 0.6250871080139372,
        "f1_threshold": 0.4781914751689264,
        "precision": 0.4560244026436197,
        "recall": 0.9933554817275747
      },
      "manhattan": {
        "accuracy": 0.5635,
        "accuracy_threshold": 1.9962251006430165,
        "ap": 0.5077153706739639,
        "f1": 0.6249134948096886,
        "f1_threshold": 19.026859030628657,
        "precision": 0.4544539506794162,
        "recall": 1.0
      },
      "max": {
        "accuracy": 0.5635,
        "ap": 0.5077153706739639,
        "f1": 0.6250871080139372
      }
    }
  }
}