{
    "dataset_revision": "8a04d940a42cd40658986fdd8e3da561533a3646",
    "mteb_dataset_name": "PawsX",
    "mteb_version": "1.1.3.dev0",
    "test": {
        "evaluation_time": 4.04,
        "fr": {
            "cos_sim": {
                "accuracy": 0.566,
                "accuracy_threshold": 0.9955285335593526,
                "ap": 0.521089966496604,
                "f1": 0.6255230125523012,
                "f1_threshold": 0.9026661174041206,
                "precision": 0.45648854961832064,
                "recall": 0.9933554817275747
            },
            "dot": {
                "accuracy": 0.566,
                "accuracy_threshold": 0.9955284811207616,
                "ap": 0.5219936912087544,
                "f1": 0.6255230125523012,
                "f1_threshold": 0.9026661168088697,
                "precision": 0.45648854961832064,
                "recall": 0.9933554817275747
            },
            "euclidean": {
                "accuracy": 0.566,
                "accuracy_threshold": 0.09456705981791737,
                "ap": 0.521089966496604,
                "f1": 0.6255230125523012,
                "f1_threshold": 0.44121162859740126,
                "precision": 0.45648854961832064,
                "recall": 0.9933554817275747
            },
            "manhattan": {
                "accuracy": 0.5665,
                "accuracy_threshold": 2.7342980123045777,
                "ap": 0.522879238138305,
                "f1": 0.6249134948096886,
                "f1_threshold": 10.39639508943469,
                "precision": 0.4544539506794162,
                "recall": 1.0
            },
            "max": {
                "accuracy": 0.5665,
                "ap": 0.522879238138305,
                "f1": 0.6255230125523012
            }
        }
    }
}