{
    "dataset_revision": null,
    "mteb_dataset_name": "Cmnli",
    "mteb_version": "1.1.0",
    "validation": {
        "cos_sim": {
            "accuracy": 0.6903187011425135,
            "accuracy_threshold": 0.7071499824523926,
            "ap": 0.762374429554391,
            "f1": 0.7221441477614937,
            "f1_threshold": 0.645449697971344,
            "precision": 0.6327176781002638,
            "recall": 0.8410100537760112
        },
        "dot": {
            "accuracy": 0.6903187011425135,
            "accuracy_threshold": 0.7071500420570374,
            "ap": 0.7621755897434168,
            "f1": 0.7221441477614937,
            "f1_threshold": 0.6454498171806335,
            "precision": 0.6327176781002638,
            "recall": 0.8410100537760112
        },
        "euclidean": {
            "accuracy": 0.6903187011425135,
            "accuracy_threshold": 0.7653103470802307,
            "ap": 0.7623744445612339,
            "f1": 0.7221441477614937,
            "f1_threshold": 0.8420810699462891,
            "precision": 0.6327176781002638,
            "recall": 0.8410100537760112
        },
        "evaluation_time": 3.37,
        "manhattan": {
            "accuracy": 0.6898376428141912,
            "accuracy_threshold": 13.920679092407227,
            "ap": 0.762279201720205,
            "f1": 0.7227414330218069,
            "f1_threshold": 15.225663185119629,
            "precision": 0.6337680648572436,
            "recall": 0.8407762450315642
        },
        "max": {
            "accuracy": 0.6903187011425135,
            "ap": 0.7623744445612339,
            "f1": 0.7227414330218069
        }
    }
}