{
  "dataset_revision": null,
  "mteb_dataset_name": "Cmnli",
  "mteb_version": "1.1.0",
  "validation": {
    "cos_sim": {
      "accuracy": 0.7607937462417318,
      "accuracy_threshold": 0.6406447887420654,
      "ap": 0.8527096936386681,
      "f1": 0.7775705036760671,
      "f1_threshold": 0.594871997833252,
      "precision": 0.7326302729528535,
      "recall": 0.8283843815758709
    },
    "dot": {
      "accuracy": 0.7607937462417318,
      "accuracy_threshold": 0.6406447291374207,
      "ap": 0.8529254554557811,
      "f1": 0.7775705036760671,
      "f1_threshold": 0.594871997833252,
      "precision": 0.7326302729528535,
      "recall": 0.8283843815758709
    },
    "euclidean": {
      "accuracy": 0.7607937462417318,
      "accuracy_threshold": 0.8477679491043091,
      "ap": 0.8527096104031583,
      "f1": 0.7775705036760671,
      "f1_threshold": 0.9001421928405762,
      "precision": 0.7326302729528535,
      "recall": 0.8283843815758709
    },
    "evaluation_time": 5.02,
    "manhattan": {
      "accuracy": 0.7605532170775706,
      "accuracy_threshold": 21.524826049804688,
      "ap": 0.8527525013324765,
      "f1": 0.7786797692800683,
      "f1_threshold": 23.282154083251953,
      "precision": 0.7168141592920354,
      "recall": 0.8522328735094693
    },
    "max": {
      "accuracy": 0.7607937462417318,
      "ap": 0.8529254554557811,
      "f1": 0.7786797692800683
    }
  }
}