{
"dataset_revision": null,
"mteb_dataset_name": "Cmnli",
"mteb_version": "1.1.0",
"validation": {
"cos_sim": {
"accuracy": 0.7532170775706555,
"accuracy_threshold": 0.647645115852356,
"ap": 0.8410107571628663,
"f1": 0.7724867724867724,
"f1_threshold": 0.6053565144538879,
"precision": 0.6943304737038418,
"recall": 0.8704699555763385
},
"dot": {
"accuracy": 0.7532170775706555,
"accuracy_threshold": 0.6476450562477112,
"ap": 0.8410697261897573,
"f1": 0.7724867724867724,
"f1_threshold": 0.6053565144538879,
"precision": 0.6943304737038418,
"recall": 0.8704699555763385
},
"euclidean": {
"accuracy": 0.7532170775706555,
"accuracy_threshold": 0.8394699096679688,
"ap": 0.8410108021257265,
"f1": 0.7724867724867724,
"f1_threshold": 0.8884183168411255,
"precision": 0.6943304737038418,
"recall": 0.8704699555763385
},
"evaluation_time": 4.33,
"manhattan": {
"accuracy": 0.7536981358989777,
"accuracy_threshold": 18.23721694946289,
"ap": 0.8408963848701726,
"f1": 0.7717035217035216,
"f1_threshold": 19.787551879882812,
"precision": 0.6863959205973411,
"recall": 0.8812251578209025
},
"max": {
"accuracy": 0.7536981358989777,
"ap": 0.8410697261897573,
"f1": 0.7724867724867724
}
}
}