{
    "dataset_revision": null,
    "mteb_dataset_name": "Ocnli",
    "mteb_version": "1.1.0",
    "validation": {
        "cos_sim": {
            "accuracy": 0.6204656199242015,
            "accuracy_threshold": 0.7207728624343872,
            "ap": 0.6457097703997648,
            "f1": 0.6853533359652585,
            "f1_threshold": 0.5524237155914307,
            "precision": 0.5472887767969735,
            "recall": 0.9165786694825766
        },
        "dot": {
            "accuracy": 0.6204656199242015,
            "accuracy_threshold": 0.7207728624343872,
            "ap": 0.6457097703997648,
            "f1": 0.6853533359652585,
            "f1_threshold": 0.5524237155914307,
            "precision": 0.5472887767969735,
            "recall": 0.9165786694825766
        },
        "euclidean": {
            "accuracy": 0.6204656199242015,
            "accuracy_threshold": 0.747298002243042,
            "ap": 0.6457097703997648,
            "f1": 0.6853533359652585,
            "f1_threshold": 0.9461250305175781,
            "precision": 0.5472887767969735,
            "recall": 0.9165786694825766
        },
        "evaluation_time": 2.03,
        "manhattan": {
            "accuracy": 0.6199242014076881,
            "accuracy_threshold": 13.520804405212402,
            "ap": 0.6451663530124113,
            "f1": 0.6834130781499203,
            "f1_threshold": 16.900650024414062,
            "precision": 0.5490070467648943,
            "recall": 0.9049630411826821
        },
        "max": {
            "accuracy": 0.6204656199242015,
            "ap": 0.6457097703997648,
            "f1": 0.6853533359652585
        }
    }
}