{
    "dataset_revision": null,
    "mteb_dataset_name": "PPC",
    "mteb_version": "1.1.1",
    "test": {
        "cos_sim": {
            "accuracy": 0.753,
            "accuracy_threshold": 0.845115065574646,
            "ap": 0.8683362699207859,
            "f1": 0.7938697318007663,
            "f1_threshold": 0.7955561876296997,
            "precision": 0.738944365192582,
            "recall": 0.8576158940397351
        },
        "dot": {
            "accuracy": 0.73,
            "accuracy_threshold": 0.5435672998428345,
            "ap": 0.8327559322760829,
            "f1": 0.7884465261514442,
            "f1_threshold": 0.5357716679573059,
            "precision": 0.7459379615952733,
            "recall": 0.8360927152317881
        },
        "euclidean": {
            "accuracy": 0.738,
            "accuracy_threshold": 0.43584996461868286,
            "ap": 0.8598737731431576,
            "f1": 0.7842227378190255,
            "f1_threshold": 0.5215912461280823,
            "precision": 0.7358490566037735,
            "recall": 0.8394039735099338
        },
        "evaluation_time": 1.07,
        "manhattan": {
            "accuracy": 0.71,
            "accuracy_threshold": 7.130887031555176,
            "ap": 0.846881046006653,
            "f1": 0.7650695517774344,
            "f1_threshold": 8.852322578430176,
            "precision": 0.717391304347826,
            "recall": 0.8195364238410596
        },
        "max": {
            "accuracy": 0.753,
            "ap": 0.8683362699207859,
            "f1": 0.7938697318007663
        }
    }
}
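
These are MTEB pair-classification scores for the PPC task (Polish paraphrase pairs): each similarity function (cosine, dot product, Euclidean, Manhattan) is reported with average precision plus accuracy and F1 at their best-found thresholds, and "max" appears to carry the best of the four (here, cosine similarity). The sketch below is a rough, simplified reconstruction of how the "cos_sim" numbers are typically computed, not the actual MTEB evaluator; the model name and the input lists are placeholders you would supply yourself.

# Minimal sketch (assumptions: placeholder model name, pre-loaded sentence pairs
# and 0/1 paraphrase labels). Embeds both sides of each pair, scores them with
# cosine similarity, and sweeps a threshold for accuracy/F1, mirroring the
# quantities reported under "cos_sim" above.
import numpy as np
from sklearn.metrics import average_precision_score, f1_score, precision_score, recall_score
from sentence_transformers import SentenceTransformer

def cos_sim_pair_metrics(sent1, sent2, labels, model_name="your-embedding-model"):
    model = SentenceTransformer(model_name)  # placeholder model name
    emb1 = model.encode(sent1, convert_to_numpy=True)
    emb2 = model.encode(sent2, convert_to_numpy=True)

    # Cosine similarity for each sentence pair.
    scores = np.sum(emb1 * emb2, axis=1) / (
        np.linalg.norm(emb1, axis=1) * np.linalg.norm(emb2, axis=1)
    )
    labels = np.asarray(labels)

    # AP is threshold-free; accuracy and F1 are taken at their best thresholds.
    best = {"accuracy": 0.0, "f1": 0.0, "ap": average_precision_score(labels, scores)}
    for t in np.sort(scores):
        preds = (scores >= t).astype(int)
        acc = float((preds == labels).mean())
        f1 = f1_score(labels, preds)
        if acc > best["accuracy"]:
            best.update(accuracy=acc, accuracy_threshold=float(t))
        if f1 > best["f1"]:
            best.update(
                f1=f1,
                f1_threshold=float(t),
                precision=precision_score(labels, preds),
                recall=recall_score(labels, preds),
            )
    return best

The "dot", "euclidean", and "manhattan" blocks follow the same pattern with the similarity (or negated distance) function swapped in, which is why each carries its own thresholds.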