results/LaBSE/CDSC-E.json
{
    "dataset_revision": null,
    "mteb_dataset_name": "CDSC-E",
    "mteb_version": "1.1.1",
    "test": {
        "cos_sim": {
            "accuracy": 0.87,
            "accuracy_threshold": 0.969829223949605,
            "ap": 0.6891216334658263,
            "f1": 0.6082191780821918,
            "f1_threshold": 0.9013995387852827,
            "precision": 0.6342857142857142,
            "recall": 0.5842105263157895
        },
        "dot": {
            "accuracy": 0.87,
            "accuracy_threshold": 0.9697265625,
            "ap": 0.688550562919696,
            "f1": 0.6098901098901099,
            "f1_threshold": 0.90234375,
            "precision": 0.6379310344827587,
            "recall": 0.5842105263157895
        },
        "euclidean": {
            "accuracy": 0.87,
            "accuracy_threshold": 0.24560842630837865,
            "ap": 0.6891864411908866,
            "f1": 0.6082191780821918,
            "f1_threshold": 0.44416641107506294,
            "precision": 0.6342857142857142,
            "recall": 0.5842105263157895
        },
        "evaluation_time": 0.87,
        "manhattan": {
            "accuracy": 0.87,
            "accuracy_threshold": 5.883958101272583,
            "ap": 0.685694522746493,
            "f1": 0.6052631578947368,
            "f1_threshold": 9.884694695472717,
            "precision": 0.6052631578947368,
            "recall": 0.6052631578947368
        },
        "max": {
            "accuracy": 0.87,
            "ap": 0.6891864411908866,
            "f1": 0.6098901098901099
        }
    }
}
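
For readers who want to consume this file programmatically, here is a minimal sketch (not part of the result file itself) that loads the JSON and prints the per-similarity scores. It assumes only the standard library and the repository layout visible above (results/LaBSE/CDSC-E.json); adjust the path to wherever you cloned the results.

```python
import json

# Assumed path, based on the repository layout results/<model>/<task>.json.
RESULT_PATH = "results/LaBSE/CDSC-E.json"

with open(RESULT_PATH, encoding="utf-8") as f:
    result = json.load(f)

test_scores = result["test"]

# Each similarity function gets its own score block; "max" holds the best
# accuracy/ap/f1 across them, and "evaluation_time" is not a score block.
for name in ("cos_sim", "dot", "euclidean", "manhattan", "max"):
    scores = test_scores[name]
    print(f"{name:10s} accuracy={scores['accuracy']:.4f} ap={scores['ap']:.4f} f1={scores['f1']:.4f}")
```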