{
  "dataset_revision": "9e9b1f8ef51616073f47f306f7f47dd91663f86a",
  "mteb_dataset_name": "OpusparcusPC",
  "mteb_version": "1.1.3.dev0",
  "test.full": {
    "evaluation_time": 51.2,
    "fr": {
      "cos_sim": {
        "accuracy": 0.832425068119891,
        "accuracy_threshold": 0.7467960817697848,
        "ap": 0.940778214081015,
        "f1": 0.8835489833641405,
        "f1_threshold": 0.7271200044325132,
        "precision": 0.8262748487467588,
        "recall": 0.9493545183714002
      },
      "dot": {
        "accuracy": 0.832425068119891,
        "accuracy_threshold": 0.7490443912839898,
        "ap": 0.9408199694181452,
        "f1": 0.8836565096952909,
        "f1_threshold": 0.7262316370031499,
        "precision": 0.825711820534944,
        "recall": 0.9503475670307845
      },
      "euclidean": {
        "accuracy": 0.832425068119891,
        "accuracy_threshold": 0.7116955506512509,
        "ap": 0.9407813925573325,
        "f1": 0.8835489833641405,
        "f1_threshold": 0.7386573389787766,
        "precision": 0.8262748487467588,
        "recall": 0.9493545183714002
      },
      "manhattan": {
        "accuracy": 0.829700272479564,
        "accuracy_threshold": 18.253194524231304,
        "ap": 0.9405161031929276,
        "f1": 0.8843663274745606,
        "f1_threshold": 18.437848682206912,
        "precision": 0.8277056277056277,
        "recall": 0.9493545183714002
      },
      "max": {
        "accuracy": 0.832425068119891,
        "ap": 0.9408199694181452,
        "f1": 0.8843663274745606
      }
    }
  },
  "validation.full": {
    "evaluation_time": 54.26,
    "fr": {
      "cos_sim": {
        "accuracy": 0.834045584045584,
        "accuracy_threshold": 0.7575474947358551,
        "ap": 0.9503580631772154,
        "f1": 0.8873981792046,
        "f1_threshold": 0.7489177144506245,
        "precision": 0.8495412844036697,
        "recall": 0.9287863590772317
      },
      "dot": {
        "accuracy": 0.8333333333333334,
        "accuracy_threshold": 0.761604523355961,
        "ap": 0.9503465979888317,
        "f1": 0.8872109485606419,
        "f1_threshold": 0.7405164688069064,
        "precision": 0.8377896613190731,
        "recall": 0.9428284854563691
      },
      "euclidean": {
        "accuracy": 0.834045584045584,
        "accuracy_threshold": 0.6963168738126557,
        "ap": 0.9503631394140197,
        "f1": 0.8873981792046,
        "f1_threshold": 0.7086247457212693,
        "precision": 0.8495412844036697,
        "recall": 0.9287863590772317
      },
      "manhattan": {
        "accuracy": 0.8354700854700855,
        "accuracy_threshold": 17.565671292860813,
        "ap": 0.9506502752461305,
        "f1": 0.8887818969667791,
        "f1_threshold": 17.565671292860813,
        "precision": 0.8546296296296296,
        "recall": 0.925777331995988
      },
      "max": {
        "accuracy": 0.8354700854700855,
        "ap": 0.9506502752461305,
        "f1": 0.8887818969667791
      }
    }
  }
}