{
  "dataset_revision": null,
  "mteb_dataset_name": "Ocnli",
  "mteb_version": "1.0.2",
  "validation": {
    "cos_sim": {
      "accuracy": 0.7260422306442881,
      "accuracy_threshold": 0.8360525369644165,
      "ap": 0.7574802077799594,
      "f1": 0.7463369963369962,
      "f1_threshold": 0.8120589256286621,
      "precision": 0.6588520614389652,
      "recall": 0.8606124604012672
    },
    "dot": {
      "accuracy": 0.7260422306442881,
      "accuracy_threshold": 0.8360525369644165,
      "ap": 0.7574802077799594,
      "f1": 0.7463369963369962,
      "f1_threshold": 0.812059223651886,
      "precision": 0.6588520614389652,
      "recall": 0.8606124604012672
    },
    "euclidean": {
      "accuracy": 0.7260422306442881,
      "accuracy_threshold": 0.5726211071014404,
      "ap": 0.7574802077799594,
      "f1": 0.7463369963369962,
      "f1_threshold": 0.6130922436714172,
      "precision": 0.6588520614389652,
      "recall": 0.8606124604012672
    },
    "evaluation_time": 6.36,
    "manhattan": {
      "accuracy": 0.7244179750947483,
      "accuracy_threshold": 14.590071678161621,
      "ap": 0.7572205600034598,
      "f1": 0.745578231292517,
      "f1_threshold": 15.700162887573242,
      "precision": 0.6534181240063593,
      "recall": 0.8680042238648363
    },
    "max": {
      "accuracy": 0.7260422306442881,
      "ap": 0.7574802077799594,
      "f1": 0.7463369963369962
    }
  }
}