{
  "dataset_revision": null,
  "mteb_dataset_name": "Ocnli",
  "mteb_version": "1.0.2",
  "validation": {
    "cos_sim": {
      "accuracy": 0.6930157011369789,
      "accuracy_threshold": 0.8185935616493225,
      "ap": 0.7137320105502852,
      "f1": 0.7221499783268314,
      "f1_threshold": 0.7772367000579834,
      "precision": 0.6125,
      "recall": 0.8796198521647307
    },
    "dot": {
      "accuracy": 0.6930157011369789,
      "accuracy_threshold": 0.818623423576355,
      "ap": 0.7137314740206417,
      "f1": 0.7221499783268314,
      "f1_threshold": 0.7772367596626282,
      "precision": 0.6125,
      "recall": 0.8796198521647307
    },
    "euclidean": {
      "accuracy": 0.6930157011369789,
      "accuracy_threshold": 0.602339506149292,
      "ap": 0.7137320105502853,
      "f1": 0.7221499783268314,
      "f1_threshold": 0.6674777269363403,
      "precision": 0.6125,
      "recall": 0.8796198521647307
    },
    "evaluation_time": 6.71,
    "manhattan": {
      "accuracy": 0.6978884677855983,
      "accuracy_threshold": 15.341981887817383,
      "ap": 0.713408096083143,
      "f1": 0.7211864406779661,
      "f1_threshold": 17.21076202392578,
      "precision": 0.6022646850672329,
      "recall": 0.8986272439281943
    },
    "max": {
      "accuracy": 0.6978884677855983,
      "ap": 0.7137320105502853,
      "f1": 0.7221499783268314
    }
  }
}