results/m3e-base/Cmnli.json
{
"dataset_revision": null,
"mteb_dataset_name": "Cmnli",
"mteb_version": "1.0.2",
"validation": {
"cos_sim": {
"accuracy": 0.6449789536981358,
"accuracy_threshold": 0.8475901484489441,
"ap": 0.699815598339927,
"f1": 0.6958855098389982,
"f1_threshold": 0.7894073724746704,
"precision": 0.5635231058959872,
"recall": 0.9095160158989947
},
"dot": {
"accuracy": 0.6347564642212868,
"accuracy_threshold": 297.91455078125,
"ap": 0.6772785823549499,
"f1": 0.6926649723950573,
"f1_threshold": 275.12353515625,
"precision": 0.5539669189795347,
"recall": 0.9240121580547113
},
"euclidean": {
"accuracy": 0.6446181599518942,
"accuracy_threshold": 10.134183883666992,
"ap": 0.6999514063509157,
"f1": 0.6959758907995037,
"f1_threshold": 12.295406341552734,
"precision": 0.5604568165596003,
"recall": 0.9179331306990881
},
"evaluation_time": 9.52,
"manhattan": {
"accuracy": 0.645219482862297,
"accuracy_threshold": 229.61032104492188,
"ap": 0.6998282684459426,
"f1": 0.6956289504139589,
"f1_threshold": 270.831298828125,
"precision": 0.5616733755031628,
"recall": 0.9134907645545943
},
"max": {
"accuracy": 0.645219482862297,
"ap": 0.6999514063509157,
"f1": 0.6959758907995037
}
}
}