results/dfm-sentence-encoder-large-1/MassiveIntentClassification.json
{
  "dataset_revision": "31efe3c427b0bae9c22cbb560b8f15491cc6bed7",
  "mteb_dataset_name": "MassiveIntentClassification",
  "mteb_version": "1.0.3.dev0",
  "test": {
    "da": {
      "accuracy": 0.6582716879623403,
      "accuracy_stderr": 0.010244296914036263,
      "f1": 0.6208842888158964,
      "f1_stderr": 0.00941057589534994,
      "main_score": 0.6582716879623403
    },
    "evaluation_time": 510.04,
    "nb": {
      "accuracy": 0.5757229320780095,
      "accuracy_stderr": 0.014698899958770613,
      "f1": 0.5479101324305733,
      "f1_stderr": 0.009961334138223912,
      "main_score": 0.5757229320780095
    },
    "sv": {
      "accuracy": 0.5539340954942839,
      "accuracy_stderr": 0.009796522813182882,
      "f1": 0.5246392360657182,
      "f1_stderr": 0.008644343226026373,
      "main_score": 0.5539340954942839
    }
  },
  "validation": {
    "da": {
      "accuracy": 0.6685686178061978,
      "accuracy_stderr": 0.013267742327391455,
      "f1": 0.6308335621455501,
      "f1_stderr": 0.008900225826546147,
      "main_score": 0.6685686178061978
    },
    "evaluation_time": 391.49,
    "nb": {
      "accuracy": 0.5822429906542056,
      "accuracy_stderr": 0.014107810377474118,
      "f1": 0.5511312858964658,
      "f1_stderr": 0.00937744462621781,
      "main_score": 0.5822429906542056
    },
    "sv": {
      "accuracy": 0.5525823905558288,
      "accuracy_stderr": 0.013344838727135107,
      "f1": 0.5223199767067308,
      "f1_stderr": 0.006240762591723962,
      "main_score": 0.5525823905558288
    }
  }
}