{
"dataset_revision": "31efe3c427b0bae9c22cbb560b8f15491cc6bed7",
"mteb_dataset_name": "MassiveIntentClassification",
"mteb_version": "1.0.3.dev0",
"test": {
"da": {
"accuracy": 0.6582716879623403,
"accuracy_stderr": 0.010244296914036263,
"f1": 0.6208842888158964,
"f1_stderr": 0.00941057589534994,
"main_score": 0.6582716879623403
},
"evaluation_time": 510.04,
"nb": {
"accuracy": 0.5757229320780095,
"accuracy_stderr": 0.014698899958770613,
"f1": 0.5479101324305733,
"f1_stderr": 0.009961334138223912,
"main_score": 0.5757229320780095
},
"sv": {
"accuracy": 0.5539340954942839,
"accuracy_stderr": 0.009796522813182882,
"f1": 0.5246392360657182,
"f1_stderr": 0.008644343226026373,
"main_score": 0.5539340954942839
}
},
"validation": {
"da": {
"accuracy": 0.6685686178061978,
"accuracy_stderr": 0.013267742327391455,
"f1": 0.6308335621455501,
"f1_stderr": 0.008900225826546147,
"main_score": 0.6685686178061978
},
"evaluation_time": 391.49,
"nb": {
"accuracy": 0.5822429906542056,
"accuracy_stderr": 0.014107810377474118,
"f1": 0.5511312858964658,
"f1_stderr": 0.00937744462621781,
"main_score": 0.5822429906542056
},
"sv": {
"accuracy": 0.5525823905558288,
"accuracy_stderr": 0.013344838727135107,
"f1": 0.5223199767067308,
"f1_stderr": 0.006240762591723962,
"main_score": 0.5525823905558288
}
}
}