{
    "dataset_revision": "31efe3c427b0bae9c22cbb560b8f15491cc6bed7",
    "mteb_dataset_name": "MassiveIntentClassification",
    "mteb_version": "1.0.3.dev0",
    "test": {
        "da": {
            "accuracy": 0.5468392737054472,
            "accuracy_stderr": 0.011536817088604236,
            "f1": 0.5039000994078626,
            "f1_stderr": 0.006389390451655079,
            "main_score": 0.5468392737054472
        },
        "evaluation_time": 207.42,
        "nb": {
            "accuracy": 0.45379959650302626,
            "accuracy_stderr": 0.012549015365232539,
            "f1": 0.4271217484445252,
            "f1_stderr": 0.011229470448672847,
            "main_score": 0.45379959650302626
        },
        "sv": {
            "accuracy": 0.40817081371889713,
            "accuracy_stderr": 0.006134927155520256,
            "f1": 0.3841954212578219,
            "f1_stderr": 0.008971093419384164,
            "main_score": 0.40817081371889713
        }
    },
    "validation": {
        "da": {
            "accuracy": 0.5582390555828824,
            "accuracy_stderr": 0.011006445314683956,
            "f1": 0.5151428356070841,
            "f1_stderr": 0.008861586598311164,
            "main_score": 0.5582390555828824
        },
        "evaluation_time": 189.86,
        "nb": {
            "accuracy": 0.46242006886374815,
            "accuracy_stderr": 0.013734839963464364,
            "f1": 0.43005970598528265,
            "f1_stderr": 0.013601150407790828,
            "main_score": 0.46242006886374815
        },
        "sv": {
            "accuracy": 0.4048696507624201,
            "accuracy_stderr": 0.009431994322367262,
            "f1": 0.38552685366886996,
            "f1_stderr": 0.007369118783426967,
            "main_score": 0.4048696507624201
        }
    }
}