{
"test": {
"de": {
"accuracy": 0.4442659904198366,
"accuracy_stderr": 0.03453672489769159,
"f1": 0.27205947872504427,
"f1_stderr": 0.011785737058608074,
"main_score": 0.4442659904198366
},
"en": {
"accuracy": 0.6332421340629275,
"accuracy_stderr": 0.01838496766452937,
"f1": 0.45423410630279565,
"f1_stderr": 0.01118389186843692,
"main_score": 0.6332421340629275
},
"es": {
"accuracy": 0.42028018679119417,
"accuracy_stderr": 0.02010707425549429,
"f1": 0.26314909946795734,
"f1_stderr": 0.007043257657063301,
"main_score": 0.42028018679119417
},
"evaluation_time": 150.84,
"fr": {
"accuracy": 0.4384591293454432,
"accuracy_stderr": 0.031411075230798224,
"f1": 0.2951970197285979,
"f1_stderr": 0.007558160171831519,
"main_score": 0.4384591293454432
},
"hi": {
"accuracy": 0.0380064539261384,
"accuracy_stderr": 0.013026821172165614,
"f1": 0.012078686392462627,
"f1_stderr": 0.002149936335979169,
"main_score": 0.0380064539261384
},
"th": {
"accuracy": 0.05207956600361664,
"accuracy_stderr": 0.020509868902362137,
"f1": 0.015365513001536745,
"f1_stderr": 0.0055953771272099105,
"main_score": 0.05207956600361664
}
},
"validation": {
"de": {
"accuracy": 0.43322314049586785,
"accuracy_stderr": 0.02997371776616617,
"f1": 0.26791591448242374,
"f1_stderr": 0.013145599165313177,
"main_score": 0.43322314049586785
},
"en": {
"accuracy": 0.6403579418344519,
"accuracy_stderr": 0.025397795590561045,
"f1": 0.4416019115448817,
"f1_stderr": 0.019832587980014974,
"main_score": 0.6403579418344519
},
"es": {
"accuracy": 0.43922724296005244,
"accuracy_stderr": 0.01993683976817167,
"f1": 0.2923717807147227,
"f1_stderr": 0.008647128206056783,
"main_score": 0.43922724296005244
},
"evaluation_time": 125.75,
"fr": {
"accuracy": 0.45237793278376665,
"accuracy_stderr": 0.02891899990754128,
"f1": 0.299594801272391,
"f1_stderr": 0.008468453818834888,
"main_score": 0.45237793278376665
},
"hi": {
"accuracy": 0.038270377733598406,
"accuracy_stderr": 0.012784088601012149,
"f1": 0.014791835779179993,
"f1_stderr": 0.003084613708010073,
"main_score": 0.038270377733598406
},
"th": {
"accuracy": 0.047995212447636146,
"accuracy_stderr": 0.021991169835235275,
"f1": 0.016206479903567383,
"f1_stderr": 0.004085152165027258,
"main_score": 0.047995212447636146
}
},
"dataset_version": null,
"mteb_version": "0.0.2"
}