{ "dataset_revision": "31efe3c427b0bae9c22cbb560b8f15491cc6bed7", "mteb_dataset_name": "MassiveIntentClassification", "mteb_version": "1.0.3.dev0", "test": { "da": { "accuracy": 0.06513113651647613, "accuracy_stderr": 0.011214798579686689, "f1": 0.05043467599564514, "f1_stderr": 0.0035046004441874334, "main_score": 0.06513113651647613 }, "evaluation_time": 61.37, "nb": { "accuracy": 0.05655682582380632, "accuracy_stderr": 0.006670594050204399, "f1": 0.04546233598378012, "f1_stderr": 0.004960950317012453, "main_score": 0.05655682582380632 }, "sv": { "accuracy": 0.0660053799596503, "accuracy_stderr": 0.005903130694545474, "f1": 0.05218747326664214, "f1_stderr": 0.0038648945970980028, "main_score": 0.0660053799596503 } }, "validation": { "da": { "accuracy": 0.06182980816527299, "accuracy_stderr": 0.010147721345964106, "f1": 0.04927816549444951, "f1_stderr": 0.004759922244444279, "main_score": 0.06182980816527299 }, "evaluation_time": 54.24, "nb": { "accuracy": 0.053762911952779144, "accuracy_stderr": 0.005210488030150068, "f1": 0.042860707318464905, "f1_stderr": 0.003291170636180566, "main_score": 0.053762911952779144 }, "sv": { "accuracy": 0.058878504672897194, "accuracy_stderr": 0.006436165232467847, "f1": 0.04559030545362343, "f1_stderr": 0.00495909981017614, "main_score": 0.058878504672897194 } } }