{ "dataset_revision": "7d571f92784cd94a019292a1f45445077d0ef634", "mteb_dataset_name": "MassiveScenarioClassification", "mteb_version": "1.0.3.dev0", "test": { "da": { "accuracy": 0.674646940147949, "accuracy_stderr": 0.014803445590728282, "f1": 0.6595147279291771, "f1_stderr": 0.01678996773478074, "main_score": 0.674646940147949 }, "evaluation_time": 90.59, "nb": { "accuracy": 0.6618359112306658, "accuracy_stderr": 0.021329357261303417, "f1": 0.6474591310607613, "f1_stderr": 0.021072648258161693, "main_score": 0.6618359112306658 }, "sv": { "accuracy": 0.6914593140551446, "accuracy_stderr": 0.016069111837315294, "f1": 0.6743841787551895, "f1_stderr": 0.017101538379354537, "main_score": 0.6914593140551446 } }, "validation": { "da": { "accuracy": 0.6642892277422527, "accuracy_stderr": 0.017198009075793542, "f1": 0.6477028873642966, "f1_stderr": 0.016473026403322006, "main_score": 0.6642892277422527 }, "evaluation_time": 70.62, "nb": { "accuracy": 0.6570093457943926, "accuracy_stderr": 0.016090018357941563, "f1": 0.6424507827687334, "f1_stderr": 0.014730759355239376, "main_score": 0.6570093457943926 }, "sv": { "accuracy": 0.6801770782095425, "accuracy_stderr": 0.01743074477898866, "f1": 0.6624692361676364, "f1_stderr": 0.016467522961264077, "main_score": 0.6801770782095425 } } }