{
  "mteb_version": "0.0.2",
  "test": {
    "de": {
      "accuracy": 0.5162299239222317,
      "accuracy_stderr": 0.01832258118122447,
      "f1": 0.3256925158951781,
      "f1_stderr": 0.006031602034364266,
      "main_score": 0.5162299239222317
    },
    "en": {
      "accuracy": 0.49473324213406294,
      "accuracy_stderr": 0.026360858836492213,
      "f1": 0.3300910657296617,
      "f1_stderr": 0.012151253776376927,
      "main_score": 0.49473324213406294
    },
    "es": {
      "accuracy": 0.5274516344229486,
      "accuracy_stderr": 0.01871901414866834,
      "f1": 0.33394567108321266,
      "f1_stderr": 0.012228976278261429,
      "main_score": 0.5274516344229486
    },
    "evaluation_time": 288.21,
    "fr": {
      "accuracy": 0.5011901033510806,
      "accuracy_stderr": 0.02349600059161113,
      "f1": 0.33814182942295407,
      "f1_stderr": 0.008555560889430382,
      "main_score": 0.5011901033510806
    },
    "hi": {
      "accuracy": 0.45546790964503403,
      "accuracy_stderr": 0.019083595788646334,
      "f1": 0.27716607594942344,
      "f1_stderr": 0.008465873041149053,
      "main_score": 0.45546790964503403
    },
    "th": {
      "accuracy": 0.5006871609403254,
      "accuracy_stderr": 0.032474576694908,
      "f1": 0.3475254801351875,
      "f1_stderr": 0.01064830577932404,
      "main_score": 0.5006871609403254
    }
  },
  "mteb_dataset_name": "MTOPIntentClassification",
  "dataset_revision": "6299947a7777084cc2d4b64235bf7190381ce755"
}