{
  "mteb_version": "0.0.2",
  "test": {
    "de": {
      "accuracy": 0.7203719357565511,
      "accuracy_stderr": 0.017337505013513325,
      "f1": 0.6875742308679864,
      "f1_stderr": 0.014284616988778162,
      "main_score": 0.7203719357565511
    },
    "en": {
      "accuracy": 0.9189922480620154,
      "accuracy_stderr": 0.006589936160622735,
      "f1": 0.9166762682851963,
      "f1_stderr": 0.00584750568408716,
      "main_score": 0.9189922480620154
    },
    "es": {
      "accuracy": 0.7298532354903269,
      "accuracy_stderr": 0.012275982849359837,
      "f1": 0.7133173021994275,
      "f1_stderr": 0.01121863286817087,
      "main_score": 0.7298532354903269
    },
    "evaluation_time": 14.77,
    "fr": {
      "accuracy": 0.7559348575007829,
      "accuracy_stderr": 0.017000528031869893,
      "f1": 0.731511918522243,
      "f1_stderr": 0.017328040962177056,
      "main_score": 0.7559348575007829
    },
    "hi": {
      "accuracy": 0.4036213696665471,
      "accuracy_stderr": 0.017838466256713908,
      "f1": 0.37865703085609476,
      "f1_stderr": 0.020691579244820135,
      "main_score": 0.4036213696665471
    },
    "th": {
      "accuracy": 0.17099457504520796,
      "accuracy_stderr": 0.02212353056791036,
      "f1": 0.1286835498185132,
      "f1_stderr": 0.018049344134980553,
      "main_score": 0.17099457504520796
    }
  },
  "mteb_dataset_name": "MTOPDomainClassification",
  "dataset_revision": "a7e2a951126a26fc8c6a69f835f33a346ba259e3"
}