{
    "test": {
        "de": {
            "accuracy": 0.4506903353057199,
            "accuracy_stderr": 0.013378118540701325,
            "f1": 0.30468468273374966,
            "f1_stderr": 0.008995351448063477,
            "main_score": 0.4506903353057199
        },
        "en": {
            "accuracy": 0.582421340629275,
            "accuracy_stderr": 0.006780598290184062,
            "f1": 0.40116960466226426,
            "f1_stderr": 0.009453547895794426,
            "main_score": 0.582421340629275
        },
        "es": {
            "accuracy": 0.4880920613742495,
            "accuracy_stderr": 0.019450645284678516,
            "f1": 0.3265985375400447,
            "f1_stderr": 0.012041416895760264,
            "main_score": 0.4880920613742495
        },
        "evaluation_time": 323.58,
        "fr": {
            "accuracy": 0.4433761352959599,
            "accuracy_stderr": 0.014015898928183473,
            "f1": 0.2930204743560644,
            "f1_stderr": 0.015391435602219726,
            "main_score": 0.4433761352959599
        },
        "hi": {
            "accuracy": 0.34198637504481894,
            "accuracy_stderr": 0.015449199463511821,
            "f1": 0.2206370603224841,
            "f1_stderr": 0.004165766179716482,
            "main_score": 0.34198637504481894
        },
        "th": {
            "accuracy": 0.4311030741410488,
            "accuracy_stderr": 0.012756015977250203,
            "f1": 0.2692408933648504,
            "f1_stderr": 0.006892806533674313,
            "main_score": 0.4311030741410488
        }
    },
    "validation": {
        "de": {
            "accuracy": 0.445564738292011,
            "accuracy_stderr": 0.010360617483778395,
            "f1": 0.27459727929412353,
            "f1_stderr": 0.0081800587643621,
            "main_score": 0.445564738292011
        },
        "en": {
            "accuracy": 0.5970469798657719,
            "accuracy_stderr": 0.012933976952608941,
            "f1": 0.40484173794377465,
            "f1_stderr": 0.014119500413989615,
            "main_score": 0.5970469798657719
        },
        "es": {
            "accuracy": 0.49502292075965953,
            "accuracy_stderr": 0.02246998023954115,
            "f1": 0.3153422553690758,
            "f1_stderr": 0.015186039000464618,
            "main_score": 0.49502292075965953
        },
        "evaluation_time": 249.98,
        "fr": {
            "accuracy": 0.4402663284717819,
            "accuracy_stderr": 0.016184359520079704,
            "f1": 0.26128143579041774,
            "f1_stderr": 0.010001476828499637,
            "main_score": 0.4402663284717819
        },
        "hi": {
            "accuracy": 0.3382703777335984,
            "accuracy_stderr": 0.012219364561758182,
            "f1": 0.22964605768066088,
            "f1_stderr": 0.006215891228964558,
            "main_score": 0.3382703777335984
        },
        "th": {
            "accuracy": 0.416098144823459,
            "accuracy_stderr": 0.014224978455212227,
            "f1": 0.25381438383568683,
            "f1_stderr": 0.015695743183157283,
            "main_score": 0.416098144823459
        }
    },
    "mteb_version": "0.0.2",
    "mteb_dataset_name": "MTOPIntentClassification",
    "dataset_revision": "6299947a7777084cc2d4b64235bf7190381ce755"
}