{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.2986348122866894,
            "acc_stderr": 0.013374078615068756,
            "acc_norm": 0.34897610921501704,
            "acc_norm_stderr": 0.013928933461382497
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4010157339175463,
            "acc_stderr": 0.004891025533633027,
            "acc_norm": 0.527185819557857,
            "acc_norm_stderr": 0.004982400368939667
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.30994152046783624,
            "acc_stderr": 0.03546976959393161,
            "acc_norm": 0.30994152046783624,
            "acc_norm_stderr": 0.03546976959393161
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.18446601941747573,
            "acc_stderr": 0.03840423627288276,
            "acc_norm": 0.18446601941747573,
            "acc_norm_stderr": 0.03840423627288276
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.26947637292464877,
            "acc_stderr": 0.01586624307321506,
            "acc_norm": 0.26947637292464877,
            "acc_norm_stderr": 0.01586624307321506
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.34074074074074073,
            "acc_stderr": 0.040943762699967946,
            "acc_norm": 0.34074074074074073,
            "acc_norm_stderr": 0.040943762699967946
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621503,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621503
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.24680851063829787,
            "acc_stderr": 0.0281854413012341,
            "acc_norm": 0.24680851063829787,
            "acc_norm_stderr": 0.0281854413012341
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.24096385542168675,
            "acc_stderr": 0.033293941190735296,
            "acc_norm": 0.24096385542168675,
            "acc_norm_stderr": 0.033293941190735296
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.3183279742765273,
            "acc_stderr": 0.026457225067811025,
            "acc_norm": 0.3183279742765273,
            "acc_norm_stderr": 0.026457225067811025
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.19730941704035873,
            "acc_stderr": 0.02670985334496796,
            "acc_norm": 0.19730941704035873,
            "acc_norm_stderr": 0.02670985334496796
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.3816793893129771,
            "acc_stderr": 0.0426073515764456,
            "acc_norm": 0.3816793893129771,
            "acc_norm_stderr": 0.0426073515764456
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.21,
            "acc_stderr": 0.040936018074033256,
            "acc_norm": 0.21,
            "acc_norm_stderr": 0.040936018074033256
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.25252525252525254,
            "acc_stderr": 0.030954055470365907,
            "acc_norm": 0.25252525252525254,
            "acc_norm_stderr": 0.030954055470365907
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.2896551724137931,
            "acc_stderr": 0.03780019230438015,
            "acc_norm": 0.2896551724137931,
            "acc_norm_stderr": 0.03780019230438015
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.23529411764705882,
            "acc_stderr": 0.04220773659171451,
            "acc_norm": 0.23529411764705882,
            "acc_norm_stderr": 0.04220773659171451
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.2184873949579832,
            "acc_stderr": 0.02684151432295893,
            "acc_norm": 0.2184873949579832,
            "acc_norm_stderr": 0.02684151432295893
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.2128205128205128,
            "acc_stderr": 0.020752423722128002,
            "acc_norm": 0.2128205128205128,
            "acc_norm_stderr": 0.020752423722128002
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.25925925925925924,
            "acc_stderr": 0.04236511258094631,
            "acc_norm": 0.25925925925925924,
            "acc_norm_stderr": 0.04236511258094631
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.2955665024630542,
            "acc_stderr": 0.032104944337514575,
            "acc_norm": 0.2955665024630542,
            "acc_norm_stderr": 0.032104944337514575
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.267741935483871,
            "acc_stderr": 0.025189006660212385,
            "acc_norm": 0.267741935483871,
            "acc_norm_stderr": 0.025189006660212385
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.2606837606837607,
            "acc_stderr": 0.02876034895652341,
            "acc_norm": 0.2606837606837607,
            "acc_norm_stderr": 0.02876034895652341
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.22264150943396227,
            "acc_stderr": 0.025604233470899105,
            "acc_norm": 0.22264150943396227,
            "acc_norm_stderr": 0.025604233470899105
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.24545454545454545,
            "acc_stderr": 0.04122066502878285,
            "acc_norm": 0.24545454545454545,
            "acc_norm_stderr": 0.04122066502878285
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.25555555555555554,
            "acc_stderr": 0.02659393910184408,
            "acc_norm": 0.25555555555555554,
            "acc_norm_stderr": 0.02659393910184408
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2913907284768212,
            "acc_stderr": 0.03710185726119995,
            "acc_norm": 0.2913907284768212,
            "acc_norm_stderr": 0.03710185726119995
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.20398009950248755,
            "acc_stderr": 0.02849317624532609,
            "acc_norm": 0.20398009950248755,
            "acc_norm_stderr": 0.02849317624532609
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.23121387283236994,
            "acc_stderr": 0.03214737302029469,
            "acc_norm": 0.23121387283236994,
            "acc_norm_stderr": 0.03214737302029469
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2857142857142857,
            "acc_stderr": 0.023266512213730575,
            "acc_norm": 0.2857142857142857,
            "acc_norm_stderr": 0.023266512213730575
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.034765901043041336,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.034765901043041336
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.16,
            "acc_stderr": 0.036845294917747094,
            "acc_norm": 0.16,
            "acc_norm_stderr": 0.036845294917747094
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421276,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421276
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.26011560693641617,
            "acc_stderr": 0.02361867831006937,
            "acc_norm": 0.26011560693641617,
            "acc_norm_stderr": 0.02361867831006937
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3067484662576687,
            "acc_stderr": 0.036230899157241474,
            "acc_norm": 0.3067484662576687,
            "acc_norm_stderr": 0.036230899157241474
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.2839506172839506,
            "acc_stderr": 0.02508947852376513,
            "acc_norm": 0.2839506172839506,
            "acc_norm_stderr": 0.02508947852376513
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.23834196891191708,
            "acc_stderr": 0.030748905363909895,
            "acc_norm": 0.23834196891191708,
            "acc_norm_stderr": 0.030748905363909895
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.23684210526315788,
            "acc_stderr": 0.03999423879281336,
            "acc_norm": 0.23684210526315788,
            "acc_norm_stderr": 0.03999423879281336
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.24587155963302754,
            "acc_stderr": 0.018461940968708457,
            "acc_norm": 0.24587155963302754,
            "acc_norm_stderr": 0.018461940968708457
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.18253968253968253,
            "acc_stderr": 0.03455071019102148,
            "acc_norm": 0.18253968253968253,
            "acc_norm_stderr": 0.03455071019102148
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.25163398692810457,
            "acc_stderr": 0.024848018263875195,
            "acc_norm": 0.25163398692810457,
            "acc_norm_stderr": 0.024848018263875195
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816506,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816506
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.33884297520661155,
            "acc_stderr": 0.04320767807536669,
            "acc_norm": 0.33884297520661155,
            "acc_norm_stderr": 0.04320767807536669
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.2565789473684211,
            "acc_stderr": 0.0355418036802569,
            "acc_norm": 0.2565789473684211,
            "acc_norm_stderr": 0.0355418036802569
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.25980392156862747,
            "acc_stderr": 0.017740899509177788,
            "acc_norm": 0.25980392156862747,
            "acc_norm_stderr": 0.017740899509177788
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.19148936170212766,
            "acc_stderr": 0.023472645247949425,
            "acc_norm": 0.19148936170212766,
            "acc_norm_stderr": 0.023472645247949425
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.3392857142857143,
            "acc_stderr": 0.0449394906861354,
            "acc_norm": 0.3392857142857143,
            "acc_norm_stderr": 0.0449394906861354
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.20833333333333334,
            "acc_stderr": 0.027696910713093936,
            "acc_norm": 0.20833333333333334,
            "acc_norm_stderr": 0.027696910713093936
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.24581005586592178,
            "acc_stderr": 0.014400296429225606,
            "acc_norm": 0.24581005586592178,
            "acc_norm_stderr": 0.014400296429225606
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.29,
            "acc_stderr": 0.04560480215720684,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.04560480215720684
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.2757352941176471,
            "acc_stderr": 0.027146271936625162,
            "acc_norm": 0.2757352941176471,
            "acc_norm_stderr": 0.027146271936625162
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.24489795918367346,
            "acc_stderr": 0.027529637440174934,
            "acc_norm": 0.24489795918367346,
            "acc_norm_stderr": 0.027529637440174934
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.28270042194092826,
            "acc_stderr": 0.029312814153955914,
            "acc_norm": 0.28270042194092826,
            "acc_norm_stderr": 0.029312814153955914
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2542372881355932,
            "acc_stderr": 0.011121129007840664,
            "acc_norm": 0.2542372881355932,
            "acc_norm_stderr": 0.011121129007840664
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.24509803921568626,
            "acc_stderr": 0.03019028245350195,
            "acc_norm": 0.24509803921568626,
            "acc_norm_stderr": 0.03019028245350195
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.21818181818181817,
            "acc_stderr": 0.03225078108306289,
            "acc_norm": 0.21818181818181817,
            "acc_norm_stderr": 0.03225078108306289
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2839657282741738,
            "mc1_stderr": 0.01578537085839671,
            "mc2": 0.4444330897605926,
            "mc2_stderr": 0.015483222855074748
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.5176056338028169,
            "acc_stderr": 0.017129150724246804,
            "acc_norm": 0.5727699530516432,
            "acc_norm_stderr": 0.016957281432341424
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "lcw99/polyglot-ko-12.8b-chang-instruct-chat",
        "model_sha": "a16de096eb135e66b90314e5ab84116c9f0f9d1b",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}