{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.31399317406143346,
            "acc_stderr": 0.013562691224726284,
            "acc_norm": 0.378839590443686,
            "acc_norm_stderr": 0.014175915490000322
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3615813582951603,
            "acc_stderr": 0.00479476484368527,
            "acc_norm": 0.4645488946425015,
            "acc_norm_stderr": 0.004977223485342025
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4269005847953216,
            "acc_stderr": 0.03793620616529918,
            "acc_norm": 0.4269005847953216,
            "acc_norm_stderr": 0.03793620616529918
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.39805825242718446,
            "acc_stderr": 0.0484674825397724,
            "acc_norm": 0.39805825242718446,
            "acc_norm_stderr": 0.0484674825397724
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5006385696040868,
            "acc_stderr": 0.01787994891443169,
            "acc_norm": 0.5006385696040868,
            "acc_norm_stderr": 0.01787994891443169
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4,
            "acc_stderr": 0.042320736951515885,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.042320736951515885
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.39148936170212767,
            "acc_stderr": 0.03190701242326812,
            "acc_norm": 0.39148936170212767,
            "acc_norm_stderr": 0.03190701242326812
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3313253012048193,
            "acc_stderr": 0.03664314777288087,
            "acc_norm": 0.3313253012048193,
            "acc_norm_stderr": 0.03664314777288087
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4662379421221865,
            "acc_stderr": 0.028333277109562793,
            "acc_norm": 0.4662379421221865,
            "acc_norm_stderr": 0.028333277109562793
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.3991031390134529,
            "acc_stderr": 0.03286745312567961,
            "acc_norm": 0.3991031390134529,
            "acc_norm_stderr": 0.03286745312567961
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4351145038167939,
            "acc_stderr": 0.04348208051644858,
            "acc_norm": 0.4351145038167939,
            "acc_norm_stderr": 0.04348208051644858
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.4494949494949495,
            "acc_stderr": 0.0354413249194797,
            "acc_norm": 0.4494949494949495,
            "acc_norm_stderr": 0.0354413249194797
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.42758620689655175,
            "acc_stderr": 0.04122737111370333,
            "acc_norm": 0.42758620689655175,
            "acc_norm_stderr": 0.04122737111370333
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.19607843137254902,
            "acc_stderr": 0.03950581861179962,
            "acc_norm": 0.19607843137254902,
            "acc_norm_stderr": 0.03950581861179962
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.3739495798319328,
            "acc_stderr": 0.031429466378837076,
            "acc_norm": 0.3739495798319328,
            "acc_norm_stderr": 0.031429466378837076
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.35384615384615387,
            "acc_stderr": 0.024243783994062188,
            "acc_norm": 0.35384615384615387,
            "acc_norm_stderr": 0.024243783994062188
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.49,
            "acc_stderr": 0.05024183937956912,
            "acc_norm": 0.49,
            "acc_norm_stderr": 0.05024183937956912
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.4722222222222222,
            "acc_stderr": 0.048262172941398944,
            "acc_norm": 0.4722222222222222,
            "acc_norm_stderr": 0.048262172941398944
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.39901477832512317,
            "acc_stderr": 0.03445487686264715,
            "acc_norm": 0.39901477832512317,
            "acc_norm_stderr": 0.03445487686264715
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.45161290322580644,
            "acc_stderr": 0.02831050034856839,
            "acc_norm": 0.45161290322580644,
            "acc_norm_stderr": 0.02831050034856839
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.5683760683760684,
            "acc_stderr": 0.0324483553531149,
            "acc_norm": 0.5683760683760684,
            "acc_norm_stderr": 0.0324483553531149
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.3849056603773585,
            "acc_stderr": 0.029946498567699948,
            "acc_norm": 0.3849056603773585,
            "acc_norm_stderr": 0.029946498567699948
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.44545454545454544,
            "acc_stderr": 0.047605488214603246,
            "acc_norm": 0.44545454545454544,
            "acc_norm_stderr": 0.047605488214603246
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.24074074074074073,
            "acc_stderr": 0.0260671592222758,
            "acc_norm": 0.24074074074074073,
            "acc_norm_stderr": 0.0260671592222758
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.23841059602649006,
            "acc_stderr": 0.03479185572599661,
            "acc_norm": 0.23841059602649006,
            "acc_norm_stderr": 0.03479185572599661
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5223880597014925,
            "acc_stderr": 0.035319879302087305,
            "acc_norm": 0.5223880597014925,
            "acc_norm_stderr": 0.035319879302087305
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3063583815028902,
            "acc_stderr": 0.035149425512674394,
            "acc_norm": 0.3063583815028902,
            "acc_norm_stderr": 0.035149425512674394
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2671957671957672,
            "acc_stderr": 0.022789673145776568,
            "acc_norm": 0.2671957671957672,
            "acc_norm_stderr": 0.022789673145776568
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3194444444444444,
            "acc_stderr": 0.03899073687357336,
            "acc_norm": 0.3194444444444444,
            "acc_norm_stderr": 0.03899073687357336
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816506,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816506
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.54,
            "acc_stderr": 0.05009082659620333,
            "acc_norm": 0.54,
            "acc_norm_stderr": 0.05009082659620333
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.4161849710982659,
            "acc_stderr": 0.026538189104705484,
            "acc_norm": 0.4161849710982659,
            "acc_norm_stderr": 0.026538189104705484
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3987730061349693,
            "acc_stderr": 0.03847021420456023,
            "acc_norm": 0.3987730061349693,
            "acc_norm_stderr": 0.03847021420456023
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.43209876543209874,
            "acc_stderr": 0.027563010971606672,
            "acc_norm": 0.43209876543209874,
            "acc_norm_stderr": 0.027563010971606672
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.2,
            "acc_stderr": 0.04020151261036845,
            "acc_norm": 0.2,
            "acc_norm_stderr": 0.04020151261036845
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.44559585492227977,
            "acc_stderr": 0.035870149860756595,
            "acc_norm": 0.44559585492227977,
            "acc_norm_stderr": 0.035870149860756595
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.32456140350877194,
            "acc_stderr": 0.04404556157374768,
            "acc_norm": 0.32456140350877194,
            "acc_norm_stderr": 0.04404556157374768
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.4091743119266055,
            "acc_stderr": 0.02108067026443373,
            "acc_norm": 0.4091743119266055,
            "acc_norm_stderr": 0.02108067026443373
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2619047619047619,
            "acc_stderr": 0.039325376803928704,
            "acc_norm": 0.2619047619047619,
            "acc_norm_stderr": 0.039325376803928704
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4019607843137255,
            "acc_stderr": 0.028074158947600663,
            "acc_norm": 0.4019607843137255,
            "acc_norm_stderr": 0.028074158947600663
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.35,
            "acc_stderr": 0.047937248544110196,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.047937248544110196
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5950413223140496,
            "acc_stderr": 0.04481137755942469,
            "acc_norm": 0.5950413223140496,
            "acc_norm_stderr": 0.04481137755942469
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.42105263157894735,
            "acc_stderr": 0.040179012759817494,
            "acc_norm": 0.42105263157894735,
            "acc_norm_stderr": 0.040179012759817494
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3366013071895425,
            "acc_stderr": 0.01911721391149515,
            "acc_norm": 0.3366013071895425,
            "acc_norm_stderr": 0.01911721391149515
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3120567375886525,
            "acc_stderr": 0.027640120545169938,
            "acc_norm": 0.3120567375886525,
            "acc_norm_stderr": 0.027640120545169938
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.25892857142857145,
            "acc_stderr": 0.041577515398656284,
            "acc_norm": 0.25892857142857145,
            "acc_norm_stderr": 0.041577515398656284
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.24537037037037038,
            "acc_stderr": 0.029346665094372927,
            "acc_norm": 0.24537037037037038,
            "acc_norm_stderr": 0.029346665094372927
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2424581005586592,
            "acc_stderr": 0.01433352205921789,
            "acc_norm": 0.2424581005586592,
            "acc_norm_stderr": 0.01433352205921789
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421276,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421276
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.23897058823529413,
            "acc_stderr": 0.025905280644893006,
            "acc_norm": 0.23897058823529413,
            "acc_norm_stderr": 0.025905280644893006
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.3020408163265306,
            "acc_stderr": 0.02939360931987982,
            "acc_norm": 0.3020408163265306,
            "acc_norm_stderr": 0.02939360931987982
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.4388185654008439,
            "acc_stderr": 0.032302649315470375,
            "acc_norm": 0.4388185654008439,
            "acc_norm_stderr": 0.032302649315470375
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.288135593220339,
            "acc_stderr": 0.011567140661324561,
            "acc_norm": 0.288135593220339,
            "acc_norm_stderr": 0.011567140661324561
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.37254901960784315,
            "acc_stderr": 0.03393388584958404,
            "acc_norm": 0.37254901960784315,
            "acc_norm_stderr": 0.03393388584958404
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.41818181818181815,
            "acc_stderr": 0.038517163193983954,
            "acc_norm": 0.41818181818181815,
            "acc_norm_stderr": 0.038517163193983954
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2558139534883721,
            "mc1_stderr": 0.015274176219283345,
            "mc2": 0.4196251845895743,
            "mc2_stderr": 0.015120881881369678
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4214876033057851,
            "acc_stderr": 0.016977101932601518,
            "acc_norm": 0.5277449822904369,
            "acc_norm_stderr": 0.01716386797945601
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "AIdenU/LLAMA-2-13b-ko-Y24-DPO_v2.1",
        "model_sha": "c0f610c27136d4e990134245cb7c9f93e8ceb400",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}