{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.2636518771331058,
            "acc_stderr": 0.012875929151297065,
            "acc_norm": 0.3122866894197952,
            "acc_norm_stderr": 0.013542598541688065
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.33100975901214896,
            "acc_stderr": 0.004696148339570981,
            "acc_norm": 0.4099780920135431,
            "acc_norm_stderr": 0.004908241354310212
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4444444444444444,
            "acc_stderr": 0.03811079669833531,
            "acc_norm": 0.4444444444444444,
            "acc_norm_stderr": 0.03811079669833531
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.2524271844660194,
            "acc_stderr": 0.04301250399690879,
            "acc_norm": 0.2524271844660194,
            "acc_norm_stderr": 0.04301250399690879
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.36398467432950193,
            "acc_stderr": 0.017205684809032232,
            "acc_norm": 0.36398467432950193,
            "acc_norm_stderr": 0.017205684809032232
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.2518518518518518,
            "acc_stderr": 0.03749850709174021,
            "acc_norm": 0.2518518518518518,
            "acc_norm_stderr": 0.03749850709174021
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542127,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542127
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.37872340425531914,
            "acc_stderr": 0.03170995606040655,
            "acc_norm": 0.37872340425531914,
            "acc_norm_stderr": 0.03170995606040655
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.25903614457831325,
            "acc_stderr": 0.03410646614071857,
            "acc_norm": 0.25903614457831325,
            "acc_norm_stderr": 0.03410646614071857
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.36012861736334406,
            "acc_stderr": 0.02726429759980402,
            "acc_norm": 0.36012861736334406,
            "acc_norm_stderr": 0.02726429759980402
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.3273542600896861,
            "acc_stderr": 0.03149384670994131,
            "acc_norm": 0.3273542600896861,
            "acc_norm_stderr": 0.03149384670994131
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.37404580152671757,
            "acc_stderr": 0.04243869242230524,
            "acc_norm": 0.37404580152671757,
            "acc_norm_stderr": 0.04243869242230524
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252605,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252605
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.29292929292929293,
            "acc_stderr": 0.032424979581788166,
            "acc_norm": 0.29292929292929293,
            "acc_norm_stderr": 0.032424979581788166
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.36551724137931035,
            "acc_stderr": 0.04013124195424387,
            "acc_norm": 0.36551724137931035,
            "acc_norm_stderr": 0.04013124195424387
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.2647058823529412,
            "acc_stderr": 0.04389869956808778,
            "acc_norm": 0.2647058823529412,
            "acc_norm_stderr": 0.04389869956808778
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.3319327731092437,
            "acc_stderr": 0.030588697013783663,
            "acc_norm": 0.3319327731092437,
            "acc_norm_stderr": 0.030588697013783663
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.2794871794871795,
            "acc_stderr": 0.022752388839776823,
            "acc_norm": 0.2794871794871795,
            "acc_norm_stderr": 0.022752388839776823
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939098,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939098
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.37962962962962965,
            "acc_stderr": 0.04691521224077742,
            "acc_norm": 0.37962962962962965,
            "acc_norm_stderr": 0.04691521224077742
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3103448275862069,
            "acc_stderr": 0.03255086769970103,
            "acc_norm": 0.3103448275862069,
            "acc_norm_stderr": 0.03255086769970103
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.3419354838709677,
            "acc_stderr": 0.02698528957655273,
            "acc_norm": 0.3419354838709677,
            "acc_norm_stderr": 0.02698528957655273
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.44017094017094016,
            "acc_stderr": 0.032520741720630506,
            "acc_norm": 0.44017094017094016,
            "acc_norm_stderr": 0.032520741720630506
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.3018867924528302,
            "acc_stderr": 0.028254200344438672,
            "acc_norm": 0.3018867924528302,
            "acc_norm_stderr": 0.028254200344438672
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.32727272727272727,
            "acc_stderr": 0.04494290866252088,
            "acc_norm": 0.32727272727272727,
            "acc_norm_stderr": 0.04494290866252088
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.2814814814814815,
            "acc_stderr": 0.02742001935094528,
            "acc_norm": 0.2814814814814815,
            "acc_norm_stderr": 0.02742001935094528
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.304635761589404,
            "acc_stderr": 0.03757949922943343,
            "acc_norm": 0.304635761589404,
            "acc_norm_stderr": 0.03757949922943343
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.4626865671641791,
            "acc_stderr": 0.03525675167467974,
            "acc_norm": 0.4626865671641791,
            "acc_norm_stderr": 0.03525675167467974
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.24855491329479767,
            "acc_stderr": 0.03295304696818318,
            "acc_norm": 0.24855491329479767,
            "acc_norm_stderr": 0.03295304696818318
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.25925925925925924,
            "acc_stderr": 0.022569897074918428,
            "acc_norm": 0.25925925925925924,
            "acc_norm_stderr": 0.022569897074918428
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3125,
            "acc_stderr": 0.038760854559127644,
            "acc_norm": 0.3125,
            "acc_norm_stderr": 0.038760854559127644
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252606,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252606
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.3468208092485549,
            "acc_stderr": 0.025624723994030454,
            "acc_norm": 0.3468208092485549,
            "acc_norm_stderr": 0.025624723994030454
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.26993865030674846,
            "acc_stderr": 0.034878251684978906,
            "acc_norm": 0.26993865030674846,
            "acc_norm_stderr": 0.034878251684978906
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.3425925925925926,
            "acc_stderr": 0.02640614597362568,
            "acc_norm": 0.3425925925925926,
            "acc_norm_stderr": 0.02640614597362568
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.32124352331606215,
            "acc_stderr": 0.03369950868549068,
            "acc_norm": 0.32124352331606215,
            "acc_norm_stderr": 0.03369950868549068
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2543859649122807,
            "acc_stderr": 0.0409698513984367,
            "acc_norm": 0.2543859649122807,
            "acc_norm_stderr": 0.0409698513984367
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3211009174311927,
            "acc_stderr": 0.020018149772733744,
            "acc_norm": 0.3211009174311927,
            "acc_norm_stderr": 0.020018149772733744
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2698412698412698,
            "acc_stderr": 0.03970158273235172,
            "acc_norm": 0.2698412698412698,
            "acc_norm_stderr": 0.03970158273235172
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.3431372549019608,
            "acc_stderr": 0.027184498909941613,
            "acc_norm": 0.3431372549019608,
            "acc_norm_stderr": 0.027184498909941613
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.49586776859504134,
            "acc_stderr": 0.04564198767432754,
            "acc_norm": 0.49586776859504134,
            "acc_norm_stderr": 0.04564198767432754
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3026315789473684,
            "acc_stderr": 0.037385206761196686,
            "acc_norm": 0.3026315789473684,
            "acc_norm_stderr": 0.037385206761196686
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.2908496732026144,
            "acc_stderr": 0.018373116915903966,
            "acc_norm": 0.2908496732026144,
            "acc_norm_stderr": 0.018373116915903966
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2907801418439716,
            "acc_stderr": 0.027090664368353178,
            "acc_norm": 0.2907801418439716,
            "acc_norm_stderr": 0.027090664368353178
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.26785714285714285,
            "acc_stderr": 0.04203277291467764,
            "acc_norm": 0.26785714285714285,
            "acc_norm_stderr": 0.04203277291467764
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.37037037037037035,
            "acc_stderr": 0.03293377139415191,
            "acc_norm": 0.37037037037037035,
            "acc_norm_stderr": 0.03293377139415191
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.24692737430167597,
            "acc_stderr": 0.014422292204808852,
            "acc_norm": 0.24692737430167597,
            "acc_norm_stderr": 0.014422292204808852
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.22058823529411764,
            "acc_stderr": 0.025187786660227248,
            "acc_norm": 0.22058823529411764,
            "acc_norm_stderr": 0.025187786660227248
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.35918367346938773,
            "acc_stderr": 0.030713560455108493,
            "acc_norm": 0.35918367346938773,
            "acc_norm_stderr": 0.030713560455108493
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.31223628691983124,
            "acc_stderr": 0.030165137867847,
            "acc_norm": 0.31223628691983124,
            "acc_norm_stderr": 0.030165137867847
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.25097783572359844,
            "acc_stderr": 0.011073730299187224,
            "acc_norm": 0.25097783572359844,
            "acc_norm_stderr": 0.011073730299187224
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.27941176470588236,
            "acc_stderr": 0.031493281045079556,
            "acc_norm": 0.27941176470588236,
            "acc_norm_stderr": 0.031493281045079556
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.296969696969697,
            "acc_stderr": 0.03567969772268049,
            "acc_norm": 0.296969696969697,
            "acc_norm_stderr": 0.03567969772268049
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.28886168910648713,
            "mc1_stderr": 0.0158663464013843,
            "mc2": 0.4504635842487325,
            "mc2_stderr": 0.01536359300418303
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.27036599763872493,
            "acc_stderr": 0.015270152942068406,
            "acc_norm": 0.35182998819362454,
            "acc_norm_stderr": 0.016418206451218057
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "cepiloth/ko-llama2-finetune-ex5",
        "model_sha": "72d3e9fcbf33373b484f2beb26751ac0bf06af65",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}