|
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.30119453924914674,
            "acc_stderr": 0.013406741767847626,
            "acc_norm": 0.3583617747440273,
            "acc_norm_stderr": 0.014012883334859859
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3458474407488548,
            "acc_stderr": 0.004746716805735756,
            "acc_norm": 0.42939653455486954,
            "acc_norm_stderr": 0.004939784311448984
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.2807017543859649,
            "acc_stderr": 0.03446296217088426,
            "acc_norm": 0.2807017543859649,
            "acc_norm_stderr": 0.03446296217088426
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.1941747572815534,
            "acc_stderr": 0.03916667762822584,
            "acc_norm": 0.1941747572815534,
            "acc_norm_stderr": 0.03916667762822584
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.29757343550446996,
            "acc_stderr": 0.016349111912909418,
            "acc_norm": 0.29757343550446996,
            "acc_norm_stderr": 0.016349111912909418
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.2740740740740741,
            "acc_stderr": 0.03853254836552003,
            "acc_norm": 0.2740740740740741,
            "acc_norm_stderr": 0.03853254836552003
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.2936170212765957,
            "acc_stderr": 0.029771642712491227,
            "acc_norm": 0.2936170212765957,
            "acc_norm_stderr": 0.029771642712491227
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.26506024096385544,
            "acc_stderr": 0.03436024037944967,
            "acc_norm": 0.26506024096385544,
            "acc_norm_stderr": 0.03436024037944967
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.2733118971061093,
            "acc_stderr": 0.02531176597542612,
            "acc_norm": 0.2733118971061093,
            "acc_norm_stderr": 0.02531176597542612
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.23318385650224216,
            "acc_stderr": 0.02838039114709472,
            "acc_norm": 0.23318385650224216,
            "acc_norm_stderr": 0.02838039114709472
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.24427480916030533,
            "acc_stderr": 0.03768335959728742,
            "acc_norm": 0.24427480916030533,
            "acc_norm_stderr": 0.03768335959728742
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.2,
            "acc_stderr": 0.04020151261036845,
            "acc_norm": 0.2,
            "acc_norm_stderr": 0.04020151261036845
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.25757575757575757,
            "acc_stderr": 0.03115626951964684,
            "acc_norm": 0.25757575757575757,
            "acc_norm_stderr": 0.03115626951964684
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.2896551724137931,
            "acc_stderr": 0.03780019230438015,
            "acc_norm": 0.2896551724137931,
            "acc_norm_stderr": 0.03780019230438015
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.12745098039215685,
            "acc_stderr": 0.03318224921942077,
            "acc_norm": 0.12745098039215685,
            "acc_norm_stderr": 0.03318224921942077
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.2605042016806723,
            "acc_stderr": 0.028510251512341933,
            "acc_norm": 0.2605042016806723,
            "acc_norm_stderr": 0.028510251512341933
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.2205128205128205,
            "acc_stderr": 0.021020672680827912,
            "acc_norm": 0.2205128205128205,
            "acc_norm_stderr": 0.021020672680827912
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542127,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542127
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.26851851851851855,
            "acc_stderr": 0.04284467968052191,
            "acc_norm": 0.26851851851851855,
            "acc_norm_stderr": 0.04284467968052191
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.2561576354679803,
            "acc_stderr": 0.0307127300709826,
            "acc_norm": 0.2561576354679803,
            "acc_norm_stderr": 0.0307127300709826
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.29354838709677417,
            "acc_stderr": 0.025906087021319295,
            "acc_norm": 0.29354838709677417,
            "acc_norm_stderr": 0.025906087021319295
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.26495726495726496,
            "acc_stderr": 0.028911208802749465,
            "acc_norm": 0.26495726495726496,
            "acc_norm_stderr": 0.028911208802749465
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.23773584905660378,
            "acc_stderr": 0.026199808807561932,
            "acc_norm": 0.23773584905660378,
            "acc_norm_stderr": 0.026199808807561932
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.32727272727272727,
            "acc_stderr": 0.04494290866252089,
            "acc_norm": 0.32727272727272727,
            "acc_norm_stderr": 0.04494290866252089
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.27037037037037037,
            "acc_stderr": 0.027080372815145658,
            "acc_norm": 0.27037037037037037,
            "acc_norm_stderr": 0.027080372815145658
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.23841059602649006,
            "acc_stderr": 0.03479185572599659,
            "acc_norm": 0.23841059602649006,
            "acc_norm_stderr": 0.03479185572599659
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.29850746268656714,
            "acc_stderr": 0.032357437893550424,
            "acc_norm": 0.29850746268656714,
            "acc_norm_stderr": 0.032357437893550424
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.2254335260115607,
            "acc_stderr": 0.031862098516411454,
            "acc_norm": 0.2254335260115607,
            "acc_norm_stderr": 0.031862098516411454
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.21428571428571427,
            "acc_stderr": 0.021132859182754444,
            "acc_norm": 0.21428571428571427,
            "acc_norm_stderr": 0.021132859182754444
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.19444444444444445,
            "acc_stderr": 0.03309615177059007,
            "acc_norm": 0.19444444444444445,
            "acc_norm_stderr": 0.03309615177059007
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.16,
            "acc_stderr": 0.0368452949177471,
            "acc_norm": 0.16,
            "acc_norm_stderr": 0.0368452949177471
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.29,
            "acc_stderr": 0.04560480215720685,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.04560480215720685
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.2774566473988439,
            "acc_stderr": 0.024105712607754307,
            "acc_norm": 0.2774566473988439,
            "acc_norm_stderr": 0.024105712607754307
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.24539877300613497,
            "acc_stderr": 0.03380939813943354,
            "acc_norm": 0.24539877300613497,
            "acc_norm_stderr": 0.03380939813943354
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.3117283950617284,
            "acc_stderr": 0.02577311116963045,
            "acc_norm": 0.3117283950617284,
            "acc_norm_stderr": 0.02577311116963045
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.28,
            "acc_stderr": 0.04512608598542126,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.04512608598542126
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.21761658031088082,
            "acc_stderr": 0.02977866303775295,
            "acc_norm": 0.21761658031088082,
            "acc_norm_stderr": 0.02977866303775295
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.24561403508771928,
            "acc_stderr": 0.04049339297748141,
            "acc_norm": 0.24561403508771928,
            "acc_norm_stderr": 0.04049339297748141
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.23486238532110093,
            "acc_stderr": 0.018175110510343585,
            "acc_norm": 0.23486238532110093,
            "acc_norm_stderr": 0.018175110510343585
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2619047619047619,
            "acc_stderr": 0.03932537680392871,
            "acc_norm": 0.2619047619047619,
            "acc_norm_stderr": 0.03932537680392871
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.3137254901960784,
            "acc_stderr": 0.02656892101545715,
            "acc_norm": 0.3137254901960784,
            "acc_norm_stderr": 0.02656892101545715
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.29,
            "acc_stderr": 0.04560480215720684,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.04560480215720684
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.33884297520661155,
            "acc_stderr": 0.04320767807536669,
            "acc_norm": 0.33884297520661155,
            "acc_norm_stderr": 0.04320767807536669
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.29605263157894735,
            "acc_stderr": 0.03715062154998905,
            "acc_norm": 0.29605263157894735,
            "acc_norm_stderr": 0.03715062154998905
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.29411764705882354,
            "acc_stderr": 0.0184334276494019,
            "acc_norm": 0.29411764705882354,
            "acc_norm_stderr": 0.0184334276494019
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2872340425531915,
            "acc_stderr": 0.026992199173064356,
            "acc_norm": 0.2872340425531915,
            "acc_norm_stderr": 0.026992199173064356
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.2767857142857143,
            "acc_stderr": 0.042466243366976256,
            "acc_norm": 0.2767857142857143,
            "acc_norm_stderr": 0.042466243366976256
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.20833333333333334,
            "acc_stderr": 0.027696910713093936,
            "acc_norm": 0.20833333333333334,
            "acc_norm_stderr": 0.027696910713093936
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2435754189944134,
            "acc_stderr": 0.01435591196476786,
            "acc_norm": 0.2435754189944134,
            "acc_norm_stderr": 0.01435591196476786
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421276,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421276
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.18382352941176472,
            "acc_stderr": 0.023529242185193106,
            "acc_norm": 0.18382352941176472,
            "acc_norm_stderr": 0.023529242185193106
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.24081632653061225,
            "acc_stderr": 0.027372942201788163,
            "acc_norm": 0.24081632653061225,
            "acc_norm_stderr": 0.027372942201788163
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.2911392405063291,
            "acc_stderr": 0.02957160106575337,
            "acc_norm": 0.2911392405063291,
            "acc_norm_stderr": 0.02957160106575337
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.27640156453715775,
            "acc_stderr": 0.01142215319455358,
            "acc_norm": 0.27640156453715775,
            "acc_norm_stderr": 0.01142215319455358
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.24509803921568626,
            "acc_stderr": 0.030190282453501943,
            "acc_norm": 0.24509803921568626,
            "acc_norm_stderr": 0.030190282453501943
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.26666666666666666,
            "acc_stderr": 0.03453131801885416,
            "acc_norm": 0.26666666666666666,
            "acc_norm_stderr": 0.03453131801885416
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.22031823745410037,
            "mc1_stderr": 0.014509045171487283,
            "mc2": 0.3876715630562864,
            "mc2_stderr": 0.014780799577275159
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.2668240850059032,
            "acc_stderr": 0.015206575684565892,
            "acc_norm": 0.40731995277449823,
            "acc_norm_stderr": 0.01689245669519127
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Herry443/LLaMA2-ko-7B-KNUT-v0.1",
        "model_sha": "823d2fece402a057d1a68be83c80985d57a37471",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}