{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.2901023890784983, "acc_stderr": 0.01326157367752077, "acc_norm": 0.34812286689419797, "acc_norm_stderr": 0.013921008595179335 }, "harness|ko_hellaswag|10": { "acc": 0.35012945628360886, "acc_stderr": 0.004760354191370866, "acc_norm": 0.4374626568412667, "acc_norm_stderr": 0.0049505983006675565 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.4269005847953216, "acc_stderr": 0.03793620616529917, "acc_norm": 0.4269005847953216, "acc_norm_stderr": 0.03793620616529917 }, "harness|ko_mmlu_management|5": { "acc": 0.39805825242718446, "acc_stderr": 0.0484674825397724, "acc_norm": 0.39805825242718446, "acc_norm_stderr": 0.0484674825397724 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.41379310344827586, "acc_stderr": 0.017612204084663775, "acc_norm": 0.41379310344827586, "acc_norm_stderr": 0.017612204084663775 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.2962962962962963, "acc_stderr": 0.039446241625011175, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.039446241625011175 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.35319148936170214, "acc_stderr": 0.031245325202761926, "acc_norm": 0.35319148936170214, "acc_norm_stderr": 0.031245325202761926 }, "harness|ko_mmlu_virology|5": { "acc": 0.35542168674698793, "acc_stderr": 0.03726214354322415, "acc_norm": 0.35542168674698793, "acc_norm_stderr": 0.03726214354322415 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.3858520900321543, "acc_stderr": 0.027648149599751464, "acc_norm": 0.3858520900321543, "acc_norm_stderr": 0.027648149599751464 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.38565022421524664, "acc_stderr": 0.03266842214289201, "acc_norm": 0.38565022421524664, "acc_norm_stderr": 0.03266842214289201 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.33587786259541985, "acc_stderr": 0.041423137719966634, "acc_norm": 0.33587786259541985, "acc_norm_stderr": 0.041423137719966634 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.3484848484848485, "acc_stderr": 0.03394853965156402, "acc_norm": 0.3484848484848485, "acc_norm_stderr": 0.03394853965156402 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.296551724137931, "acc_stderr": 0.03806142687309993, "acc_norm": 0.296551724137931, "acc_norm_stderr": 0.03806142687309993 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.30392156862745096, "acc_stderr": 0.045766654032077636, "acc_norm": 0.30392156862745096, "acc_norm_stderr": 0.045766654032077636 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.31092436974789917, "acc_stderr": 0.03006676158297793, "acc_norm": 0.31092436974789917, "acc_norm_stderr": 0.03006676158297793 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.36666666666666664, "acc_stderr": 0.024433016466052452, "acc_norm": 0.36666666666666664, "acc_norm_stderr": 0.024433016466052452 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.51, "acc_stderr": 0.05024183937956913, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956913 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.42592592592592593, "acc_stderr": 0.0478034362693679, "acc_norm": 
0.42592592592592593, "acc_norm_stderr": 0.0478034362693679 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3497536945812808, "acc_stderr": 0.03355400904969565, "acc_norm": 0.3497536945812808, "acc_norm_stderr": 0.03355400904969565 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.3935483870967742, "acc_stderr": 0.027791878753132274, "acc_norm": 0.3935483870967742, "acc_norm_stderr": 0.027791878753132274 }, "harness|ko_mmlu_marketing|5": { "acc": 0.5427350427350427, "acc_stderr": 0.03263622596380688, "acc_norm": 0.5427350427350427, "acc_norm_stderr": 0.03263622596380688 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.3018867924528302, "acc_stderr": 0.028254200344438662, "acc_norm": 0.3018867924528302, "acc_norm_stderr": 0.028254200344438662 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.4, "acc_stderr": 0.0469237132203465, "acc_norm": 0.4, "acc_norm_stderr": 0.0469237132203465 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3296296296296296, "acc_stderr": 0.028661201116524586, "acc_norm": 0.3296296296296296, "acc_norm_stderr": 0.028661201116524586 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2847682119205298, "acc_stderr": 0.03684881521389024, "acc_norm": 0.2847682119205298, "acc_norm_stderr": 0.03684881521389024 }, "harness|ko_mmlu_sociology|5": { "acc": 0.4427860696517413, "acc_stderr": 0.03512310964123937, "acc_norm": 0.4427860696517413, "acc_norm_stderr": 0.03512310964123937 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3236994219653179, "acc_stderr": 0.0356760379963917, "acc_norm": 0.3236994219653179, "acc_norm_stderr": 0.0356760379963917 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.36507936507936506, "acc_stderr": 0.02479606060269995, "acc_norm": 0.36507936507936506, "acc_norm_stderr": 0.02479606060269995 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2847222222222222, "acc_stderr": 0.03773809990686935, "acc_norm": 0.2847222222222222, "acc_norm_stderr": 0.03773809990686935 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.38439306358381503, "acc_stderr": 0.026189666966272035, "acc_norm": 0.38439306358381503, "acc_norm_stderr": 0.026189666966272035 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.34355828220858897, "acc_stderr": 0.037311335196738925, "acc_norm": 0.34355828220858897, "acc_norm_stderr": 0.037311335196738925 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.35185185185185186, "acc_stderr": 0.026571483480719967, "acc_norm": 0.35185185185185186, "acc_norm_stderr": 0.026571483480719967 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.38860103626943004, "acc_stderr": 0.035177397963731316, "acc_norm": 0.38860103626943004, "acc_norm_stderr": 0.035177397963731316 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2719298245614035, "acc_stderr": 0.04185774424022056, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.04185774424022056 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.3908256880733945, "acc_stderr": 0.020920058346111065, "acc_norm": 0.3908256880733945, "acc_norm_stderr": 0.020920058346111065 }, "harness|ko_mmlu_formal_logic|5": 
{ "acc": 0.3253968253968254, "acc_stderr": 0.041905964388711366, "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.041905964388711366 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.3660130718954248, "acc_stderr": 0.027582811415159624, "acc_norm": 0.3660130718954248, "acc_norm_stderr": 0.027582811415159624 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|ko_mmlu_international_law|5": { "acc": 0.48760330578512395, "acc_stderr": 0.04562951548180765, "acc_norm": 0.48760330578512395, "acc_norm_stderr": 0.04562951548180765 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.3684210526315789, "acc_stderr": 0.03925523381052932, "acc_norm": 0.3684210526315789, "acc_norm_stderr": 0.03925523381052932 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3104575163398693, "acc_stderr": 0.01871806705262323, "acc_norm": 0.3104575163398693, "acc_norm_stderr": 0.01871806705262323 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2872340425531915, "acc_stderr": 0.026992199173064356, "acc_norm": 0.2872340425531915, "acc_norm_stderr": 0.026992199173064356 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.29464285714285715, "acc_stderr": 0.0432704093257873, "acc_norm": 0.29464285714285715, "acc_norm_stderr": 0.0432704093257873 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.2824074074074074, "acc_stderr": 0.030701372111510927, "acc_norm": 0.2824074074074074, "acc_norm_stderr": 0.030701372111510927 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.21564245810055865, "acc_stderr": 0.013754835975482336, "acc_norm": 0.21564245810055865, "acc_norm_stderr": 0.013754835975482336 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.33455882352941174, "acc_stderr": 0.028661996202335307, "acc_norm": 0.33455882352941174, "acc_norm_stderr": 0.028661996202335307 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.3224489795918367, "acc_stderr": 0.029923100563683906, "acc_norm": 0.3224489795918367, "acc_norm_stderr": 0.029923100563683906 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.4641350210970464, "acc_stderr": 0.03246338898055659, "acc_norm": 0.4641350210970464, "acc_norm_stderr": 0.03246338898055659 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.2907431551499348, "acc_stderr": 0.011598062372851974, "acc_norm": 0.2907431551499348, "acc_norm_stderr": 0.011598062372851974 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.37745098039215685, "acc_stderr": 0.03402272044340705, "acc_norm": 0.37745098039215685, "acc_norm_stderr": 0.03402272044340705 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.3939393939393939, "acc_stderr": 0.0381549430868893, "acc_norm": 0.3939393939393939, "acc_norm_stderr": 0.0381549430868893 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.29008567931456547, "mc1_stderr": 0.01588623687420952, "mc2": 0.4699106773315303, "mc2_stderr": 0.01582978440702906 }, "harness|ko_commongen_v2|2": { "acc": 0.22904368358913813, "acc_stderr": 0.014447372277253822, "acc_norm": 0.24557260920897284, "acc_norm_stderr": 0.014798357154972826 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, 
"harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "caisarl76/Mistral-7B-eng-kor-cot-combined", "model_sha": "d7e959c88fdc316602494d1ffd2bf52d33371f89", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }