{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3506825938566553, "acc_stderr": 0.01394463593072609, "acc_norm": 0.40784982935153585, "acc_norm_stderr": 0.014361097288449696 }, "harness|ko_hellaswag|10": { "acc": 0.3966341366261701, "acc_stderr": 0.004881990487628913, "acc_norm": 0.527185819557857, "acc_norm_stderr": 0.004982400368939667 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.49122807017543857, "acc_stderr": 0.038342347441649924, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.038342347441649924 }, "harness|ko_mmlu_management|5": { "acc": 0.6504854368932039, "acc_stderr": 0.047211885060971716, "acc_norm": 0.6504854368932039, "acc_norm_stderr": 0.047211885060971716 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5593869731800766, "acc_stderr": 0.01775339697390848, "acc_norm": 0.5593869731800766, "acc_norm_stderr": 0.01775339697390848 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.4740740740740741, "acc_stderr": 0.04313531696750574, "acc_norm": 0.4740740740740741, "acc_norm_stderr": 0.04313531696750574 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.04560480215720683, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720683 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.41702127659574467, "acc_stderr": 0.03223276266711712, "acc_norm": 0.41702127659574467, "acc_norm_stderr": 0.03223276266711712 }, "harness|ko_mmlu_virology|5": { "acc": 0.3855421686746988, "acc_stderr": 0.0378913442461155, "acc_norm": 0.3855421686746988, "acc_norm_stderr": 0.0378913442461155 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.5209003215434084, "acc_stderr": 0.02837327096106942, "acc_norm": 0.5209003215434084, "acc_norm_stderr": 0.02837327096106942 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.4349775784753363, "acc_stderr": 0.033272833702713445, "acc_norm": 0.4349775784753363, "acc_norm_stderr": 0.033272833702713445 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.5572519083969466, "acc_stderr": 0.04356447202665069, "acc_norm": 0.5572519083969466, "acc_norm_stderr": 0.04356447202665069 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.6212121212121212, "acc_stderr": 0.03456088731993747, "acc_norm": 0.6212121212121212, "acc_norm_stderr": 0.03456088731993747 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.43448275862068964, "acc_stderr": 0.04130740879555498, "acc_norm": 0.43448275862068964, "acc_norm_stderr": 0.04130740879555498 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.03950581861179964, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.03950581861179964 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.5042016806722689, "acc_stderr": 0.03247734334448111, "acc_norm": 0.5042016806722689, "acc_norm_stderr": 0.03247734334448111 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.4717948717948718, "acc_stderr": 0.0253106392549339, "acc_norm": 0.4717948717948718, "acc_norm_stderr": 0.0253106392549339 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.59, "acc_stderr": 0.049431107042371025, "acc_norm": 0.59, "acc_norm_stderr": 0.049431107042371025 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.4722222222222222, "acc_stderr": 0.04826217294139894, "acc_norm": 0.4722222222222222, 
"acc_norm_stderr": 0.04826217294139894 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.37438423645320196, "acc_stderr": 0.03405155380561952, "acc_norm": 0.37438423645320196, "acc_norm_stderr": 0.03405155380561952 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.46774193548387094, "acc_stderr": 0.02838474778881333, "acc_norm": 0.46774193548387094, "acc_norm_stderr": 0.02838474778881333 }, "harness|ko_mmlu_marketing|5": { "acc": 0.7094017094017094, "acc_stderr": 0.029745048572674078, "acc_norm": 0.7094017094017094, "acc_norm_stderr": 0.029745048572674078 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4867924528301887, "acc_stderr": 0.030762134874500482, "acc_norm": 0.4867924528301887, "acc_norm_stderr": 0.030762134874500482 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.4727272727272727, "acc_stderr": 0.04782001791380063, "acc_norm": 0.4727272727272727, "acc_norm_stderr": 0.04782001791380063 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2777777777777778, "acc_stderr": 0.027309140588230182, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.027309140588230182 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.32450331125827814, "acc_stderr": 0.038227469376587525, "acc_norm": 0.32450331125827814, "acc_norm_stderr": 0.038227469376587525 }, "harness|ko_mmlu_sociology|5": { "acc": 0.5373134328358209, "acc_stderr": 0.035256751674679745, "acc_norm": 0.5373134328358209, "acc_norm_stderr": 0.035256751674679745 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.4393063583815029, "acc_stderr": 0.03784271932887467, "acc_norm": 0.4393063583815029, "acc_norm_stderr": 0.03784271932887467 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.3386243386243386, "acc_stderr": 0.02437319786798305, "acc_norm": 0.3386243386243386, "acc_norm_stderr": 0.02437319786798305 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.4166666666666667, "acc_stderr": 0.041227287076512825, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.041227287076512825 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.26, "acc_stderr": 0.044084400227680794, "acc_norm": 0.26, "acc_norm_stderr": 0.044084400227680794 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.56, "acc_stderr": 0.049888765156985884, "acc_norm": 0.56, "acc_norm_stderr": 0.049888765156985884 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5317919075144508, "acc_stderr": 0.026864624366756653, "acc_norm": 0.5317919075144508, "acc_norm_stderr": 0.026864624366756653 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.50920245398773, "acc_stderr": 0.03927705600787443, "acc_norm": 0.50920245398773, "acc_norm_stderr": 0.03927705600787443 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.4506172839506173, "acc_stderr": 0.027684721415656203, "acc_norm": 0.4506172839506173, "acc_norm_stderr": 0.027684721415656203 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5699481865284974, "acc_stderr": 0.03572954333144808, "acc_norm": 0.5699481865284974, "acc_norm_stderr": 0.03572954333144808 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.3157894736842105, "acc_stderr": 0.04372748290278008, "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.04372748290278008 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.6275229357798165, "acc_stderr": 0.0207283684576385, "acc_norm": 0.6275229357798165, "acc_norm_stderr": 0.0207283684576385 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.30952380952380953, "acc_stderr": 0.04134913018303316, "acc_norm": 0.30952380952380953, "acc_norm_stderr": 0.04134913018303316 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.4117647058823529, "acc_stderr": 0.028180596328259293, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.028180596328259293 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|ko_mmlu_international_law|5": { "acc": 0.7024793388429752, "acc_stderr": 0.04173349148083499, "acc_norm": 0.7024793388429752, "acc_norm_stderr": 0.04173349148083499 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.40789473684210525, "acc_stderr": 0.0399930971277747, "acc_norm": 0.40789473684210525, "acc_norm_stderr": 0.0399930971277747 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.39215686274509803, "acc_stderr": 0.019751726508762626, "acc_norm": 0.39215686274509803, "acc_norm_stderr": 0.019751726508762626 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.31560283687943264, "acc_stderr": 0.02772498944950931, "acc_norm": 0.31560283687943264, "acc_norm_stderr": 0.02772498944950931 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.3482142857142857, "acc_stderr": 0.04521829902833585, "acc_norm": 0.3482142857142857, "acc_norm_stderr": 0.04521829902833585 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3055555555555556, "acc_stderr": 0.031415546294025425, "acc_norm": 0.3055555555555556, "acc_norm_stderr": 0.031415546294025425 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.25139664804469275, "acc_stderr": 0.01450897945355398, "acc_norm": 0.25139664804469275, "acc_norm_stderr": 0.01450897945355398 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.41, "acc_stderr": 0.04943110704237102, "acc_norm": 0.41, "acc_norm_stderr": 0.04943110704237102 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.3602941176470588, "acc_stderr": 0.029163128570670733, "acc_norm": 0.3602941176470588, "acc_norm_stderr": 0.029163128570670733 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.42857142857142855, "acc_stderr": 0.031680911612338825, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.031680911612338825 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5864978902953587, "acc_stderr": 0.03205649904851859, "acc_norm": 0.5864978902953587, "acc_norm_stderr": 0.03205649904851859 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.3259452411994785, "acc_stderr": 0.01197150729498278, "acc_norm": 0.3259452411994785, "acc_norm_stderr": 0.01197150729498278 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.5343137254901961, "acc_stderr": 0.03501038327635897, "acc_norm": 0.5343137254901961, "acc_norm_stderr": 0.03501038327635897 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5757575757575758, "acc_stderr": 0.038592681420702636, "acc_norm": 0.5757575757575758, "acc_norm_stderr": 0.038592681420702636 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.29253365973072215, "mc1_stderr": 0.015925597445286165, "mc2": 0.4452362200636334, "mc2_stderr": 0.01509557301201901 }, "harness|ko_commongen_v2|2": { "acc": 0.5301062573789846, "acc_stderr": 0.017159163590170223, "acc_norm": 0.5867768595041323, "acc_norm_stderr": 0.016929480234495226 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "DopeorNope/Yi_lee-v2-DPO-6B", "model_sha": "3c1d2d605a5c621cfa2351b4b9061519fde23730", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }