{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.26706484641638223, "acc_stderr": 0.01292893319649633, "acc_norm": 0.3046075085324232, "acc_norm_stderr": 0.013449522109932492 }, "harness|ko_hellaswag|10": { "acc": 0.3309101772555268, "acc_stderr": 0.004695791340502858, "acc_norm": 0.4010157339175463, "acc_norm_stderr": 0.0048910255336330226 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.3684210526315789, "acc_stderr": 0.036996580176568775, "acc_norm": 0.3684210526315789, "acc_norm_stderr": 0.036996580176568775 }, "harness|ko_mmlu_management|5": { "acc": 0.3883495145631068, "acc_stderr": 0.0482572933735639, "acc_norm": 0.3883495145631068, "acc_norm_stderr": 0.0482572933735639 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.2937420178799489, "acc_stderr": 0.016287759388491675, "acc_norm": 0.2937420178799489, "acc_norm_stderr": 0.016287759388491675 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.34814814814814815, "acc_stderr": 0.041153246103369526, "acc_norm": 0.34814814814814815, "acc_norm_stderr": 0.041153246103369526 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.04560480215720683, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720683 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.2297872340425532, "acc_stderr": 0.027501752944412424, "acc_norm": 0.2297872340425532, "acc_norm_stderr": 0.027501752944412424 }, "harness|ko_mmlu_virology|5": { "acc": 0.24096385542168675, "acc_stderr": 0.03329394119073532, "acc_norm": 0.24096385542168675, "acc_norm_stderr": 0.03329394119073532 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.2829581993569132, "acc_stderr": 0.02558306248998482, "acc_norm": 0.2829581993569132, "acc_norm_stderr": 0.02558306248998482 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.19730941704035873, "acc_stderr": 0.02670985334496796, "acc_norm": 0.19730941704035873, "acc_norm_stderr": 0.02670985334496796 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.3053435114503817, "acc_stderr": 0.040393149787245605, "acc_norm": 0.3053435114503817, "acc_norm_stderr": 0.040393149787245605 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.35858585858585856, "acc_stderr": 0.03416903640391521, "acc_norm": 0.35858585858585856, "acc_norm_stderr": 0.03416903640391521 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.25517241379310346, "acc_stderr": 0.03632984052707842, "acc_norm": 0.25517241379310346, "acc_norm_stderr": 0.03632984052707842 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.28431372549019607, "acc_stderr": 0.04488482852329017, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.04488482852329017 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.27310924369747897, "acc_stderr": 0.028942004040998167, "acc_norm": 0.27310924369747897, "acc_norm_stderr": 0.028942004040998167 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.3153846153846154, "acc_stderr": 0.023559646983189957, "acc_norm": 0.3153846153846154, "acc_norm_stderr": 0.023559646983189957 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.25, "acc_stderr": 0.04186091791394607, "acc_norm": 0.25, "acc_norm_stderr": 
0.04186091791394607 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3054187192118227, "acc_stderr": 0.03240661565868408, "acc_norm": 0.3054187192118227, "acc_norm_stderr": 0.03240661565868408 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.32903225806451614, "acc_stderr": 0.02672949906834996, "acc_norm": 0.32903225806451614, "acc_norm_stderr": 0.02672949906834996 }, "harness|ko_mmlu_marketing|5": { "acc": 0.38461538461538464, "acc_stderr": 0.03187195347942466, "acc_norm": 0.38461538461538464, "acc_norm_stderr": 0.03187195347942466 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.2981132075471698, "acc_stderr": 0.028152837942493857, "acc_norm": 0.2981132075471698, "acc_norm_stderr": 0.028152837942493857 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.2909090909090909, "acc_stderr": 0.04350271442923243, "acc_norm": 0.2909090909090909, "acc_norm_stderr": 0.04350271442923243 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2814814814814815, "acc_stderr": 0.027420019350945273, "acc_norm": 0.2814814814814815, "acc_norm_stderr": 0.027420019350945273 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.03757949922943342, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943342 }, "harness|ko_mmlu_sociology|5": { "acc": 0.36318407960199006, "acc_stderr": 0.034005985055990146, "acc_norm": 0.36318407960199006, "acc_norm_stderr": 0.034005985055990146 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.26011560693641617, "acc_stderr": 0.033450369167889904, "acc_norm": 0.26011560693641617, "acc_norm_stderr": 0.033450369167889904 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2830687830687831, "acc_stderr": 0.02320139293819498, "acc_norm": 0.2830687830687831, "acc_norm_stderr": 0.02320139293819498 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3263888888888889, "acc_stderr": 0.03921067198982266, "acc_norm": 0.3263888888888889, "acc_norm_stderr": 0.03921067198982266 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.2976878612716763, "acc_stderr": 0.024617055388677003, "acc_norm": 0.2976878612716763, "acc_norm_stderr": 0.024617055388677003 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3067484662576687, "acc_stderr": 0.036230899157241474, "acc_norm": 0.3067484662576687, "acc_norm_stderr": 0.036230899157241474 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.33641975308641975, "acc_stderr": 0.02628973494595293, "acc_norm": 0.33641975308641975, "acc_norm_stderr": 0.02628973494595293 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.32124352331606215, "acc_stderr": 0.033699508685490674, "acc_norm": 0.32124352331606215, "acc_norm_stderr": 0.033699508685490674 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.03999423879281336, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.03999423879281336 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.30091743119266057, "acc_stderr": 0.019664751366802114, "acc_norm": 0.30091743119266057, "acc_norm_stderr": 0.019664751366802114 }, "harness|ko_mmlu_formal_logic|5": { 
"acc": 0.30952380952380953, "acc_stderr": 0.04134913018303316, "acc_norm": 0.30952380952380953, "acc_norm_stderr": 0.04134913018303316 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.3366013071895425, "acc_stderr": 0.027057974624494382, "acc_norm": 0.3366013071895425, "acc_norm_stderr": 0.027057974624494382 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|ko_mmlu_international_law|5": { "acc": 0.2809917355371901, "acc_stderr": 0.041032038305145124, "acc_norm": 0.2809917355371901, "acc_norm_stderr": 0.041032038305145124 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.34210526315789475, "acc_stderr": 0.03860731599316091, "acc_norm": 0.34210526315789475, "acc_norm_stderr": 0.03860731599316091 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.2696078431372549, "acc_stderr": 0.017952449196987866, "acc_norm": 0.2696078431372549, "acc_norm_stderr": 0.017952449196987866 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2801418439716312, "acc_stderr": 0.026789172351140242, "acc_norm": 0.2801418439716312, "acc_norm_stderr": 0.026789172351140242 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.17857142857142858, "acc_stderr": 0.036352091215778065, "acc_norm": 0.17857142857142858, "acc_norm_stderr": 0.036352091215778065 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.38425925925925924, "acc_stderr": 0.03317354514310742, "acc_norm": 0.38425925925925924, "acc_norm_stderr": 0.03317354514310742 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.24804469273743016, "acc_stderr": 0.01444415780826145, "acc_norm": 0.24804469273743016, "acc_norm_stderr": 0.01444415780826145 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.22, "acc_stderr": 0.04163331998932267, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932267 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.16911764705882354, "acc_stderr": 0.02277086801011303, "acc_norm": 0.16911764705882354, "acc_norm_stderr": 0.02277086801011303 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.3306122448979592, "acc_stderr": 0.03011642629654059, "acc_norm": 0.3306122448979592, "acc_norm_stderr": 0.03011642629654059 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.2489451476793249, "acc_stderr": 0.028146970599422644, "acc_norm": 0.2489451476793249, "acc_norm_stderr": 0.028146970599422644 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.26988265971316816, "acc_stderr": 0.011337381084250404, "acc_norm": 0.26988265971316816, "acc_norm_stderr": 0.011337381084250404 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.27450980392156865, "acc_stderr": 0.0313217980308329, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.0313217980308329 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.26666666666666666, "acc_stderr": 0.03453131801885416, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.03453131801885416 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.27539779681762544, "mc1_stderr": 0.015638135667775523, "mc2": 0.45638880812290744, "mc2_stderr": 0.01588078280533526 }, "harness|ko_commongen_v2|2": { "acc": 0.24557260920897284, "acc_stderr": 0.014798357154972823, "acc_norm": 0.2987012987012987, "acc_norm_stderr": 0.015735657391438285 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 
0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "heegyu/LIMA2-7b-hf", "model_sha": "6a1aa59cb7624f059728840ce68b20b1070ebdcb", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }