{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.4667235494880546, "acc_stderr": 0.014578995859605804, "acc_norm": 0.5358361774744027, "acc_norm_stderr": 0.014573813664735714 }, "harness|ko_hellaswag|10": { "acc": 0.4539932284405497, "acc_stderr": 0.004968613539309247, "acc_norm": 0.634833698466441, "acc_norm_stderr": 0.00480492760877314 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.6842105263157895, "acc_stderr": 0.03565079670708311, "acc_norm": 0.6842105263157895, "acc_norm_stderr": 0.03565079670708311 }, "harness|ko_mmlu_management|5": { "acc": 0.6893203883495146, "acc_stderr": 0.04582124160161551, "acc_norm": 0.6893203883495146, "acc_norm_stderr": 0.04582124160161551 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.7151979565772669, "acc_stderr": 0.016139174096522584, "acc_norm": 0.7151979565772669, "acc_norm_stderr": 0.016139174096522584 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.5037037037037037, "acc_stderr": 0.043192236258113324, "acc_norm": 0.5037037037037037, "acc_norm_stderr": 0.043192236258113324 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.5787234042553191, "acc_stderr": 0.032278345101462665, "acc_norm": 0.5787234042553191, "acc_norm_stderr": 0.032278345101462665 }, "harness|ko_mmlu_virology|5": { "acc": 0.5301204819277109, "acc_stderr": 0.03885425420866767, "acc_norm": 0.5301204819277109, "acc_norm_stderr": 0.03885425420866767 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.6430868167202572, "acc_stderr": 0.027210420375934023, "acc_norm": 0.6430868167202572, "acc_norm_stderr": 0.027210420375934023 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.6188340807174888, "acc_stderr": 0.03259625118416827, "acc_norm": 0.6188340807174888, "acc_norm_stderr": 0.03259625118416827 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.6335877862595419, "acc_stderr": 0.04225875451969639, "acc_norm": 0.6335877862595419, "acc_norm_stderr": 0.04225875451969639 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.49, "acc_stderr": 0.05024183937956913, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956913 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.7929292929292929, "acc_stderr": 0.02886977846026705, "acc_norm": 0.7929292929292929, "acc_norm_stderr": 0.02886977846026705 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.5241379310344828, "acc_stderr": 0.0416180850350153, "acc_norm": 0.5241379310344828, "acc_norm_stderr": 0.0416180850350153 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.3431372549019608, "acc_stderr": 0.04724007352383888, "acc_norm": 0.3431372549019608, "acc_norm_stderr": 0.04724007352383888 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.6512605042016807, "acc_stderr": 0.030956636328566548, "acc_norm": 0.6512605042016807, "acc_norm_stderr": 0.030956636328566548 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.5666666666666667, "acc_stderr": 0.0251246535258851, "acc_norm": 0.5666666666666667, "acc_norm_stderr": 0.0251246535258851 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.6, "acc_stderr": 0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.6574074074074074, "acc_stderr": 0.045879047413018105, "acc_norm": 0.6574074074074074, "acc_norm_stderr": 
0.045879047413018105 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.43842364532019706, "acc_stderr": 0.03491207857486518, "acc_norm": 0.43842364532019706, "acc_norm_stderr": 0.03491207857486518 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.6741935483870968, "acc_stderr": 0.0266620105785671, "acc_norm": 0.6741935483870968, "acc_norm_stderr": 0.0266620105785671 }, "harness|ko_mmlu_marketing|5": { "acc": 0.811965811965812, "acc_stderr": 0.025598193686652258, "acc_norm": 0.811965811965812, "acc_norm_stderr": 0.025598193686652258 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.5886792452830188, "acc_stderr": 0.030285009259009798, "acc_norm": 0.5886792452830188, "acc_norm_stderr": 0.030285009259009798 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.6454545454545455, "acc_stderr": 0.045820048415054174, "acc_norm": 0.6454545454545455, "acc_norm_stderr": 0.045820048415054174 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3074074074074074, "acc_stderr": 0.028133252578815632, "acc_norm": 0.3074074074074074, "acc_norm_stderr": 0.028133252578815632 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696 }, "harness|ko_mmlu_sociology|5": { "acc": 0.7412935323383084, "acc_stderr": 0.030965903123573033, "acc_norm": 0.7412935323383084, "acc_norm_stderr": 0.030965903123573033 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.5433526011560693, "acc_stderr": 0.03798106566014498, "acc_norm": 0.5433526011560693, "acc_norm_stderr": 0.03798106566014498 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.42328042328042326, "acc_stderr": 0.025446365634406783, "acc_norm": 0.42328042328042326, "acc_norm_stderr": 0.025446365634406783 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.5763888888888888, "acc_stderr": 0.041321250197233685, "acc_norm": 0.5763888888888888, "acc_norm_stderr": 0.041321250197233685 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.6242774566473989, "acc_stderr": 0.02607431485165708, "acc_norm": 0.6242774566473989, "acc_norm_stderr": 0.02607431485165708 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.5705521472392638, "acc_stderr": 0.038890666191127236, "acc_norm": 0.5705521472392638, "acc_norm_stderr": 0.038890666191127236 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.6635802469135802, "acc_stderr": 0.026289734945952926, "acc_norm": 0.6635802469135802, "acc_norm_stderr": 0.026289734945952926 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.7668393782383419, "acc_stderr": 0.03051611137147601, "acc_norm": 0.7668393782383419, "acc_norm_stderr": 0.03051611137147601 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.047028804320496165, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.047028804320496165 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.7247706422018348, "acc_stderr": 0.019149093743155196, "acc_norm": 0.7247706422018348, "acc_norm_stderr": 0.019149093743155196 }, "harness|ko_mmlu_formal_logic|5": 
{ "acc": 0.4365079365079365, "acc_stderr": 0.04435932892851466, "acc_norm": 0.4365079365079365, "acc_norm_stderr": 0.04435932892851466 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.6470588235294118, "acc_stderr": 0.027363593284684965, "acc_norm": 0.6470588235294118, "acc_norm_stderr": 0.027363593284684965 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.63, "acc_stderr": 0.048523658709391, "acc_norm": 0.63, "acc_norm_stderr": 0.048523658709391 }, "harness|ko_mmlu_international_law|5": { "acc": 0.7107438016528925, "acc_stderr": 0.041391127276354626, "acc_norm": 0.7107438016528925, "acc_norm_stderr": 0.041391127276354626 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.618421052631579, "acc_stderr": 0.03953173377749194, "acc_norm": 0.618421052631579, "acc_norm_stderr": 0.03953173377749194 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.5473856209150327, "acc_stderr": 0.020136790918492534, "acc_norm": 0.5473856209150327, "acc_norm_stderr": 0.020136790918492534 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.41843971631205673, "acc_stderr": 0.029427994039419994, "acc_norm": 0.41843971631205673, "acc_norm_stderr": 0.029427994039419994 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.41964285714285715, "acc_stderr": 0.04684099321077106, "acc_norm": 0.41964285714285715, "acc_norm_stderr": 0.04684099321077106 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.5185185185185185, "acc_stderr": 0.03407632093854053, "acc_norm": 0.5185185185185185, "acc_norm_stderr": 0.03407632093854053 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.014333522059217892, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.014333522059217892 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.77, "acc_stderr": 0.042295258468165044, "acc_norm": 0.77, "acc_norm_stderr": 0.042295258468165044 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.5514705882352942, "acc_stderr": 0.030211479609121596, "acc_norm": 0.5514705882352942, "acc_norm_stderr": 0.030211479609121596 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.7510204081632653, "acc_stderr": 0.027682979522960234, "acc_norm": 0.7510204081632653, "acc_norm_stderr": 0.027682979522960234 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.8059071729957806, "acc_stderr": 0.02574490253229094, "acc_norm": 0.8059071729957806, "acc_norm_stderr": 0.02574490253229094 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.42698826597131684, "acc_stderr": 0.012633353557534418, "acc_norm": 0.42698826597131684, "acc_norm_stderr": 0.012633353557534418 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.7696078431372549, "acc_stderr": 0.02955429260569508, "acc_norm": 0.7696078431372549, "acc_norm_stderr": 0.02955429260569508 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.7636363636363637, "acc_stderr": 0.03317505930009179, "acc_norm": 0.7636363636363637, "acc_norm_stderr": 0.03317505930009179 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.379436964504284, "mc1_stderr": 0.016987039266142992, "mc2": 0.543917694141216, "mc2_stderr": 0.015683013417932032 }, "harness|ko_commongen_v2|2": { "acc": 0.5596221959858324, "acc_stderr": 0.017067699774312967, "acc_norm": 0.5761511216056671, "acc_norm_stderr": 0.016989810834628253 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, 
"harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "JY623/KoSOLAR-10.7B-merge-v3.2", "model_sha": "2d91b66ad4dca0a9238c756d67112808657c0782", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }