{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.28071672354948807, "acc_stderr": 0.013131238126975588, "acc_norm": 0.31399317406143346, "acc_norm_stderr": 0.013562691224726295 }, "harness|ko_hellaswag|10": { "acc": 0.3169687313284206, "acc_stderr": 0.004643441945489851, "acc_norm": 0.37572196773551086, "acc_norm_stderr": 0.004833189651626803 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.2807017543859649, "acc_stderr": 0.03446296217088426, "acc_norm": 0.2807017543859649, "acc_norm_stderr": 0.03446296217088426 }, "harness|ko_mmlu_management|5": { "acc": 0.4563106796116505, "acc_stderr": 0.04931801994220414, "acc_norm": 0.4563106796116505, "acc_norm_stderr": 0.04931801994220414 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.3895274584929757, "acc_stderr": 0.017438082556264594, "acc_norm": 0.3895274584929757, "acc_norm_stderr": 0.017438082556264594 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.34814814814814815, "acc_stderr": 0.041153246103369526, "acc_norm": 0.34814814814814815, "acc_norm_stderr": 0.041153246103369526 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.3446808510638298, "acc_stderr": 0.03106898596312215, "acc_norm": 0.3446808510638298, "acc_norm_stderr": 0.03106898596312215 }, "harness|ko_mmlu_virology|5": { "acc": 0.3192771084337349, "acc_stderr": 0.0362933532994786, "acc_norm": 0.3192771084337349, "acc_norm_stderr": 0.0362933532994786 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.40514469453376206, "acc_stderr": 0.02788238379132595, "acc_norm": 0.40514469453376206, "acc_norm_stderr": 0.02788238379132595 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.34080717488789236, "acc_stderr": 0.03181149747055359, "acc_norm": 0.34080717488789236, "acc_norm_stderr": 0.03181149747055359 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.3969465648854962, "acc_stderr": 0.04291135671009225, "acc_norm": 0.3969465648854962, "acc_norm_stderr": 0.04291135671009225 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.3838383838383838, "acc_stderr": 0.03464881675016337, "acc_norm": 0.3838383838383838, "acc_norm_stderr": 0.03464881675016337 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.4068965517241379, "acc_stderr": 0.04093793981266237, "acc_norm": 0.4068965517241379, "acc_norm_stderr": 0.04093793981266237 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.20588235294117646, "acc_stderr": 0.04023382273617747, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.04023382273617747 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.42436974789915966, "acc_stderr": 0.03210479051015776, "acc_norm": 0.42436974789915966, "acc_norm_stderr": 0.03210479051015776 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.3974358974358974, "acc_stderr": 0.024811920017903836, "acc_norm": 0.3974358974358974, "acc_norm_stderr": 0.024811920017903836 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5, "acc_stderr": 0.04833682445228318, "acc_norm": 0.5, "acc_norm_stderr": 0.04833682445228318 }, 
"harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.35960591133004927, "acc_stderr": 0.03376458246509567, "acc_norm": 0.35960591133004927, "acc_norm_stderr": 0.03376458246509567 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.3870967741935484, "acc_stderr": 0.02770935967503249, "acc_norm": 0.3870967741935484, "acc_norm_stderr": 0.02770935967503249 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6111111111111112, "acc_stderr": 0.03193705726200293, "acc_norm": 0.6111111111111112, "acc_norm_stderr": 0.03193705726200293 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.37358490566037733, "acc_stderr": 0.029773082713319878, "acc_norm": 0.37358490566037733, "acc_norm_stderr": 0.029773082713319878 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.45454545454545453, "acc_stderr": 0.04769300568972743, "acc_norm": 0.45454545454545453, "acc_norm_stderr": 0.04769300568972743 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3, "acc_stderr": 0.027940457136228416, "acc_norm": 0.3, "acc_norm_stderr": 0.027940457136228416 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.271523178807947, "acc_stderr": 0.036313298039696525, "acc_norm": 0.271523178807947, "acc_norm_stderr": 0.036313298039696525 }, "harness|ko_mmlu_sociology|5": { "acc": 0.5074626865671642, "acc_stderr": 0.03535140084276719, "acc_norm": 0.5074626865671642, "acc_norm_stderr": 0.03535140084276719 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3872832369942196, "acc_stderr": 0.03714325906302065, "acc_norm": 0.3872832369942196, "acc_norm_stderr": 0.03714325906302065 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.3386243386243386, "acc_stderr": 0.024373197867983053, "acc_norm": 0.3386243386243386, "acc_norm_stderr": 0.024373197867983053 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3194444444444444, "acc_stderr": 0.03899073687357335, "acc_norm": 0.3194444444444444, "acc_norm_stderr": 0.03899073687357335 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.49, "acc_stderr": 0.05024183937956913, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956913 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.38439306358381503, "acc_stderr": 0.026189666966272035, "acc_norm": 0.38439306358381503, "acc_norm_stderr": 0.026189666966272035 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3803680981595092, "acc_stderr": 0.03814269893261837, "acc_norm": 0.3803680981595092, "acc_norm_stderr": 0.03814269893261837 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.35802469135802467, "acc_stderr": 0.026675611926037093, "acc_norm": 0.35802469135802467, "acc_norm_stderr": 0.026675611926037093 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.3160621761658031, "acc_stderr": 0.033553973696861736, "acc_norm": 0.3160621761658031, "acc_norm_stderr": 0.033553973696861736 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.32456140350877194, "acc_stderr": 0.04404556157374768, "acc_norm": 0.32456140350877194, "acc_norm_stderr": 0.04404556157374768 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.3889908256880734, "acc_stderr": 0.020902300887392866, "acc_norm": 0.3889908256880734, "acc_norm_stderr": 0.020902300887392866 }, "harness|ko_mmlu_formal_logic|5": { "acc": 0.2698412698412698, "acc_stderr": 
0.03970158273235172, "acc_norm": 0.2698412698412698, "acc_norm_stderr": 0.03970158273235172 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.4019607843137255, "acc_stderr": 0.028074158947600663, "acc_norm": 0.4019607843137255, "acc_norm_stderr": 0.028074158947600663 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|ko_mmlu_international_law|5": { "acc": 0.5785123966942148, "acc_stderr": 0.045077322787750874, "acc_norm": 0.5785123966942148, "acc_norm_stderr": 0.045077322787750874 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.2565789473684211, "acc_stderr": 0.0355418036802569, "acc_norm": 0.2565789473684211, "acc_norm_stderr": 0.0355418036802569 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.33169934640522875, "acc_stderr": 0.019047485239360385, "acc_norm": 0.33169934640522875, "acc_norm_stderr": 0.019047485239360385 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.30141843971631205, "acc_stderr": 0.02737412888263115, "acc_norm": 0.30141843971631205, "acc_norm_stderr": 0.02737412888263115 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.32142857142857145, "acc_stderr": 0.04432804055291519, "acc_norm": 0.32142857142857145, "acc_norm_stderr": 0.04432804055291519 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.28703703703703703, "acc_stderr": 0.030851992993257013, "acc_norm": 0.28703703703703703, "acc_norm_stderr": 0.030851992993257013 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2569832402234637, "acc_stderr": 0.014614465821966344, "acc_norm": 0.2569832402234637, "acc_norm_stderr": 0.014614465821966344 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.27941176470588236, "acc_stderr": 0.027257202606114948, "acc_norm": 0.27941176470588236, "acc_norm_stderr": 0.027257202606114948 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.3306122448979592, "acc_stderr": 0.030116426296540603, "acc_norm": 0.3306122448979592, "acc_norm_stderr": 0.030116426296540603 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.4092827004219409, "acc_stderr": 0.03200704183359592, "acc_norm": 0.4092827004219409, "acc_norm_stderr": 0.03200704183359592 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.2953063885267275, "acc_stderr": 0.011651061936208828, "acc_norm": 0.2953063885267275, "acc_norm_stderr": 0.011651061936208828 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.35294117647058826, "acc_stderr": 0.03354092437591519, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.03354092437591519 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.44242424242424244, "acc_stderr": 0.03878372113711274, "acc_norm": 0.44242424242424244, "acc_norm_stderr": 0.03878372113711274 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.29253365973072215, "mc1_stderr": 0.015925597445286165, "mc2": 0.47192954180885394, "mc2_stderr": 0.015632043165156227 }, "harness|ko_commongen_v2|2": { "acc": 0.3919716646989374, "acc_stderr": 0.01678433211942408, "acc_norm": 0.4734356552538371, "acc_norm_stderr": 0.017166075717577747 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, 
"harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "DopeorNope/Ko-Mixtral-v1.3-MoE-7Bx2", "model_sha": "8915c4e51561aa3c26da0ae802319eda218341d7", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }