{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3191126279863481, "acc_stderr": 0.013621696119173302, "acc_norm": 0.37627986348122866, "acc_norm_stderr": 0.014157022555407166 }, "harness|ko_hellaswag|10": { "acc": 0.36138219478191597, "acc_stderr": 0.004794191785967945, "acc_norm": 0.46614220274845647, "acc_norm_stderr": 0.004978328190775522 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.40350877192982454, "acc_stderr": 0.03762738699917055, "acc_norm": 0.40350877192982454, "acc_norm_stderr": 0.03762738699917055 }, "harness|ko_mmlu_management|5": { "acc": 0.5145631067961165, "acc_stderr": 0.04948637324026637, "acc_norm": 0.5145631067961165, "acc_norm_stderr": 0.04948637324026637 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.4789272030651341, "acc_stderr": 0.0178640767862129, "acc_norm": 0.4789272030651341, "acc_norm_stderr": 0.0178640767862129 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.37037037037037035, "acc_stderr": 0.04171654161354543, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.04171654161354543 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.39148936170212767, "acc_stderr": 0.03190701242326812, "acc_norm": 0.39148936170212767, "acc_norm_stderr": 0.03190701242326812 }, "harness|ko_mmlu_virology|5": { "acc": 0.39759036144578314, "acc_stderr": 0.038099730845402184, "acc_norm": 0.39759036144578314, "acc_norm_stderr": 0.038099730845402184 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4437299035369775, "acc_stderr": 0.02821768355665232, "acc_norm": 0.4437299035369775, "acc_norm_stderr": 0.02821768355665232 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.3721973094170404, "acc_stderr": 0.03244305283008731, "acc_norm": 0.3721973094170404, "acc_norm_stderr": 0.03244305283008731 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.5419847328244275, "acc_stderr": 0.04369802690578756, "acc_norm": 0.5419847328244275, "acc_norm_stderr": 0.04369802690578756 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.51010101010101, "acc_stderr": 0.035616254886737454, "acc_norm": 0.51010101010101, "acc_norm_stderr": 0.035616254886737454 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.4413793103448276, "acc_stderr": 0.04137931034482758, "acc_norm": 0.4413793103448276, "acc_norm_stderr": 0.04137931034482758 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237655, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237655 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.453781512605042, "acc_stderr": 0.03233943468182088, "acc_norm": 0.453781512605042, "acc_norm_stderr": 0.03233943468182088 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.3974358974358974, "acc_stderr": 0.024811920017903836, "acc_norm": 0.3974358974358974, "acc_norm_stderr": 0.024811920017903836 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5185185185185185, "acc_stderr": 0.04830366024635331, "acc_norm": 0.5185185185185185, 
"acc_norm_stderr": 0.04830366024635331 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.35467980295566504, "acc_stderr": 0.03366124489051449, "acc_norm": 0.35467980295566504, "acc_norm_stderr": 0.03366124489051449 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.47419354838709676, "acc_stderr": 0.028406095057653315, "acc_norm": 0.47419354838709676, "acc_norm_stderr": 0.028406095057653315 }, "harness|ko_mmlu_marketing|5": { "acc": 0.5897435897435898, "acc_stderr": 0.03222414045241107, "acc_norm": 0.5897435897435898, "acc_norm_stderr": 0.03222414045241107 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.3849056603773585, "acc_stderr": 0.029946498567699945, "acc_norm": 0.3849056603773585, "acc_norm_stderr": 0.029946498567699945 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.4818181818181818, "acc_stderr": 0.04785964010794916, "acc_norm": 0.4818181818181818, "acc_norm_stderr": 0.04785964010794916 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.23703703703703705, "acc_stderr": 0.02592887613276611, "acc_norm": 0.23703703703703705, "acc_norm_stderr": 0.02592887613276611 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2980132450331126, "acc_stderr": 0.037345356767871984, "acc_norm": 0.2980132450331126, "acc_norm_stderr": 0.037345356767871984 }, "harness|ko_mmlu_sociology|5": { "acc": 0.5422885572139303, "acc_stderr": 0.035228658640995975, "acc_norm": 0.5422885572139303, "acc_norm_stderr": 0.035228658640995975 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.37572254335260113, "acc_stderr": 0.036928207672648664, "acc_norm": 0.37572254335260113, "acc_norm_stderr": 0.036928207672648664 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2830687830687831, "acc_stderr": 0.023201392938194978, "acc_norm": 0.2830687830687831, "acc_norm_stderr": 0.023201392938194978 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2986111111111111, "acc_stderr": 0.03827052357950756, "acc_norm": 0.2986111111111111, "acc_norm_stderr": 0.03827052357950756 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.4595375722543353, "acc_stderr": 0.02683080599895224, "acc_norm": 0.4595375722543353, "acc_norm_stderr": 0.02683080599895224 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3619631901840491, "acc_stderr": 0.037757007291414416, "acc_norm": 0.3619631901840491, "acc_norm_stderr": 0.037757007291414416 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.4012345679012346, "acc_stderr": 0.0272725828498398, "acc_norm": 0.4012345679012346, "acc_norm_stderr": 0.0272725828498398 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.49740932642487046, "acc_stderr": 0.03608390745384486, "acc_norm": 0.49740932642487046, "acc_norm_stderr": 0.03608390745384486 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04434600701584926, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04434600701584926 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.44770642201834865, "acc_stderr": 0.021319754962425462, "acc_norm": 0.44770642201834865, "acc_norm_stderr": 0.021319754962425462 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.373015873015873, "acc_stderr": 0.04325506042017086, "acc_norm": 0.373015873015873, "acc_norm_stderr": 0.04325506042017086 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.4411764705882353, "acc_stderr": 0.028431095444176643, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.028431095444176643 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633 }, "harness|ko_mmlu_international_law|5": { "acc": 0.5702479338842975, "acc_stderr": 0.04519082021319774, "acc_norm": 0.5702479338842975, "acc_norm_stderr": 0.04519082021319774 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.48026315789473684, "acc_stderr": 0.040657710025626036, "acc_norm": 0.48026315789473684, "acc_norm_stderr": 0.040657710025626036 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.31699346405228757, "acc_stderr": 0.018824219512706207, "acc_norm": 0.31699346405228757, "acc_norm_stderr": 0.018824219512706207 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.30851063829787234, "acc_stderr": 0.027553366165101373, "acc_norm": 0.30851063829787234, "acc_norm_stderr": 0.027553366165101373 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.22321428571428573, "acc_stderr": 0.039523019677025116, "acc_norm": 0.22321428571428573, "acc_norm_stderr": 0.039523019677025116 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3888888888888889, "acc_stderr": 0.03324708911809117, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.03324708911809117 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.375, "acc_stderr": 0.029408372932278746, "acc_norm": 0.375, "acc_norm_stderr": 0.029408372932278746 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.43673469387755104, "acc_stderr": 0.031751952375833226, "acc_norm": 0.43673469387755104, "acc_norm_stderr": 0.031751952375833226 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.39662447257383965, "acc_stderr": 0.03184399873811226, "acc_norm": 0.39662447257383965, "acc_norm_stderr": 0.03184399873811226 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.31681877444589307, "acc_stderr": 0.011882349954723015, "acc_norm": 0.31681877444589307, "acc_norm_stderr": 0.011882349954723015 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.4019607843137255, "acc_stderr": 0.03441190023482466, "acc_norm": 0.4019607843137255, "acc_norm_stderr": 0.03441190023482466 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.42424242424242425, "acc_stderr": 0.038592681420702615, "acc_norm": 0.42424242424242425, "acc_norm_stderr": 0.038592681420702615 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.24357405140758873, "mc1_stderr": 0.015026354824910782, "mc2": 0.41395274449910313, "mc2_stderr": 0.015033140507060082 }, "harness|ko_commongen_v2|2": { "acc": 0.3825265643447462, "acc_stderr": 0.016709165387228806, "acc_norm": 0.4781582054309327, "acc_norm_stderr": 0.017173944474294378 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "TheBloke/Llama-2-13B-fp16", "model_sha": "b2e65e8ad4bb35e5abaee0170ebd5fc2134a50bb", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }