{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.378839590443686, "acc_stderr": 0.014175915490000326, "acc_norm": 0.4522184300341297, "acc_norm_stderr": 0.014544519880633832 }, "harness|ko_hellaswag|10": { "acc": 0.4208325034853615, "acc_stderr": 0.004926837572202162, "acc_norm": 0.563433578968333, "acc_norm_stderr": 0.00494946256368134 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.5029239766081871, "acc_stderr": 0.03834759370936839, "acc_norm": 0.5029239766081871, "acc_norm_stderr": 0.03834759370936839 }, "harness|ko_mmlu_management|5": { "acc": 0.4854368932038835, "acc_stderr": 0.04948637324026637, "acc_norm": 0.4854368932038835, "acc_norm_stderr": 0.04948637324026637 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5261813537675607, "acc_stderr": 0.017855434554041993, "acc_norm": 0.5261813537675607, "acc_norm_stderr": 0.017855434554041993 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.4740740740740741, "acc_stderr": 0.04313531696750574, "acc_norm": 0.4740740740740741, "acc_norm_stderr": 0.04313531696750574 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.045604802157206824, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206824 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.39148936170212767, "acc_stderr": 0.03190701242326812, "acc_norm": 0.39148936170212767, "acc_norm_stderr": 0.03190701242326812 }, "harness|ko_mmlu_virology|5": { "acc": 0.45180722891566266, "acc_stderr": 0.03874371556587953, "acc_norm": 0.45180722891566266, "acc_norm_stderr": 0.03874371556587953 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.5048231511254019, "acc_stderr": 0.028396770444111298, "acc_norm": 0.5048231511254019, "acc_norm_stderr": 0.028396770444111298 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.4977578475336323, "acc_stderr": 0.033557465352232634, "acc_norm": 0.4977578475336323, "acc_norm_stderr": 0.033557465352232634 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.4732824427480916, "acc_stderr": 0.04379024936553894, "acc_norm": 0.4732824427480916, "acc_norm_stderr": 0.04379024936553894 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5505050505050505, "acc_stderr": 0.0354413249194797, "acc_norm": 0.5505050505050505, "acc_norm_stderr": 0.0354413249194797 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.41379310344827586, "acc_stderr": 0.04104269211806232, "acc_norm": 0.41379310344827586, "acc_norm_stderr": 0.04104269211806232 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.042207736591714534, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.042207736591714534 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.453781512605042, "acc_stderr": 0.03233943468182088, "acc_norm": 0.453781512605042, "acc_norm_stderr": 0.03233943468182088 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.441025641025641, "acc_stderr": 0.02517404838400078, "acc_norm": 0.441025641025641, "acc_norm_stderr": 0.02517404838400078 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.4537037037037037, "acc_stderr": 0.04812917324536823, "acc_norm": 0.4537037037037037, 
"acc_norm_stderr": 0.04812917324536823 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3842364532019704, "acc_stderr": 0.0342239856565755, "acc_norm": 0.3842364532019704, "acc_norm_stderr": 0.0342239856565755 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.4935483870967742, "acc_stderr": 0.02844163823354051, "acc_norm": 0.4935483870967742, "acc_norm_stderr": 0.02844163823354051 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6410256410256411, "acc_stderr": 0.03142616993791924, "acc_norm": 0.6410256410256411, "acc_norm_stderr": 0.03142616993791924 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4339622641509434, "acc_stderr": 0.030503292013342596, "acc_norm": 0.4339622641509434, "acc_norm_stderr": 0.030503292013342596 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.509090909090909, "acc_stderr": 0.04788339768702861, "acc_norm": 0.509090909090909, "acc_norm_stderr": 0.04788339768702861 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.26296296296296295, "acc_stderr": 0.026842057873833706, "acc_norm": 0.26296296296296295, "acc_norm_stderr": 0.026842057873833706 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.31125827814569534, "acc_stderr": 0.03780445850526733, "acc_norm": 0.31125827814569534, "acc_norm_stderr": 0.03780445850526733 }, "harness|ko_mmlu_sociology|5": { "acc": 0.5671641791044776, "acc_stderr": 0.03503490923673281, "acc_norm": 0.5671641791044776, "acc_norm_stderr": 0.03503490923673281 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.42196531791907516, "acc_stderr": 0.0376574669386515, "acc_norm": 0.42196531791907516, "acc_norm_stderr": 0.0376574669386515 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2804232804232804, "acc_stderr": 0.02313528797432563, "acc_norm": 0.2804232804232804, "acc_norm_stderr": 0.02313528797432563 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3680555555555556, "acc_stderr": 0.04032999053960718, "acc_norm": 0.3680555555555556, "acc_norm_stderr": 0.04032999053960718 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.49421965317919075, "acc_stderr": 0.026917296179149116, "acc_norm": 0.49421965317919075, "acc_norm_stderr": 0.026917296179149116 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.4785276073619632, "acc_stderr": 0.03924746876751129, "acc_norm": 0.4785276073619632, "acc_norm_stderr": 0.03924746876751129 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.44753086419753085, "acc_stderr": 0.027667138569422704, "acc_norm": 0.44753086419753085, "acc_norm_stderr": 0.027667138569422704 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5233160621761658, "acc_stderr": 0.03604513672442202, "acc_norm": 0.5233160621761658, "acc_norm_stderr": 0.03604513672442202 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813365 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.5431192660550459, "acc_stderr": 0.02135745878522621, "acc_norm": 0.5431192660550459, "acc_norm_stderr": 0.02135745878522621 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.2619047619047619, "acc_stderr": 0.03932537680392868, "acc_norm": 0.2619047619047619, "acc_norm_stderr": 0.03932537680392868 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.4542483660130719, "acc_stderr": 0.028509807802626567, "acc_norm": 0.4542483660130719, "acc_norm_stderr": 0.028509807802626567 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6363636363636364, "acc_stderr": 0.043913262867240704, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.043913262867240704 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.35526315789473684, "acc_stderr": 0.03894734487013316, "acc_norm": 0.35526315789473684, "acc_norm_stderr": 0.03894734487013316 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3464052287581699, "acc_stderr": 0.019249785691717217, "acc_norm": 0.3464052287581699, "acc_norm_stderr": 0.019249785691717217 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2907801418439716, "acc_stderr": 0.027090664368353178, "acc_norm": 0.2907801418439716, "acc_norm_stderr": 0.027090664368353178 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.24107142857142858, "acc_stderr": 0.04059867246952687, "acc_norm": 0.24107142857142858, "acc_norm_stderr": 0.04059867246952687 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3148148148148148, "acc_stderr": 0.031674687068289784, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.031674687068289784 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.43, "acc_stderr": 0.04975698519562428, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562428 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4375, "acc_stderr": 0.030134614954403924, "acc_norm": 0.4375, "acc_norm_stderr": 0.030134614954403924 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.45714285714285713, "acc_stderr": 0.03189141832421397, "acc_norm": 0.45714285714285713, "acc_norm_stderr": 0.03189141832421397 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5232067510548524, "acc_stderr": 0.032512152011410174, "acc_norm": 0.5232067510548524, "acc_norm_stderr": 0.032512152011410174 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.3050847457627119, "acc_stderr": 0.011759939618085455, "acc_norm": 0.3050847457627119, "acc_norm_stderr": 0.011759939618085455 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.4166666666666667, "acc_stderr": 0.03460228327239171, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.03460228327239171 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.4484848484848485, "acc_stderr": 0.038835659779569286, "acc_norm": 0.4484848484848485, "acc_norm_stderr": 0.038835659779569286 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.27050183598531213, "mc1_stderr": 0.015550778332842883, "mc2": 0.4216743604441881, "mc2_stderr": 0.014868064514296196 }, "harness|ko_commongen_v2|2": { "acc": 0.45454545454545453, "acc_stderr": 0.017119172208061504, "acc_norm": 0.5395513577331759, "acc_norm_stderr": 0.017136487626049846 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 
0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "Kaeri-Jenti/llama-2-koen-13b-v1.3", "model_sha": "a926510aca20383788b1d49fc2a16edac5919f2c", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }