{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3165529010238908, "acc_stderr": 0.01359243151906808, "acc_norm": 0.38310580204778155, "acc_norm_stderr": 0.014206472661672876 }, "harness|ko_hellaswag|10": { "acc": 0.3536148177653854, "acc_stderr": 0.004771143074426131, "acc_norm": 0.45359490141406095, "acc_norm_stderr": 0.004968244611429387 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.4269005847953216, "acc_stderr": 0.03793620616529917, "acc_norm": 0.4269005847953216, "acc_norm_stderr": 0.03793620616529917 }, "harness|ko_mmlu_management|5": { "acc": 0.5631067961165048, "acc_stderr": 0.04911147107365777, "acc_norm": 0.5631067961165048, "acc_norm_stderr": 0.04911147107365777 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.4482758620689655, "acc_stderr": 0.017784034534992454, "acc_norm": 0.4482758620689655, "acc_norm_stderr": 0.017784034534992454 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.28888888888888886, "acc_stderr": 0.039154506304142495, "acc_norm": 0.28888888888888886, "acc_norm_stderr": 0.039154506304142495 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.425531914893617, "acc_stderr": 0.03232146916224469, "acc_norm": 0.425531914893617, "acc_norm_stderr": 0.03232146916224469 }, "harness|ko_mmlu_virology|5": { "acc": 0.3253012048192771, "acc_stderr": 0.03647168523683227, "acc_norm": 0.3253012048192771, "acc_norm_stderr": 0.03647168523683227 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.3858520900321543, "acc_stderr": 0.027648149599751464, "acc_norm": 0.3858520900321543, "acc_norm_stderr": 0.027648149599751464 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.336322869955157, "acc_stderr": 0.031708824268455, "acc_norm": 0.336322869955157, "acc_norm_stderr": 0.031708824268455 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.4580152671755725, "acc_stderr": 0.04369802690578756, "acc_norm": 0.4580152671755725, "acc_norm_stderr": 0.04369802690578756 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5202020202020202, "acc_stderr": 0.035594435655639196, "acc_norm": 0.5202020202020202, "acc_norm_stderr": 0.035594435655639196 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.36551724137931035, "acc_stderr": 0.04013124195424386, "acc_norm": 0.36551724137931035, "acc_norm_stderr": 0.04013124195424386 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.22549019607843138, "acc_stderr": 0.04158307533083286, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.04158307533083286 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.42016806722689076, "acc_stderr": 0.03206183783236153, "acc_norm": 0.42016806722689076, "acc_norm_stderr": 0.03206183783236153 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.3769230769230769, "acc_stderr": 0.024570975364225995, "acc_norm": 0.3769230769230769, "acc_norm_stderr": 0.024570975364225995 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.48148148148148145, "acc_stderr": 0.04830366024635331, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.04830366024635331 }, 
"harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.33004926108374383, "acc_stderr": 0.03308530426228258, "acc_norm": 0.33004926108374383, "acc_norm_stderr": 0.03308530426228258 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.4258064516129032, "acc_stderr": 0.028129112709165894, "acc_norm": 0.4258064516129032, "acc_norm_stderr": 0.028129112709165894 }, "harness|ko_mmlu_marketing|5": { "acc": 0.5641025641025641, "acc_stderr": 0.032485775115784, "acc_norm": 0.5641025641025641, "acc_norm_stderr": 0.032485775115784 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.3849056603773585, "acc_stderr": 0.029946498567699945, "acc_norm": 0.3849056603773585, "acc_norm_stderr": 0.029946498567699945 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.41818181818181815, "acc_stderr": 0.0472457740573157, "acc_norm": 0.41818181818181815, "acc_norm_stderr": 0.0472457740573157 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.24814814814814815, "acc_stderr": 0.0263357394040558, "acc_norm": 0.24814814814814815, "acc_norm_stderr": 0.0263357394040558 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.03802039760107903, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.03802039760107903 }, "harness|ko_mmlu_sociology|5": { "acc": 0.5174129353233831, "acc_stderr": 0.03533389234739245, "acc_norm": 0.5174129353233831, "acc_norm_stderr": 0.03533389234739245 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.36416184971098264, "acc_stderr": 0.03669072477416907, "acc_norm": 0.36416184971098264, "acc_norm_stderr": 0.03669072477416907 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2724867724867725, "acc_stderr": 0.022930973071633335, "acc_norm": 0.2724867724867725, "acc_norm_stderr": 0.022930973071633335 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3055555555555556, "acc_stderr": 0.03852084696008534, "acc_norm": 0.3055555555555556, "acc_norm_stderr": 0.03852084696008534 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.3959537572254335, "acc_stderr": 0.026329813341946253, "acc_norm": 0.3959537572254335, "acc_norm_stderr": 0.026329813341946253 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3987730061349693, "acc_stderr": 0.03847021420456023, "acc_norm": 0.3987730061349693, "acc_norm_stderr": 0.03847021420456023 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.3549382716049383, "acc_stderr": 0.02662415247884585, "acc_norm": 0.3549382716049383, "acc_norm_stderr": 0.02662415247884585 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.43523316062176165, "acc_stderr": 0.03578038165008586, "acc_norm": 0.43523316062176165, "acc_norm_stderr": 0.03578038165008586 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.24561403508771928, "acc_stderr": 0.040493392977481404, "acc_norm": 0.24561403508771928, "acc_norm_stderr": 0.040493392977481404 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.42201834862385323, "acc_stderr": 0.021174991407763178, "acc_norm": 0.42201834862385323, "acc_norm_stderr": 0.021174991407763178 }, "harness|ko_mmlu_formal_logic|5": { "acc": 0.2857142857142857, 
"acc_stderr": 0.0404061017820884, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.0404061017820884 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.41830065359477125, "acc_stderr": 0.02824513402438729, "acc_norm": 0.41830065359477125, "acc_norm_stderr": 0.02824513402438729 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6115702479338843, "acc_stderr": 0.04449270350068383, "acc_norm": 0.6115702479338843, "acc_norm_stderr": 0.04449270350068383 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.3815789473684211, "acc_stderr": 0.03953173377749194, "acc_norm": 0.3815789473684211, "acc_norm_stderr": 0.03953173377749194 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.29411764705882354, "acc_stderr": 0.018433427649401896, "acc_norm": 0.29411764705882354, "acc_norm_stderr": 0.018433427649401896 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2553191489361702, "acc_stderr": 0.026011992930902002, "acc_norm": 0.2553191489361702, "acc_norm_stderr": 0.026011992930902002 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.25892857142857145, "acc_stderr": 0.041577515398656284, "acc_norm": 0.25892857142857145, "acc_norm_stderr": 0.041577515398656284 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.38425925925925924, "acc_stderr": 0.03317354514310742, "acc_norm": 0.38425925925925924, "acc_norm_stderr": 0.03317354514310742 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.30514705882352944, "acc_stderr": 0.0279715413701706, "acc_norm": 0.30514705882352944, "acc_norm_stderr": 0.0279715413701706 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.3877551020408163, "acc_stderr": 0.03119223072679566, "acc_norm": 0.3877551020408163, "acc_norm_stderr": 0.03119223072679566 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.33755274261603374, "acc_stderr": 0.03078154910202622, "acc_norm": 0.33755274261603374, "acc_norm_stderr": 0.03078154910202622 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.27249022164276404, "acc_stderr": 0.011371658294311532, "acc_norm": 0.27249022164276404, "acc_norm_stderr": 0.011371658294311532 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.3333333333333333, "acc_stderr": 0.03308611113236434, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.03308611113236434 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.3696969696969697, "acc_stderr": 0.03769430314512566, "acc_norm": 0.3696969696969697, "acc_norm_stderr": 0.03769430314512566 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.3023255813953488, "mc1_stderr": 0.016077509266133033, "mc2": 0.46645373213159264, "mc2_stderr": 0.015378490920195349 }, "harness|ko_commongen_v2|2": { "acc": 0.33884297520661155, "acc_stderr": 0.016272952997019124, "acc_norm": 0.4014167650531287, "acc_norm_stderr": 0.01685290785872906 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, 
"harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "cepiloth/ko-llama2-13b-finetune", "model_sha": "15f8932879b2e7880baf3402b1a150f9ff36d370", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }