{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.30802047781569963, "acc_stderr": 0.013491429517292038, "acc_norm": 0.34726962457337884, "acc_norm_stderr": 0.013913034529620439 }, "harness|ko_hellaswag|10": { "acc": 0.37004580760804623, "acc_stderr": 0.004818298991012552, "acc_norm": 0.47231627165903206, "acc_norm_stderr": 0.004982127315605219 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.26900584795321636, "acc_stderr": 0.03401052620104088, "acc_norm": 0.26900584795321636, "acc_norm_stderr": 0.03401052620104088 }, "harness|ko_mmlu_management|5": { "acc": 0.2621359223300971, "acc_stderr": 0.043546310772605956, "acc_norm": 0.2621359223300971, "acc_norm_stderr": 0.043546310772605956 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.2988505747126437, "acc_stderr": 0.016369256815093127, "acc_norm": 0.2988505747126437, "acc_norm_stderr": 0.016369256815093127 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.3111111111111111, "acc_stderr": 0.03999262876617724, "acc_norm": 0.3111111111111111, "acc_norm_stderr": 0.03999262876617724 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.225531914893617, "acc_stderr": 0.02732107841738753, "acc_norm": 0.225531914893617, "acc_norm_stderr": 0.02732107841738753 }, "harness|ko_mmlu_virology|5": { "acc": 0.27710843373493976, "acc_stderr": 0.034843315926805875, "acc_norm": 0.27710843373493976, "acc_norm_stderr": 0.034843315926805875 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.33762057877813506, "acc_stderr": 0.02685882587948855, "acc_norm": 0.33762057877813506, "acc_norm_stderr": 0.02685882587948855 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.2645739910313901, "acc_stderr": 0.029605103217038332, "acc_norm": 0.2645739910313901, "acc_norm_stderr": 0.029605103217038332 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.25190839694656486, "acc_stderr": 0.038073871163060866, "acc_norm": 0.25190839694656486, "acc_norm_stderr": 0.038073871163060866 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.2777777777777778, "acc_stderr": 0.03191178226713547, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.03191178226713547 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.30344827586206896, "acc_stderr": 0.03831226048850333, "acc_norm": 0.30344827586206896, "acc_norm_stderr": 0.03831226048850333 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.042207736591714506, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.042207736591714506 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.21428571428571427, "acc_stderr": 0.02665353159671549, "acc_norm": 0.21428571428571427, "acc_norm_stderr": 0.02665353159671549 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.21794871794871795, "acc_stderr": 0.02093244577446317, "acc_norm": 0.21794871794871795, "acc_norm_stderr": 0.02093244577446317 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.26, "acc_stderr": 0.04408440022768077, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768077 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.35185185185185186, "acc_stderr": 0.046166311118017125, "acc_norm": 
0.35185185185185186, "acc_norm_stderr": 0.046166311118017125 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.270935960591133, "acc_stderr": 0.03127090713297698, "acc_norm": 0.270935960591133, "acc_norm_stderr": 0.03127090713297698 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.267741935483871, "acc_stderr": 0.025189006660212385, "acc_norm": 0.267741935483871, "acc_norm_stderr": 0.025189006660212385 }, "harness|ko_mmlu_marketing|5": { "acc": 0.3076923076923077, "acc_stderr": 0.030236389942173106, "acc_norm": 0.3076923076923077, "acc_norm_stderr": 0.030236389942173106 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.27169811320754716, "acc_stderr": 0.027377706624670713, "acc_norm": 0.27169811320754716, "acc_norm_stderr": 0.027377706624670713 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.2545454545454545, "acc_stderr": 0.041723430387053825, "acc_norm": 0.2545454545454545, "acc_norm_stderr": 0.041723430387053825 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.026719240783712166, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.026719240783712166 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2781456953642384, "acc_stderr": 0.036586032627637426, "acc_norm": 0.2781456953642384, "acc_norm_stderr": 0.036586032627637426 }, "harness|ko_mmlu_sociology|5": { "acc": 0.3482587064676617, "acc_stderr": 0.033687874661154596, "acc_norm": 0.3482587064676617, "acc_norm_stderr": 0.033687874661154596 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.24855491329479767, "acc_stderr": 0.03295304696818317, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.03295304696818317 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.24867724867724866, "acc_stderr": 0.022261817692400175, "acc_norm": 0.24867724867724866, "acc_norm_stderr": 0.022261817692400175 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2638888888888889, "acc_stderr": 0.03685651095897532, "acc_norm": 0.2638888888888889, "acc_norm_stderr": 0.03685651095897532 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.18, "acc_stderr": 0.038612291966536955, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536955 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.30057803468208094, "acc_stderr": 0.024685316867257806, "acc_norm": 0.30057803468208094, "acc_norm_stderr": 0.024685316867257806 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3067484662576687, "acc_stderr": 0.036230899157241474, "acc_norm": 0.3067484662576687, "acc_norm_stderr": 0.036230899157241474 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.3055555555555556, "acc_stderr": 0.02563082497562135, "acc_norm": 0.3055555555555556, "acc_norm_stderr": 0.02563082497562135 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.24352331606217617, "acc_stderr": 0.030975436386845436, "acc_norm": 0.24352331606217617, "acc_norm_stderr": 0.030975436386845436 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.24561403508771928, "acc_stderr": 0.04049339297748141, "acc_norm": 0.24561403508771928, "acc_norm_stderr": 0.04049339297748141 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.29541284403669726, "acc_stderr": 0.019560619182976, "acc_norm": 0.29541284403669726, "acc_norm_stderr": 
0.019560619182976 }, "harness|ko_mmlu_formal_logic|5": { "acc": 0.14285714285714285, "acc_stderr": 0.0312984318574381, "acc_norm": 0.14285714285714285, "acc_norm_stderr": 0.0312984318574381 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.30392156862745096, "acc_stderr": 0.026336613469046644, "acc_norm": 0.30392156862745096, "acc_norm_stderr": 0.026336613469046644 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.23, "acc_stderr": 0.04229525846816508, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816508 }, "harness|ko_mmlu_international_law|5": { "acc": 0.4462809917355372, "acc_stderr": 0.0453793517794788, "acc_norm": 0.4462809917355372, "acc_norm_stderr": 0.0453793517794788 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.3355263157894737, "acc_stderr": 0.038424985593952694, "acc_norm": 0.3355263157894737, "acc_norm_stderr": 0.038424985593952694 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3022875816993464, "acc_stderr": 0.018579232711113877, "acc_norm": 0.3022875816993464, "acc_norm_stderr": 0.018579232711113877 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2907801418439716, "acc_stderr": 0.027090664368353178, "acc_norm": 0.2907801418439716, "acc_norm_stderr": 0.027090664368353178 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.26785714285714285, "acc_stderr": 0.0420327729146776, "acc_norm": 0.26785714285714285, "acc_norm_stderr": 0.0420327729146776 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.2037037037037037, "acc_stderr": 0.02746740180405799, "acc_norm": 0.2037037037037037, "acc_norm_stderr": 0.02746740180405799 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.17279411764705882, "acc_stderr": 0.022966067585581788, "acc_norm": 0.17279411764705882, "acc_norm_stderr": 0.022966067585581788 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.2163265306122449, "acc_stderr": 0.026358916334904028, "acc_norm": 0.2163265306122449, "acc_norm_stderr": 0.026358916334904028 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.3080168776371308, "acc_stderr": 0.030052389335605695, "acc_norm": 0.3080168776371308, "acc_norm_stderr": 0.030052389335605695 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.26140808344198174, "acc_stderr": 0.011222528169771312, "acc_norm": 0.26140808344198174, "acc_norm_stderr": 0.011222528169771312 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.29901960784313725, "acc_stderr": 0.03213325717373618, "acc_norm": 0.29901960784313725, "acc_norm_stderr": 0.03213325717373618 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.3393939393939394, "acc_stderr": 0.03697442205031595, "acc_norm": 0.3393939393939394, "acc_norm_stderr": 0.03697442205031595 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.23745410036719705, "mc1_stderr": 0.014896277441041867, "mc2": 0.3946101299678252, "mc2_stderr": 0.01496139592173614 }, "harness|ko_commongen_v2|2": { "acc": 0.4061032863849765, "acc_stderr": 0.016834837668044094, "acc_norm": 0.4460093896713615, "acc_norm_stderr": 0.017039561832563683 } }, "versions": { "all": 0, 
"harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "heegyu/llama-2-ko-7b-chat", "model_sha": "98096a3f4d095e42ba10daec38ad329d9576f4cd", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }