{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3225255972696246, "acc_stderr": 0.013659980894277375, "acc_norm": 0.38054607508532423, "acc_norm_stderr": 0.014188277712349819 }, "harness|ko_hellaswag|10": { "acc": 0.38179645488946423, "acc_stderr": 0.004848341560492151, "acc_norm": 0.4963154750049791, "acc_norm_stderr": 0.004989645929811438 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.42105263157894735, "acc_stderr": 0.037867207062342145, "acc_norm": 0.42105263157894735, "acc_norm_stderr": 0.037867207062342145 }, "harness|ko_mmlu_management|5": { "acc": 0.34951456310679613, "acc_stderr": 0.04721188506097172, "acc_norm": 0.34951456310679613, "acc_norm_stderr": 0.04721188506097172 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.39208173690932313, "acc_stderr": 0.01745852405014764, "acc_norm": 0.39208173690932313, "acc_norm_stderr": 0.01745852405014764 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.37037037037037035, "acc_stderr": 0.04171654161354544, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.04171654161354544 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.2723404255319149, "acc_stderr": 0.029101290698386705, "acc_norm": 0.2723404255319149, "acc_norm_stderr": 0.029101290698386705 }, "harness|ko_mmlu_virology|5": { "acc": 0.3253012048192771, "acc_stderr": 0.03647168523683227, "acc_norm": 0.3253012048192771, "acc_norm_stderr": 0.03647168523683227 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.39228295819935693, "acc_stderr": 0.027731258647011998, "acc_norm": 0.39228295819935693, "acc_norm_stderr": 0.027731258647011998 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.40358744394618834, "acc_stderr": 0.03292802819330313, "acc_norm": 0.40358744394618834, "acc_norm_stderr": 0.03292802819330313 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.4961832061068702, "acc_stderr": 0.043851623256015534, "acc_norm": 0.4961832061068702, "acc_norm_stderr": 0.043851623256015534 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.3888888888888889, "acc_stderr": 0.0347327959083696, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.0347327959083696 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.43448275862068964, "acc_stderr": 0.04130740879555497, "acc_norm": 0.43448275862068964, "acc_norm_stderr": 0.04130740879555497 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.3739495798319328, "acc_stderr": 0.031429466378837076, "acc_norm": 0.3739495798319328, "acc_norm_stderr": 0.031429466378837076 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.31794871794871793, "acc_stderr": 0.02361088430892786, "acc_norm": 0.31794871794871793, "acc_norm_stderr": 0.02361088430892786 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.3888888888888889, "acc_stderr": 0.0471282125742677, "acc_norm": 
0.3888888888888889, "acc_norm_stderr": 0.0471282125742677 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.26108374384236455, "acc_stderr": 0.030903796952114485, "acc_norm": 0.26108374384236455, "acc_norm_stderr": 0.030903796952114485 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.34516129032258064, "acc_stderr": 0.027045746573534327, "acc_norm": 0.34516129032258064, "acc_norm_stderr": 0.027045746573534327 }, "harness|ko_mmlu_marketing|5": { "acc": 0.5, "acc_stderr": 0.03275608910402091, "acc_norm": 0.5, "acc_norm_stderr": 0.03275608910402091 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.3433962264150943, "acc_stderr": 0.029224526469124792, "acc_norm": 0.3433962264150943, "acc_norm_stderr": 0.029224526469124792 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.35454545454545455, "acc_stderr": 0.04582004841505415, "acc_norm": 0.35454545454545455, "acc_norm_stderr": 0.04582004841505415 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.24444444444444444, "acc_stderr": 0.02620276653465215, "acc_norm": 0.24444444444444444, "acc_norm_stderr": 0.02620276653465215 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2781456953642384, "acc_stderr": 0.03658603262763743, "acc_norm": 0.2781456953642384, "acc_norm_stderr": 0.03658603262763743 }, "harness|ko_mmlu_sociology|5": { "acc": 0.43781094527363185, "acc_stderr": 0.035080801121998406, "acc_norm": 0.43781094527363185, "acc_norm_stderr": 0.035080801121998406 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3063583815028902, "acc_stderr": 0.03514942551267437, "acc_norm": 0.3063583815028902, "acc_norm_stderr": 0.03514942551267437 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.24338624338624337, "acc_stderr": 0.022101128787415426, "acc_norm": 0.24338624338624337, "acc_norm_stderr": 0.022101128787415426 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3263888888888889, "acc_stderr": 0.03921067198982266, "acc_norm": 0.3263888888888889, "acc_norm_stderr": 0.03921067198982266 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.4161849710982659, "acc_stderr": 0.02653818910470548, "acc_norm": 0.4161849710982659, "acc_norm_stderr": 0.02653818910470548 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.32515337423312884, "acc_stderr": 0.03680350371286461, "acc_norm": 0.32515337423312884, "acc_norm_stderr": 0.03680350371286461 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.39197530864197533, "acc_stderr": 0.02716368603827123, "acc_norm": 0.39197530864197533, "acc_norm_stderr": 0.02716368603827123 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.42487046632124353, "acc_stderr": 0.0356747133521254, "acc_norm": 0.42487046632124353, "acc_norm_stderr": 0.0356747133521254 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.24561403508771928, "acc_stderr": 0.04049339297748141, "acc_norm": 0.24561403508771928, "acc_norm_stderr": 0.04049339297748141 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.3926605504587156, "acc_stderr": 0.020937505161201093, "acc_norm": 0.3926605504587156, "acc_norm_stderr": 0.020937505161201093 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.25396825396825395, "acc_stderr": 0.038932596106046734, "acc_norm": 0.25396825396825395, "acc_norm_stderr": 0.038932596106046734 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.4084967320261438, "acc_stderr": 0.028146405993096358, "acc_norm": 0.4084967320261438, "acc_norm_stderr": 0.028146405993096358 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|ko_mmlu_international_law|5": { "acc": 0.49586776859504134, "acc_stderr": 0.04564198767432754, "acc_norm": 0.49586776859504134, "acc_norm_stderr": 0.04564198767432754 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.27631578947368424, "acc_stderr": 0.03639057569952925, "acc_norm": 0.27631578947368424, "acc_norm_stderr": 0.03639057569952925 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3284313725490196, "acc_stderr": 0.01899970738316266, "acc_norm": 0.3284313725490196, "acc_norm_stderr": 0.01899970738316266 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2765957446808511, "acc_stderr": 0.026684564340460997, "acc_norm": 0.2765957446808511, "acc_norm_stderr": 0.026684564340460997 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.30357142857142855, "acc_stderr": 0.04364226155841044, "acc_norm": 0.30357142857142855, "acc_norm_stderr": 0.04364226155841044 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3425925925925926, "acc_stderr": 0.03236585252602157, "acc_norm": 0.3425925925925926, "acc_norm_stderr": 0.03236585252602157 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2569832402234637, "acc_stderr": 0.014614465821966344, "acc_norm": 0.2569832402234637, "acc_norm_stderr": 0.014614465821966344 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.41911764705882354, "acc_stderr": 0.02997280717046463, "acc_norm": 0.41911764705882354, "acc_norm_stderr": 0.02997280717046463 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.39183673469387753, "acc_stderr": 0.031251275910891656, "acc_norm": 0.39183673469387753, "acc_norm_stderr": 0.031251275910891656 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.48945147679324896, "acc_stderr": 0.032539983791662855, "acc_norm": 0.48945147679324896, "acc_norm_stderr": 0.032539983791662855 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.32529335071707954, "acc_stderr": 0.01196531153657153, "acc_norm": 0.32529335071707954, "acc_norm_stderr": 0.01196531153657153 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.35784313725490197, "acc_stderr": 0.033644872860882996, "acc_norm": 0.35784313725490197, "acc_norm_stderr": 0.033644872860882996 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.3393939393939394, "acc_stderr": 0.03697442205031596, "acc_norm": 0.3393939393939394, "acc_norm_stderr": 0.03697442205031596 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.24112607099143207, "mc1_stderr": 0.014974827279752332, "mc2": 0.37686510476734664, "mc2_stderr": 0.014752533377181794 }, "harness|ko_commongen_v2|2": { "acc": 0.21959858323494688, "acc_stderr": 0.01423274308558027, "acc_norm": 0.30932703659976385, "acc_norm_stderr": 0.015891320505520893 } }, "versions": { "all": 0, 
"harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "kyujinpy/KoT-platypus2-7B", "model_sha": "33eb53d72129db3b1936f07fd894a18b571d7ab6", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }