{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.28498293515358364, "acc_stderr": 0.013191348179838792, "acc_norm": 0.3174061433447099, "acc_norm_stderr": 0.01360223908803817 }, "harness|ko_hellaswag|10": { "acc": 0.371539533957379, "acc_stderr": 0.004822286556305217, "acc_norm": 0.4738099980083649, "acc_norm_stderr": 0.004982931565945953 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.18128654970760233, "acc_stderr": 0.029547741687640024, "acc_norm": 0.18128654970760233, "acc_norm_stderr": 0.029547741687640024 }, "harness|ko_mmlu_management|5": { "acc": 0.36893203883495146, "acc_stderr": 0.047776151811567386, "acc_norm": 0.36893203883495146, "acc_norm_stderr": 0.047776151811567386 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.20689655172413793, "acc_stderr": 0.014485656041669173, "acc_norm": 0.20689655172413793, "acc_norm_stderr": 0.014485656041669173 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.24444444444444444, "acc_stderr": 0.037125378336148665, "acc_norm": 0.24444444444444444, "acc_norm_stderr": 0.037125378336148665 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.19, "acc_stderr": 0.03942772444036623, "acc_norm": 0.19, "acc_norm_stderr": 0.03942772444036623 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.20851063829787234, "acc_stderr": 0.026556982117838746, "acc_norm": 0.20851063829787234, "acc_norm_stderr": 0.026556982117838746 }, "harness|ko_mmlu_virology|5": { "acc": 0.2891566265060241, "acc_stderr": 0.03529486801511115, "acc_norm": 0.2891566265060241, "acc_norm_stderr": 0.03529486801511115 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.2379421221864952, "acc_stderr": 0.024185150647818707, "acc_norm": 0.2379421221864952, "acc_norm_stderr": 0.024185150647818707 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.15246636771300448, "acc_stderr": 0.024126204813252863, "acc_norm": 0.15246636771300448, "acc_norm_stderr": 0.024126204813252863 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.2748091603053435, "acc_stderr": 0.03915345408847836, "acc_norm": 0.2748091603053435, "acc_norm_stderr": 0.03915345408847836 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.3181818181818182, "acc_stderr": 0.033184773338453315, "acc_norm": 0.3181818181818182, "acc_norm_stderr": 0.033184773338453315 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135303, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135303 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.35294117647058826, "acc_stderr": 0.04755129616062947, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.04755129616062947 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.3445378151260504, "acc_stderr": 0.03086868260412162, "acc_norm": 0.3445378151260504, "acc_norm_stderr": 0.03086868260412162 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.36153846153846153, "acc_stderr": 0.024359581465396983, "acc_norm": 0.36153846153846153, "acc_norm_stderr": 0.024359581465396983 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536955, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536955 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.2037037037037037, "acc_stderr": 0.038935425188248475, "acc_norm": 
0.2037037037037037, "acc_norm_stderr": 0.038935425188248475 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.28078817733990147, "acc_stderr": 0.0316185633535861, "acc_norm": 0.28078817733990147, "acc_norm_stderr": 0.0316185633535861 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.3096774193548387, "acc_stderr": 0.026302774983517418, "acc_norm": 0.3096774193548387, "acc_norm_stderr": 0.026302774983517418 }, "harness|ko_mmlu_marketing|5": { "acc": 0.21794871794871795, "acc_stderr": 0.02704685763071668, "acc_norm": 0.21794871794871795, "acc_norm_stderr": 0.02704685763071668 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.2981132075471698, "acc_stderr": 0.028152837942493864, "acc_norm": 0.2981132075471698, "acc_norm_stderr": 0.028152837942493864 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.24545454545454545, "acc_stderr": 0.04122066502878284, "acc_norm": 0.24545454545454545, "acc_norm_stderr": 0.04122066502878284 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.026719240783712166, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.026719240783712166 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.03802039760107903, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.03802039760107903 }, "harness|ko_mmlu_sociology|5": { "acc": 0.263681592039801, "acc_stderr": 0.03115715086935557, "acc_norm": 0.263681592039801, "acc_norm_stderr": 0.03115715086935557 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.32947976878612717, "acc_stderr": 0.03583901754736412, "acc_norm": 0.32947976878612717, "acc_norm_stderr": 0.03583901754736412 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2671957671957672, "acc_stderr": 0.02278967314577656, "acc_norm": 0.2671957671957672, "acc_norm_stderr": 0.02278967314577656 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2638888888888889, "acc_stderr": 0.03685651095897532, "acc_norm": 0.2638888888888889, "acc_norm_stderr": 0.03685651095897532 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.26, "acc_stderr": 0.044084400227680794, "acc_norm": 0.26, "acc_norm_stderr": 0.044084400227680794 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.2138728323699422, "acc_stderr": 0.022075709251757173, "acc_norm": 0.2138728323699422, "acc_norm_stderr": 0.022075709251757173 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.2392638036809816, "acc_stderr": 0.03351953879521271, "acc_norm": 0.2392638036809816, "acc_norm_stderr": 0.03351953879521271 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.23765432098765432, "acc_stderr": 0.023683591837008553, "acc_norm": 0.23765432098765432, "acc_norm_stderr": 0.023683591837008553 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.28, "acc_stderr": 0.045126085985421296, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421296 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.3626943005181347, "acc_stderr": 0.03469713791704372, "acc_norm": 0.3626943005181347, "acc_norm_stderr": 0.03469713791704372 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.24561403508771928, "acc_stderr": 0.040493392977481404, "acc_norm": 0.24561403508771928, "acc_norm_stderr": 0.040493392977481404 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.3541284403669725, "acc_stderr": 0.020504729013829104, "acc_norm": 0.3541284403669725, "acc_norm_stderr": 
0.020504729013829104 }, "harness|ko_mmlu_formal_logic|5": { "acc": 0.36507936507936506, "acc_stderr": 0.04306241259127153, "acc_norm": 0.36507936507936506, "acc_norm_stderr": 0.04306241259127153 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.28431372549019607, "acc_stderr": 0.02582916327275748, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.02582916327275748 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.19, "acc_stderr": 0.03942772444036622, "acc_norm": 0.19, "acc_norm_stderr": 0.03942772444036622 }, "harness|ko_mmlu_international_law|5": { "acc": 0.14049586776859505, "acc_stderr": 0.031722334260021585, "acc_norm": 0.14049586776859505, "acc_norm_stderr": 0.031722334260021585 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.3157894736842105, "acc_stderr": 0.0378272898086547, "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.0378272898086547 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.2173202614379085, "acc_stderr": 0.016684820929148598, "acc_norm": 0.2173202614379085, "acc_norm_stderr": 0.016684820929148598 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2553191489361702, "acc_stderr": 0.026011992930902013, "acc_norm": 0.2553191489361702, "acc_norm_stderr": 0.026011992930902013 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.16071428571428573, "acc_stderr": 0.034859460964757394, "acc_norm": 0.16071428571428573, "acc_norm_stderr": 0.034859460964757394 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.4537037037037037, "acc_stderr": 0.033953227263757976, "acc_norm": 0.4537037037037037, "acc_norm_stderr": 0.033953227263757976 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.27262569832402234, "acc_stderr": 0.014893391735249608, "acc_norm": 0.27262569832402234, "acc_norm_stderr": 0.014893391735249608 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.2, "acc_stderr": 0.04020151261036846, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036846 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4411764705882353, "acc_stderr": 0.030161911930767102, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.030161911930767102 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.4, "acc_stderr": 0.03136250240935892, "acc_norm": 0.4, "acc_norm_stderr": 0.03136250240935892 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.21940928270042195, "acc_stderr": 0.026939106581553945, "acc_norm": 0.21940928270042195, "acc_norm_stderr": 0.026939106581553945 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.2438070404172099, "acc_stderr": 0.010966507972178475, "acc_norm": 0.2438070404172099, "acc_norm_stderr": 0.010966507972178475 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.24509803921568626, "acc_stderr": 0.03019028245350194, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.03019028245350194 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.2545454545454545, "acc_stderr": 0.03401506715249039, "acc_norm": 0.2545454545454545, "acc_norm_stderr": 0.03401506715249039 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.26560587515299877, "mc1_stderr": 0.015461027627253595, "mc2": 0.40641374284087445, "mc2_stderr": 0.014952562897051682 }, "harness|ko_commongen_v2|2": { "acc": 0.2857142857142857, "acc_stderr": 0.015531620786986736, "acc_norm": 0.3246753246753247, "acc_norm_stderr": 0.016098883939346463 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "jiwoochris/polyglot_350", "model_sha": "9fb5a66197344b0ec71467e384620bd610668339", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }