{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.30802047781569963, "acc_stderr": 0.01349142951729204, "acc_norm": 0.36945392491467577, "acc_norm_stderr": 0.014104578366491904 }, "harness|ko_hellaswag|10": { "acc": 0.35620394343756223, "acc_stderr": 0.004778978031389642, "acc_norm": 0.45488946425014937, "acc_norm_stderr": 0.004969431900874307 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.4093567251461988, "acc_stderr": 0.03771283107626544, "acc_norm": 0.4093567251461988, "acc_norm_stderr": 0.03771283107626544 }, "harness|ko_mmlu_management|5": { "acc": 0.49514563106796117, "acc_stderr": 0.049505043821289195, "acc_norm": 0.49514563106796117, "acc_norm_stderr": 0.049505043821289195 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.44699872286079184, "acc_stderr": 0.017779225233394213, "acc_norm": 0.44699872286079184, "acc_norm_stderr": 0.017779225233394213 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.34074074074074073, "acc_stderr": 0.040943762699967946, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.040943762699967946 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.045604802157206824, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206824 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.39574468085106385, "acc_stderr": 0.03196758697835362, "acc_norm": 0.39574468085106385, "acc_norm_stderr": 0.03196758697835362 }, "harness|ko_mmlu_virology|5": { "acc": 0.3313253012048193, "acc_stderr": 0.03664314777288088, "acc_norm": 0.3313253012048193, "acc_norm_stderr": 0.03664314777288088 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.36012861736334406, "acc_stderr": 0.027264297599804012, "acc_norm": 0.36012861736334406, "acc_norm_stderr": 0.027264297599804012 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.336322869955157, "acc_stderr": 0.031708824268455, "acc_norm": 0.336322869955157, "acc_norm_stderr": 0.031708824268455 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.45038167938931295, "acc_stderr": 0.04363643698524779, "acc_norm": 0.45038167938931295, "acc_norm_stderr": 0.04363643698524779 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.4898989898989899, "acc_stderr": 0.035616254886737454, "acc_norm": 0.4898989898989899, "acc_norm_stderr": 0.035616254886737454 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.36551724137931035, "acc_stderr": 0.040131241954243856, "acc_norm": 0.36551724137931035, "acc_norm_stderr": 0.040131241954243856 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.042801058373643966, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.042801058373643966 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.42016806722689076, "acc_stderr": 0.03206183783236152, "acc_norm": 0.42016806722689076, "acc_norm_stderr": 0.03206183783236152 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.3769230769230769, "acc_stderr": 0.024570975364225995, "acc_norm": 0.3769230769230769, "acc_norm_stderr": 0.024570975364225995 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.45, "acc_stderr": 0.049999999999999996, "acc_norm": 0.45, "acc_norm_stderr": 0.049999999999999996 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.4537037037037037, "acc_stderr": 0.048129173245368216, "acc_norm": 
0.4537037037037037, "acc_norm_stderr": 0.048129173245368216 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.30049261083743845, "acc_stderr": 0.03225799476233485, "acc_norm": 0.30049261083743845, "acc_norm_stderr": 0.03225799476233485 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.44193548387096776, "acc_stderr": 0.028251557906849738, "acc_norm": 0.44193548387096776, "acc_norm_stderr": 0.028251557906849738 }, "harness|ko_mmlu_marketing|5": { "acc": 0.594017094017094, "acc_stderr": 0.03217180182641086, "acc_norm": 0.594017094017094, "acc_norm_stderr": 0.03217180182641086 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4226415094339623, "acc_stderr": 0.030402331445769537, "acc_norm": 0.4226415094339623, "acc_norm_stderr": 0.030402331445769537 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.44545454545454544, "acc_stderr": 0.047605488214603246, "acc_norm": 0.44545454545454544, "acc_norm_stderr": 0.047605488214603246 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.26296296296296295, "acc_stderr": 0.026842057873833706, "acc_norm": 0.26296296296296295, "acc_norm_stderr": 0.026842057873833706 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.31125827814569534, "acc_stderr": 0.037804458505267334, "acc_norm": 0.31125827814569534, "acc_norm_stderr": 0.037804458505267334 }, "harness|ko_mmlu_sociology|5": { "acc": 0.48756218905472637, "acc_stderr": 0.03534439848539579, "acc_norm": 0.48756218905472637, "acc_norm_stderr": 0.03534439848539579 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.41040462427745666, "acc_stderr": 0.03750757044895537, "acc_norm": 0.41040462427745666, "acc_norm_stderr": 0.03750757044895537 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.0242785680243077, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.0242785680243077 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3472222222222222, "acc_stderr": 0.039812405437178615, "acc_norm": 0.3472222222222222, "acc_norm_stderr": 0.039812405437178615 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.43641618497109824, "acc_stderr": 0.026700545424943677, "acc_norm": 0.43641618497109824, "acc_norm_stderr": 0.026700545424943677 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.38650306748466257, "acc_stderr": 0.03825825548848607, "acc_norm": 0.38650306748466257, "acc_norm_stderr": 0.03825825548848607 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.36728395061728397, "acc_stderr": 0.026822801759507887, "acc_norm": 0.36728395061728397, "acc_norm_stderr": 0.026822801759507887 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.40414507772020725, "acc_stderr": 0.035415085788840193, "acc_norm": 0.40414507772020725, "acc_norm_stderr": 0.035415085788840193 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.22807017543859648, "acc_stderr": 0.03947152782669415, "acc_norm": 0.22807017543859648, "acc_norm_stderr": 0.03947152782669415 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.41834862385321103, "acc_stderr": 0.02114954859644388, "acc_norm": 0.41834862385321103, "acc_norm_stderr": 
0.02114954859644388 }, "harness|ko_mmlu_formal_logic|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04040610178208841, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04040610178208841 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.4019607843137255, "acc_stderr": 0.02807415894760066, "acc_norm": 0.4019607843137255, "acc_norm_stderr": 0.02807415894760066 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|ko_mmlu_international_law|5": { "acc": 0.5702479338842975, "acc_stderr": 0.04519082021319773, "acc_norm": 0.5702479338842975, "acc_norm_stderr": 0.04519082021319773 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.3355263157894737, "acc_stderr": 0.038424985593952694, "acc_norm": 0.3355263157894737, "acc_norm_stderr": 0.038424985593952694 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.28431372549019607, "acc_stderr": 0.018249024411207668, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.018249024411207668 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2695035460992908, "acc_stderr": 0.026469036818590634, "acc_norm": 0.2695035460992908, "acc_norm_stderr": 0.026469036818590634 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.26785714285714285, "acc_stderr": 0.04203277291467764, "acc_norm": 0.26785714285714285, "acc_norm_stderr": 0.04203277291467764 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.35185185185185186, "acc_stderr": 0.03256850570293647, "acc_norm": 0.35185185185185186, "acc_norm_stderr": 0.03256850570293647 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.35294117647058826, "acc_stderr": 0.02902942281568141, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.02902942281568141 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.4163265306122449, "acc_stderr": 0.03155782816556164, "acc_norm": 0.4163265306122449, "acc_norm_stderr": 0.03155782816556164 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.33755274261603374, "acc_stderr": 0.03078154910202622, "acc_norm": 0.33755274261603374, "acc_norm_stderr": 0.03078154910202622 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.28748370273794005, "acc_stderr": 0.011559337355708505, "acc_norm": 0.28748370273794005, "acc_norm_stderr": 0.011559337355708505 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.36764705882352944, "acc_stderr": 0.03384132045674118, "acc_norm": 0.36764705882352944, "acc_norm_stderr": 0.03384132045674118 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.37575757575757573, "acc_stderr": 0.037818873532059816, "acc_norm": 0.37575757575757573, "acc_norm_stderr": 0.037818873532059816 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.29498164014687883, "mc1_stderr": 0.015964400965589685, "mc2": 0.4639200463938291, "mc2_stderr": 0.015440957243862982 }, "harness|ko_commongen_v2|2": { "acc": 0.3612750885478158, "acc_stderr": 0.016515463022412, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.017014038119297473 } }, "versions": { "all": 0, 
"harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "cepiloth/ko-llama2-13b-finetune-ex", "model_sha": "f1dcbe9a1ff2ea479a2094f5058226f796341bfd", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }