{
  "results": {
    "harness|ko_arc_challenge|25": { "acc": 0.3779863481228669, "acc_stderr": 0.014169664520303096, "acc_norm": 0.4325938566552901, "acc_norm_stderr": 0.014478005694182531 },
    "harness|ko_hellaswag|10": { "acc": 0.4039036048595897, "acc_stderr": 0.004896757857022551, "acc_norm": 0.5393347938657638, "acc_norm_stderr": 0.004974316807920405 },
    "harness|ko_mmlu_world_religions|5": { "acc": 0.5087719298245614, "acc_stderr": 0.038342347441649924, "acc_norm": 0.5087719298245614, "acc_norm_stderr": 0.038342347441649924 },
    "harness|ko_mmlu_management|5": { "acc": 0.44660194174757284, "acc_stderr": 0.049224241534589326, "acc_norm": 0.44660194174757284, "acc_norm_stderr": 0.049224241534589326 },
    "harness|ko_mmlu_miscellaneous|5": { "acc": 0.4495530012771392, "acc_stderr": 0.017788725283507337, "acc_norm": 0.4495530012771392, "acc_norm_stderr": 0.017788725283507337 },
    "harness|ko_mmlu_anatomy|5": { "acc": 0.3925925925925926, "acc_stderr": 0.042185062153688786, "acc_norm": 0.3925925925925926, "acc_norm_stderr": 0.042185062153688786 },
    "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.23, "acc_stderr": 0.04229525846816505, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816505 },
    "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.33617021276595743, "acc_stderr": 0.030881618520676942, "acc_norm": 0.33617021276595743, "acc_norm_stderr": 0.030881618520676942 },
    "harness|ko_mmlu_virology|5": { "acc": 0.3433734939759036, "acc_stderr": 0.03696584317010601, "acc_norm": 0.3433734939759036, "acc_norm_stderr": 0.03696584317010601 },
    "harness|ko_mmlu_philosophy|5": { "acc": 0.4694533762057878, "acc_stderr": 0.02834504586484068, "acc_norm": 0.4694533762057878, "acc_norm_stderr": 0.02834504586484068 },
    "harness|ko_mmlu_human_aging|5": { "acc": 0.3632286995515695, "acc_stderr": 0.03227790442850499, "acc_norm": 0.3632286995515695, "acc_norm_stderr": 0.03227790442850499 },
    "harness|ko_mmlu_human_sexuality|5": { "acc": 0.44274809160305345, "acc_stderr": 0.04356447202665069, "acc_norm": 0.44274809160305345, "acc_norm_stderr": 0.04356447202665069 },
    "harness|ko_mmlu_medical_genetics|5": { "acc": 0.32, "acc_stderr": 0.04688261722621502, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621502 },
    "harness|ko_mmlu_high_school_geography|5": { "acc": 0.51010101010101, "acc_stderr": 0.035616254886737454, "acc_norm": 0.51010101010101, "acc_norm_stderr": 0.035616254886737454 },
    "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3586206896551724, "acc_stderr": 0.03996629574876719, "acc_norm": 0.3586206896551724, "acc_norm_stderr": 0.03996629574876719 },
    "harness|ko_mmlu_college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237655, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237655 },
    "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.42016806722689076, "acc_stderr": 0.03206183783236152, "acc_norm": 0.42016806722689076, "acc_norm_stderr": 0.03206183783236152 },
    "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.4282051282051282, "acc_stderr": 0.025088301454694834, "acc_norm": 0.4282051282051282, "acc_norm_stderr": 0.025088301454694834 },
    "harness|ko_mmlu_computer_security|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 },
    "harness|ko_mmlu_global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 },
    "harness|ko_mmlu_jurisprudence|5": { "acc": 0.37962962962962965, "acc_stderr": 0.04691521224077742, "acc_norm": 0.37962962962962965, "acc_norm_stderr": 0.04691521224077742 },
    "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.33004926108374383, "acc_stderr": 0.03308530426228258, "acc_norm": 0.33004926108374383, "acc_norm_stderr": 0.03308530426228258 },
    "harness|ko_mmlu_high_school_biology|5": { "acc": 0.41935483870967744, "acc_stderr": 0.02807158890109185, "acc_norm": 0.41935483870967744, "acc_norm_stderr": 0.02807158890109185 },
    "harness|ko_mmlu_marketing|5": { "acc": 0.5854700854700855, "acc_stderr": 0.03227396567623779, "acc_norm": 0.5854700854700855, "acc_norm_stderr": 0.03227396567623779 },
    "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.42641509433962266, "acc_stderr": 0.03043779434298305, "acc_norm": 0.42641509433962266, "acc_norm_stderr": 0.03043779434298305 },
    "harness|ko_mmlu_public_relations|5": { "acc": 0.41818181818181815, "acc_stderr": 0.04724577405731571, "acc_norm": 0.41818181818181815, "acc_norm_stderr": 0.04724577405731571 },
    "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2962962962962963, "acc_stderr": 0.027840811495871927, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.027840811495871927 },
    "harness|ko_mmlu_high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.03757949922943343, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943343 },
    "harness|ko_mmlu_sociology|5": { "acc": 0.5323383084577115, "acc_stderr": 0.03528131472933607, "acc_norm": 0.5323383084577115, "acc_norm_stderr": 0.03528131472933607 },
    "harness|ko_mmlu_college_medicine|5": { "acc": 0.4393063583815029, "acc_stderr": 0.037842719328874674, "acc_norm": 0.4393063583815029, "acc_norm_stderr": 0.037842719328874674 },
    "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.30423280423280424, "acc_stderr": 0.023695415009463087, "acc_norm": 0.30423280423280424, "acc_norm_stderr": 0.023695415009463087 },
    "harness|ko_mmlu_college_biology|5": { "acc": 0.3958333333333333, "acc_stderr": 0.04089465449325582, "acc_norm": 0.3958333333333333, "acc_norm_stderr": 0.04089465449325582 },
    "harness|ko_mmlu_college_chemistry|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 },
    "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.56, "acc_stderr": 0.049888765156985884, "acc_norm": 0.56, "acc_norm_stderr": 0.049888765156985884 },
    "harness|ko_mmlu_moral_disputes|5": { "acc": 0.407514450867052, "acc_stderr": 0.026454578146931498, "acc_norm": 0.407514450867052, "acc_norm_stderr": 0.026454578146931498 },
    "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.39263803680981596, "acc_stderr": 0.03836740907831029, "acc_norm": 0.39263803680981596, "acc_norm_stderr": 0.03836740907831029 },
    "harness|ko_mmlu_prehistory|5": { "acc": 0.4166666666666667, "acc_stderr": 0.02743162372241502, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.02743162372241502 },
    "harness|ko_mmlu_college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 },
    "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.46632124352331605, "acc_stderr": 0.036002440698671784, "acc_norm": 0.46632124352331605, "acc_norm_stderr": 0.036002440698671784 },
    "harness|ko_mmlu_econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.0414243971948936, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.0414243971948936 },
    "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.4990825688073395, "acc_stderr": 0.021437287056051215, "acc_norm": 0.4990825688073395, "acc_norm_stderr": 0.021437287056051215 },
    "harness|ko_mmlu_formal_logic|5": { "acc": 0.30952380952380953, "acc_stderr": 0.04134913018303316, "acc_norm": 0.30952380952380953, "acc_norm_stderr": 0.04134913018303316 },
    "harness|ko_mmlu_nutrition|5": { "acc": 0.43790849673202614, "acc_stderr": 0.02840830202033269, "acc_norm": 0.43790849673202614, "acc_norm_stderr": 0.02840830202033269 },
    "harness|ko_mmlu_business_ethics|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 },
    "harness|ko_mmlu_international_law|5": { "acc": 0.6363636363636364, "acc_stderr": 0.043913262867240704, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.043913262867240704 },
    "harness|ko_mmlu_astronomy|5": { "acc": 0.46710526315789475, "acc_stderr": 0.04060127035236395, "acc_norm": 0.46710526315789475, "acc_norm_stderr": 0.04060127035236395 },
    "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3562091503267974, "acc_stderr": 0.0193733324207245, "acc_norm": 0.3562091503267974, "acc_norm_stderr": 0.0193733324207245 },
    "harness|ko_mmlu_professional_accounting|5": { "acc": 0.32269503546099293, "acc_stderr": 0.027889139300534785, "acc_norm": 0.32269503546099293, "acc_norm_stderr": 0.027889139300534785 },
    "harness|ko_mmlu_machine_learning|5": { "acc": 0.32142857142857145, "acc_stderr": 0.04432804055291519, "acc_norm": 0.32142857142857145, "acc_norm_stderr": 0.04432804055291519 },
    "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.4166666666666667, "acc_stderr": 0.033622774366080424, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.033622774366080424 },
    "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.24022346368715083, "acc_stderr": 0.01428834380392531, "acc_norm": 0.24022346368715083, "acc_norm_stderr": 0.01428834380392531 },
    "harness|ko_mmlu_college_computer_science|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 },
    "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.45, "acc_stderr": 0.049999999999999996, "acc_norm": 0.45, "acc_norm_stderr": 0.049999999999999996 },
    "harness|ko_mmlu_professional_medicine|5": { "acc": 0.3492647058823529, "acc_stderr": 0.02895975519682485, "acc_norm": 0.3492647058823529, "acc_norm_stderr": 0.02895975519682485 },
    "harness|ko_mmlu_security_studies|5": { "acc": 0.4122448979591837, "acc_stderr": 0.03151236044674281, "acc_norm": 0.4122448979591837, "acc_norm_stderr": 0.03151236044674281 },
    "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5232067510548524, "acc_stderr": 0.032512152011410174, "acc_norm": 0.5232067510548524, "acc_norm_stderr": 0.032512152011410174 },
    "harness|ko_mmlu_professional_law|5": { "acc": 0.3239895697522816, "acc_stderr": 0.011952840809646563, "acc_norm": 0.3239895697522816, "acc_norm_stderr": 0.011952840809646563 },
    "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.44607843137254904, "acc_stderr": 0.03488845451304974, "acc_norm": 0.44607843137254904, "acc_norm_stderr": 0.03488845451304974 },
    "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.46060606060606063, "acc_stderr": 0.03892207016552013, "acc_norm": 0.46060606060606063, "acc_norm_stderr": 0.03892207016552013 },
    "harness|ko_truthfulqa_mc|0": { "mc1": 0.2864137086903305, "mc1_stderr": 0.01582614243950234, "mc2": 0.44508082063982635, "mc2_stderr": 0.014978253495446162 },
    "harness|ko_commongen_v2|2": { "acc": 0.40968122786304606, "acc_stderr": 0.01690756819221947, "acc_norm": 0.5112160566706021, "acc_norm_stderr": 0.01718602846948929 }
  },
  "versions": {
    "all": 0,
    "harness|ko_arc_challenge|25": 0,
    "harness|ko_hellaswag|10": 0,
    "harness|ko_mmlu_world_religions|5": 1,
    "harness|ko_mmlu_management|5": 1,
    "harness|ko_mmlu_miscellaneous|5": 1,
    "harness|ko_mmlu_anatomy|5": 1,
    "harness|ko_mmlu_abstract_algebra|5": 1,
    "harness|ko_mmlu_conceptual_physics|5": 1,
    "harness|ko_mmlu_virology|5": 1,
    "harness|ko_mmlu_philosophy|5": 1,
    "harness|ko_mmlu_human_aging|5": 1,
    "harness|ko_mmlu_human_sexuality|5": 1,
    "harness|ko_mmlu_medical_genetics|5": 1,
    "harness|ko_mmlu_high_school_geography|5": 1,
    "harness|ko_mmlu_electrical_engineering|5": 1,
    "harness|ko_mmlu_college_physics|5": 1,
    "harness|ko_mmlu_high_school_microeconomics|5": 1,
    "harness|ko_mmlu_high_school_macroeconomics|5": 1,
    "harness|ko_mmlu_computer_security|5": 1,
    "harness|ko_mmlu_global_facts|5": 1,
    "harness|ko_mmlu_jurisprudence|5": 1,
    "harness|ko_mmlu_high_school_chemistry|5": 1,
    "harness|ko_mmlu_high_school_biology|5": 1,
    "harness|ko_mmlu_marketing|5": 1,
    "harness|ko_mmlu_clinical_knowledge|5": 1,
    "harness|ko_mmlu_public_relations|5": 1,
    "harness|ko_mmlu_high_school_mathematics|5": 1,
    "harness|ko_mmlu_high_school_physics|5": 1,
    "harness|ko_mmlu_sociology|5": 1,
    "harness|ko_mmlu_college_medicine|5": 1,
    "harness|ko_mmlu_elementary_mathematics|5": 1,
    "harness|ko_mmlu_college_biology|5": 1,
    "harness|ko_mmlu_college_chemistry|5": 1,
    "harness|ko_mmlu_us_foreign_policy|5": 1,
    "harness|ko_mmlu_moral_disputes|5": 1,
    "harness|ko_mmlu_logical_fallacies|5": 1,
    "harness|ko_mmlu_prehistory|5": 1,
    "harness|ko_mmlu_college_mathematics|5": 1,
    "harness|ko_mmlu_high_school_government_and_politics|5": 1,
    "harness|ko_mmlu_econometrics|5": 1,
    "harness|ko_mmlu_high_school_psychology|5": 1,
    "harness|ko_mmlu_formal_logic|5": 1,
    "harness|ko_mmlu_nutrition|5": 1,
    "harness|ko_mmlu_business_ethics|5": 1,
    "harness|ko_mmlu_international_law|5": 1,
    "harness|ko_mmlu_astronomy|5": 1,
    "harness|ko_mmlu_professional_psychology|5": 1,
    "harness|ko_mmlu_professional_accounting|5": 1,
    "harness|ko_mmlu_machine_learning|5": 1,
    "harness|ko_mmlu_high_school_statistics|5": 1,
    "harness|ko_mmlu_moral_scenarios|5": 1,
    "harness|ko_mmlu_college_computer_science|5": 1,
    "harness|ko_mmlu_high_school_computer_science|5": 1,
    "harness|ko_mmlu_professional_medicine|5": 1,
    "harness|ko_mmlu_security_studies|5": 1,
    "harness|ko_mmlu_high_school_world_history|5": 1,
    "harness|ko_mmlu_professional_law|5": 1,
    "harness|ko_mmlu_high_school_us_history|5": 1,
    "harness|ko_mmlu_high_school_european_history|5": 1,
    "harness|ko_truthfulqa_mc|0": 0,
    "harness|ko_commongen_v2|2": 1
  },
  "config_general": {
    "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.14",
    "model_sha": "06b824795d8f7b9efa5cbe1c3a7b21e7c939bf8b",
    "model_dtype": "torch.float16",
    "lighteval_sha": "",
    "num_few_shot_default": 0,
    "num_fewshot_seeds": 1,
    "override_batch_size": 1,
    "max_samples": null
  }
}