{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3916382252559727, "acc_stderr": 0.014264122124938215, "acc_norm": 0.45733788395904434, "acc_norm_stderr": 0.014558106543924067 }, "harness|ko_hellaswag|10": { "acc": 0.4204341764588727, "acc_stderr": 0.004926198483948701, "acc_norm": 0.5538737303326031, "acc_norm_stderr": 0.004960732382255241 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.5321637426900585, "acc_stderr": 0.03826882417660368, "acc_norm": 0.5321637426900585, "acc_norm_stderr": 0.03826882417660368 }, "harness|ko_mmlu_management|5": { "acc": 0.5339805825242718, "acc_stderr": 0.04939291447273481, "acc_norm": 0.5339805825242718, "acc_norm_stderr": 0.04939291447273481 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.4955300127713921, "acc_stderr": 0.017879248970584388, "acc_norm": 0.4955300127713921, "acc_norm_stderr": 0.017879248970584388 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.4148148148148148, "acc_stderr": 0.042561937679014075, "acc_norm": 0.4148148148148148, "acc_norm_stderr": 0.042561937679014075 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.047609522856952365, "acc_norm": 0.34, "acc_norm_stderr": 0.047609522856952365 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.3446808510638298, "acc_stderr": 0.03106898596312215, "acc_norm": 0.3446808510638298, "acc_norm_stderr": 0.03106898596312215 }, "harness|ko_mmlu_virology|5": { "acc": 0.42771084337349397, "acc_stderr": 0.038515976837185335, "acc_norm": 0.42771084337349397, "acc_norm_stderr": 0.038515976837185335 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.45980707395498394, "acc_stderr": 0.028306190403305696, "acc_norm": 0.45980707395498394, "acc_norm_stderr": 0.028306190403305696 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.3901345291479821, "acc_stderr": 0.03273766725459157, "acc_norm": 0.3901345291479821, "acc_norm_stderr": 0.03273766725459157 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.48854961832061067, "acc_stderr": 0.043841400240780176, "acc_norm": 0.48854961832061067, "acc_norm_stderr": 0.043841400240780176 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5606060606060606, "acc_stderr": 0.035360859475294805, "acc_norm": 0.5606060606060606, "acc_norm_stderr": 0.035360859475294805 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.41379310344827586, "acc_stderr": 0.04104269211806232, "acc_norm": 0.41379310344827586, "acc_norm_stderr": 0.04104269211806232 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.04280105837364396, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.04280105837364396 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.42857142857142855, "acc_stderr": 0.03214536859788639, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.03214536859788639 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.37948717948717947, "acc_stderr": 0.024603626924097417, "acc_norm": 0.37948717948717947, "acc_norm_stderr": 0.024603626924097417 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.4537037037037037, "acc_stderr": 0.04812917324536823, "acc_norm": 
0.4537037037037037, "acc_norm_stderr": 0.04812917324536823 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3842364532019704, "acc_stderr": 0.034223985656575515, "acc_norm": 0.3842364532019704, "acc_norm_stderr": 0.034223985656575515 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.43870967741935485, "acc_stderr": 0.028229497320317216, "acc_norm": 0.43870967741935485, "acc_norm_stderr": 0.028229497320317216 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6452991452991453, "acc_stderr": 0.03134250486245402, "acc_norm": 0.6452991452991453, "acc_norm_stderr": 0.03134250486245402 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.44150943396226416, "acc_stderr": 0.030561590426731837, "acc_norm": 0.44150943396226416, "acc_norm_stderr": 0.030561590426731837 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.509090909090909, "acc_stderr": 0.0478833976870286, "acc_norm": 0.509090909090909, "acc_norm_stderr": 0.0478833976870286 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.24814814814814815, "acc_stderr": 0.0263357394040558, "acc_norm": 0.24814814814814815, "acc_norm_stderr": 0.0263357394040558 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.03757949922943343, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943343 }, "harness|ko_mmlu_sociology|5": { "acc": 0.572139303482587, "acc_stderr": 0.03498541988407795, "acc_norm": 0.572139303482587, "acc_norm_stderr": 0.03498541988407795 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3699421965317919, "acc_stderr": 0.03681229633394319, "acc_norm": 0.3699421965317919, "acc_norm_stderr": 0.03681229633394319 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.3201058201058201, "acc_stderr": 0.024026846392873506, "acc_norm": 0.3201058201058201, "acc_norm_stderr": 0.024026846392873506 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3611111111111111, "acc_stderr": 0.040166600304512336, "acc_norm": 0.3611111111111111, "acc_norm_stderr": 0.040166600304512336 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.4479768786127168, "acc_stderr": 0.026772990653361826, "acc_norm": 0.4479768786127168, "acc_norm_stderr": 0.026772990653361826 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.4294478527607362, "acc_stderr": 0.038890666191127216, "acc_norm": 0.4294478527607362, "acc_norm_stderr": 0.038890666191127216 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.4537037037037037, "acc_stderr": 0.0277012284685426, "acc_norm": 0.4537037037037037, "acc_norm_stderr": 0.0277012284685426 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.45595854922279794, "acc_stderr": 0.035944137112724366, "acc_norm": 0.45595854922279794, "acc_norm_stderr": 0.035944137112724366 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.04142439719489361, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.04142439719489361 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.47155963302752296, "acc_stderr": 0.021402615697348047, "acc_norm": 0.47155963302752296, "acc_norm_stderr": 0.021402615697348047 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.30158730158730157, "acc_stderr": 0.041049472699033945, "acc_norm": 0.30158730158730157, "acc_norm_stderr": 0.041049472699033945 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.43137254901960786, "acc_stderr": 0.028358956313423552, "acc_norm": 0.43137254901960786, "acc_norm_stderr": 0.028358956313423552 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6033057851239669, "acc_stderr": 0.044658697805310094, "acc_norm": 0.6033057851239669, "acc_norm_stderr": 0.044658697805310094 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.4407894736842105, "acc_stderr": 0.04040311062490436, "acc_norm": 0.4407894736842105, "acc_norm_stderr": 0.04040311062490436 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3349673202614379, "acc_stderr": 0.01909422816700032, "acc_norm": 0.3349673202614379, "acc_norm_stderr": 0.01909422816700032 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3404255319148936, "acc_stderr": 0.02826765748265014, "acc_norm": 0.3404255319148936, "acc_norm_stderr": 0.02826765748265014 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.20535714285714285, "acc_stderr": 0.038342410214190735, "acc_norm": 0.20535714285714285, "acc_norm_stderr": 0.038342410214190735 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3055555555555556, "acc_stderr": 0.03141554629402543, "acc_norm": 0.3055555555555556, "acc_norm_stderr": 0.03141554629402543 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.27941176470588236, "acc_stderr": 0.02725720260611495, "acc_norm": 0.27941176470588236, "acc_norm_stderr": 0.02725720260611495 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.49387755102040815, "acc_stderr": 0.032006820201639086, "acc_norm": 0.49387755102040815, "acc_norm_stderr": 0.032006820201639086 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5232067510548524, "acc_stderr": 0.03251215201141018, "acc_norm": 0.5232067510548524, "acc_norm_stderr": 0.03251215201141018 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.3076923076923077, "acc_stderr": 0.011787910251664587, "acc_norm": 0.3076923076923077, "acc_norm_stderr": 0.011787910251664587 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.43137254901960786, "acc_stderr": 0.03476099060501636, "acc_norm": 0.43137254901960786, "acc_norm_stderr": 0.03476099060501636 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5151515151515151, "acc_stderr": 0.03902551007374448, "acc_norm": 0.5151515151515151, "acc_norm_stderr": 0.03902551007374448 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.29008567931456547, "mc1_stderr": 0.01588623687420952, "mc2": 0.4626627507088543, "mc2_stderr": 0.015202563658823371 }, "harness|ko_commongen_v2|2": { "acc": 0.51357733175915, "acc_stderr": 0.01718401506040145, "acc_norm": 0.5442739079102715, "acc_norm_stderr": 0.01712282914329265 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "jiwoochris/ko-llama2-13b-v6", "model_sha": "2606639bb18ca27586615693d937d41d1a756391", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }