{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.38139931740614336, "acc_stderr": 0.014194389086685251, "acc_norm": 0.45307167235494883, "acc_norm_stderr": 0.014546892052005628 }, "harness|ko_hellaswag|10": { "acc": 0.41057558255327625, "acc_stderr": 0.004909328992915067, "acc_norm": 0.5488946425014938, "acc_norm_stderr": 0.004965866098318175 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.30994152046783624, "acc_stderr": 0.035469769593931624, "acc_norm": 0.30994152046783624, "acc_norm_stderr": 0.035469769593931624 }, "harness|ko_mmlu_management|5": { "acc": 0.32038834951456313, "acc_stderr": 0.046202840822800406, "acc_norm": 0.32038834951456313, "acc_norm_stderr": 0.046202840822800406 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.31417624521072796, "acc_stderr": 0.016599291735884893, "acc_norm": 0.31417624521072796, "acc_norm_stderr": 0.016599291735884893 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.34074074074074073, "acc_stderr": 0.04094376269996793, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.04094376269996793 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.2553191489361702, "acc_stderr": 0.02850485647051419, "acc_norm": 0.2553191489361702, "acc_norm_stderr": 0.02850485647051419 }, "harness|ko_mmlu_virology|5": { "acc": 0.3072289156626506, "acc_stderr": 0.035915667978246635, "acc_norm": 0.3072289156626506, "acc_norm_stderr": 0.035915667978246635 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.3954983922829582, "acc_stderr": 0.027770918531427834, "acc_norm": 0.3954983922829582, "acc_norm_stderr": 0.027770918531427834 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.29596412556053814, "acc_stderr": 0.03063659134869982, "acc_norm": 0.29596412556053814, "acc_norm_stderr": 0.03063659134869982 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.3816793893129771, "acc_stderr": 0.04260735157644561, "acc_norm": 0.3816793893129771, "acc_norm_stderr": 0.04260735157644561 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.3181818181818182, "acc_stderr": 0.03318477333845332, "acc_norm": 0.3181818181818182, "acc_norm_stderr": 0.03318477333845332 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.2827586206896552, "acc_stderr": 0.037528339580033376, "acc_norm": 0.2827586206896552, "acc_norm_stderr": 0.037528339580033376 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.09803921568627451, "acc_stderr": 0.029589188531613252, "acc_norm": 0.09803921568627451, "acc_norm_stderr": 0.029589188531613252 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.29831932773109243, "acc_stderr": 0.02971914287634286, "acc_norm": 0.29831932773109243, "acc_norm_stderr": 0.02971914287634286 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.2743589743589744, "acc_stderr": 0.022622765767493214, "acc_norm": 0.2743589743589744, "acc_norm_stderr": 0.022622765767493214 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.3425925925925926, "acc_stderr": 0.045879047413018105, "acc_norm": 
0.3425925925925926, "acc_norm_stderr": 0.045879047413018105 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.2857142857142857, "acc_stderr": 0.03178529710642748, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.03178529710642748 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.34838709677419355, "acc_stderr": 0.027104826328100944, "acc_norm": 0.34838709677419355, "acc_norm_stderr": 0.027104826328100944 }, "harness|ko_mmlu_marketing|5": { "acc": 0.37606837606837606, "acc_stderr": 0.031733936329694824, "acc_norm": 0.37606837606837606, "acc_norm_stderr": 0.031733936329694824 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.27547169811320754, "acc_stderr": 0.027495663683724046, "acc_norm": 0.27547169811320754, "acc_norm_stderr": 0.027495663683724046 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.2909090909090909, "acc_stderr": 0.04350271442923243, "acc_norm": 0.2909090909090909, "acc_norm_stderr": 0.04350271442923243 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.25555555555555554, "acc_stderr": 0.026593939101844065, "acc_norm": 0.25555555555555554, "acc_norm_stderr": 0.026593939101844065 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2582781456953642, "acc_stderr": 0.035737053147634576, "acc_norm": 0.2582781456953642, "acc_norm_stderr": 0.035737053147634576 }, "harness|ko_mmlu_sociology|5": { "acc": 0.373134328358209, "acc_stderr": 0.034198326081760065, "acc_norm": 0.373134328358209, "acc_norm_stderr": 0.034198326081760065 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3063583815028902, "acc_stderr": 0.03514942551267438, "acc_norm": 0.3063583815028902, "acc_norm_stderr": 0.03514942551267438 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2830687830687831, "acc_stderr": 0.023201392938194978, "acc_norm": 0.2830687830687831, "acc_norm_stderr": 0.023201392938194978 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3611111111111111, "acc_stderr": 0.04016660030451233, "acc_norm": 0.3611111111111111, "acc_norm_stderr": 0.04016660030451233 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.12, "acc_stderr": 0.03265986323710906, "acc_norm": 0.12, "acc_norm_stderr": 0.03265986323710906 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.37, "acc_stderr": 0.048523658709390974, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709390974 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.37572254335260113, "acc_stderr": 0.02607431485165708, "acc_norm": 0.37572254335260113, "acc_norm_stderr": 0.02607431485165708 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3006134969325153, "acc_stderr": 0.0360251131880677, "acc_norm": 0.3006134969325153, "acc_norm_stderr": 0.0360251131880677 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.35802469135802467, "acc_stderr": 0.0266756119260371, "acc_norm": 0.35802469135802467, "acc_norm_stderr": 0.0266756119260371 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.34196891191709844, "acc_stderr": 0.03423465100104281, "acc_norm": 0.34196891191709844, "acc_norm_stderr": 0.03423465100104281 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.30701754385964913, "acc_stderr": 0.0433913832257986, "acc_norm": 0.30701754385964913, "acc_norm_stderr": 0.0433913832257986 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.3174311926605505, "acc_stderr": 0.019957152198460504, "acc_norm": 0.3174311926605505, "acc_norm_stderr": 0.019957152198460504 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.2777777777777778, "acc_stderr": 0.04006168083848876, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.04006168083848876 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.3137254901960784, "acc_stderr": 0.02656892101545716, "acc_norm": 0.3137254901960784, "acc_norm_stderr": 0.02656892101545716 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|ko_mmlu_international_law|5": { "acc": 0.4380165289256198, "acc_stderr": 0.045291468044357915, "acc_norm": 0.4380165289256198, "acc_norm_stderr": 0.045291468044357915 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.3223684210526316, "acc_stderr": 0.03803510248351586, "acc_norm": 0.3223684210526316, "acc_norm_stderr": 0.03803510248351586 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3137254901960784, "acc_stderr": 0.01877168389352817, "acc_norm": 0.3137254901960784, "acc_norm_stderr": 0.01877168389352817 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.31560283687943264, "acc_stderr": 0.027724989449509317, "acc_norm": 0.31560283687943264, "acc_norm_stderr": 0.027724989449509317 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.22321428571428573, "acc_stderr": 0.03952301967702511, "acc_norm": 0.22321428571428573, "acc_norm_stderr": 0.03952301967702511 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.23148148148148148, "acc_stderr": 0.028765111718046944, "acc_norm": 0.23148148148148148, "acc_norm_stderr": 0.028765111718046944 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2435754189944134, "acc_stderr": 0.01435591196476786, "acc_norm": 0.2435754189944134, "acc_norm_stderr": 0.01435591196476786 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.22058823529411764, "acc_stderr": 0.02518778666022727, "acc_norm": 0.22058823529411764, "acc_norm_stderr": 0.02518778666022727 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.40816326530612246, "acc_stderr": 0.03146465712827423, "acc_norm": 0.40816326530612246, "acc_norm_stderr": 0.03146465712827423 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.43037974683544306, "acc_stderr": 0.032230171959375976, "acc_norm": 0.43037974683544306, "acc_norm_stderr": 0.032230171959375976 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.29791395045632335, "acc_stderr": 0.011680717340400057, "acc_norm": 0.29791395045632335, "acc_norm_stderr": 0.011680717340400057 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.3431372549019608, "acc_stderr": 0.033321399446680854, "acc_norm": 0.3431372549019608, "acc_norm_stderr": 0.033321399446680854 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.3151515151515151, "acc_stderr": 0.0362773057502241, "acc_norm": 0.3151515151515151, "acc_norm_stderr": 0.0362773057502241 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.27906976744186046, "mc1_stderr": 0.015702107090627884, "mc2": 0.43717065836326097, "mc2_stderr": 0.014982579691917674 }, "harness|ko_commongen_v2|2": { "acc": 0.32585596221959856, "acc_stderr": 0.016114023894800322, "acc_norm": 0.3754427390791027, "acc_norm_stderr": 0.016648411589511088 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 
0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v7", "model_sha": "c0836cce043af8ee88da9cb52b2032d3fa8c5ddd", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }