{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.2977815699658703, "acc_stderr": 0.01336308010724449, "acc_norm": 0.3370307167235495, "acc_norm_stderr": 0.013813476652902272 }, "harness|ko_hellaswag|10": { "acc": 0.38458474407488547, "acc_stderr": 0.004855027248398158, "acc_norm": 0.4970125473013344, "acc_norm_stderr": 0.004989692344313998 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.1695906432748538, "acc_stderr": 0.028782108105401712, "acc_norm": 0.1695906432748538, "acc_norm_stderr": 0.028782108105401712 }, "harness|ko_mmlu_management|5": { "acc": 0.3300970873786408, "acc_stderr": 0.0465614711001235, "acc_norm": 0.3300970873786408, "acc_norm_stderr": 0.0465614711001235 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.20561941251596424, "acc_stderr": 0.014452500456785823, "acc_norm": 0.20561941251596424, "acc_norm_stderr": 0.014452500456785823 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.22962962962962963, "acc_stderr": 0.036333844140734636, "acc_norm": 0.22962962962962963, "acc_norm_stderr": 0.036333844140734636 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.17872340425531916, "acc_stderr": 0.025045373272050957, "acc_norm": 0.17872340425531916, "acc_norm_stderr": 0.025045373272050957 }, "harness|ko_mmlu_virology|5": { "acc": 0.20481927710843373, "acc_stderr": 0.031417842916639245, "acc_norm": 0.20481927710843373, "acc_norm_stderr": 0.031417842916639245 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.24115755627009647, "acc_stderr": 0.024296594034763426, "acc_norm": 0.24115755627009647, "acc_norm_stderr": 0.024296594034763426 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.1031390134529148, "acc_stderr": 0.020412564289839272, "acc_norm": 0.1031390134529148, "acc_norm_stderr": 0.020412564289839272 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.2748091603053435, "acc_stderr": 0.039153454088478354, "acc_norm": 0.2748091603053435, "acc_norm_stderr": 0.039153454088478354 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.3333333333333333, "acc_stderr": 0.03358618145732523, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.03358618145732523 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.2413793103448276, "acc_stderr": 0.03565998174135302, "acc_norm": 0.2413793103448276, "acc_norm_stderr": 0.03565998174135302 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.3627450980392157, "acc_stderr": 0.04784060704105653, "acc_norm": 0.3627450980392157, "acc_norm_stderr": 0.04784060704105653 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.3487394957983193, "acc_stderr": 0.030956636328566548, "acc_norm": 0.3487394957983193, "acc_norm_stderr": 0.030956636328566548 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.36153846153846153, "acc_stderr": 0.024359581465396983, "acc_norm": 0.36153846153846153, "acc_norm_stderr": 0.024359581465396983 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.24, "acc_stderr": 0.04292346959909284, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909284 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.2, "acc_stderr": 0.040201512610368445, "acc_norm": 0.2, "acc_norm_stderr": 0.040201512610368445 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.21296296296296297, "acc_stderr": 0.03957835471980981, "acc_norm": 
0.21296296296296297, "acc_norm_stderr": 0.03957835471980981 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.27586206896551724, "acc_stderr": 0.03144712581678241, "acc_norm": 0.27586206896551724, "acc_norm_stderr": 0.03144712581678241 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.3193548387096774, "acc_stderr": 0.02652270967466777, "acc_norm": 0.3193548387096774, "acc_norm_stderr": 0.02652270967466777 }, "harness|ko_mmlu_marketing|5": { "acc": 0.17094017094017094, "acc_stderr": 0.024662496845209828, "acc_norm": 0.17094017094017094, "acc_norm_stderr": 0.024662496845209828 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.3018867924528302, "acc_stderr": 0.02825420034443866, "acc_norm": 0.3018867924528302, "acc_norm_stderr": 0.02825420034443866 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.22727272727272727, "acc_stderr": 0.040139645540727735, "acc_norm": 0.22727272727272727, "acc_norm_stderr": 0.040139645540727735 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2740740740740741, "acc_stderr": 0.027195934804085626, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.027195934804085626 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696 }, "harness|ko_mmlu_sociology|5": { "acc": 0.26865671641791045, "acc_stderr": 0.03134328358208954, "acc_norm": 0.26865671641791045, "acc_norm_stderr": 0.03134328358208954 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.34104046242774566, "acc_stderr": 0.036146654241808254, "acc_norm": 0.34104046242774566, "acc_norm_stderr": 0.036146654241808254 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.24603174603174602, "acc_stderr": 0.022182037202948365, "acc_norm": 0.24603174603174602, "acc_norm_stderr": 0.022182037202948365 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.22, "acc_stderr": 0.041633319989322695, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322695 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.2254335260115607, "acc_stderr": 0.022497230190967547, "acc_norm": 0.2254335260115607, "acc_norm_stderr": 0.022497230190967547 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.2392638036809816, "acc_stderr": 0.033519538795212696, "acc_norm": 0.2392638036809816, "acc_norm_stderr": 0.033519538795212696 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.22530864197530864, "acc_stderr": 0.02324620264781975, "acc_norm": 0.22530864197530864, "acc_norm_stderr": 0.02324620264781975 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.04560480215720685, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720685 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.36787564766839376, "acc_stderr": 0.034801756684660366, "acc_norm": 0.36787564766839376, "acc_norm_stderr": 0.034801756684660366 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.22807017543859648, "acc_stderr": 0.03947152782669415, "acc_norm": 0.22807017543859648, "acc_norm_stderr": 0.03947152782669415 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.3541284403669725, "acc_stderr": 0.0205047290138291, "acc_norm": 0.3541284403669725, "acc_norm_stderr": 
0.0205047290138291 }, "harness|ko_mmlu_formal_logic|5": { "acc": 0.373015873015873, "acc_stderr": 0.04325506042017086, "acc_norm": 0.373015873015873, "acc_norm_stderr": 0.04325506042017086 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.2973856209150327, "acc_stderr": 0.026173908506718576, "acc_norm": 0.2973856209150327, "acc_norm_stderr": 0.026173908506718576 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.22, "acc_stderr": 0.041633319989322695, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322695 }, "harness|ko_mmlu_international_law|5": { "acc": 0.12396694214876033, "acc_stderr": 0.03008309871603522, "acc_norm": 0.12396694214876033, "acc_norm_stderr": 0.03008309871603522 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.32894736842105265, "acc_stderr": 0.03823428969926606, "acc_norm": 0.32894736842105265, "acc_norm_stderr": 0.03823428969926606 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.21241830065359477, "acc_stderr": 0.016547148636203147, "acc_norm": 0.21241830065359477, "acc_norm_stderr": 0.016547148636203147 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.24468085106382978, "acc_stderr": 0.025645553622266733, "acc_norm": 0.24468085106382978, "acc_norm_stderr": 0.025645553622266733 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.16964285714285715, "acc_stderr": 0.03562367850095391, "acc_norm": 0.16964285714285715, "acc_norm_stderr": 0.03562367850095391 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.4722222222222222, "acc_stderr": 0.0340470532865388, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.0340470532865388 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.27262569832402234, "acc_stderr": 0.014893391735249608, "acc_norm": 0.27262569832402234, "acc_norm_stderr": 0.014893391735249608 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4485294117647059, "acc_stderr": 0.030211479609121593, "acc_norm": 0.4485294117647059, "acc_norm_stderr": 0.030211479609121593 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.4163265306122449, "acc_stderr": 0.03155782816556163, "acc_norm": 0.4163265306122449, "acc_norm_stderr": 0.03155782816556163 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.19831223628691982, "acc_stderr": 0.025955020841621112, "acc_norm": 0.19831223628691982, "acc_norm_stderr": 0.025955020841621112 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.24315514993481094, "acc_stderr": 0.010956556654417356, "acc_norm": 0.24315514993481094, "acc_norm_stderr": 0.010956556654417356 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.25980392156862747, "acc_stderr": 0.030778554678693257, "acc_norm": 0.25980392156862747, "acc_norm_stderr": 0.030778554678693257 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.23636363636363636, "acc_stderr": 0.033175059300091805, "acc_norm": 0.23636363636363636, "acc_norm_stderr": 0.033175059300091805 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2423500611995104, "mc1_stderr": 0.015000674373570342, "mc2": 0.4081734277840062, "mc2_stderr": 0.014989124693241153 }, "harness|ko_commongen_v2|2": { "acc": 0.2687793427230047, "acc_stderr": 0.015196983421381469, "acc_norm": 0.3380281690140845, "acc_norm_stderr": 0.016215540194273168 } }, "versions": { "all": 0, 
"harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "DILAB-HYU/KoQuality-Polyglot-5.8b", "model_sha": "3bd0773198883587e1ced9f32a1763da2b64a536", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }