{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3037542662116041, "acc_stderr": 0.01343890918477875, "acc_norm": 0.34044368600682595, "acc_norm_stderr": 0.013847460518892978 }, "harness|ko_hellaswag|10": { "acc": 0.394443337980482, "acc_stderr": 0.004877319683639072, "acc_norm": 0.5136427006572396, "acc_norm_stderr": 0.004987923636628548 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.2807017543859649, "acc_stderr": 0.03446296217088426, "acc_norm": 0.2807017543859649, "acc_norm_stderr": 0.03446296217088426 }, "harness|ko_mmlu_management|5": { "acc": 0.21359223300970873, "acc_stderr": 0.04058042015646034, "acc_norm": 0.21359223300970873, "acc_norm_stderr": 0.04058042015646034 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.26947637292464877, "acc_stderr": 0.01586624307321507, "acc_norm": 0.26947637292464877, "acc_norm_stderr": 0.01586624307321507 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.31851851851851853, "acc_stderr": 0.04024778401977111, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.04024778401977111 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.23404255319148937, "acc_stderr": 0.02767845257821238, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.02767845257821238 }, "harness|ko_mmlu_virology|5": { "acc": 0.23493975903614459, "acc_stderr": 0.03300533186128922, "acc_norm": 0.23493975903614459, "acc_norm_stderr": 0.03300533186128922 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.2829581993569132, "acc_stderr": 0.02558306248998482, "acc_norm": 0.2829581993569132, "acc_norm_stderr": 0.02558306248998482 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.20179372197309417, "acc_stderr": 0.02693611191280227, "acc_norm": 0.20179372197309417, "acc_norm_stderr": 0.02693611191280227 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.2748091603053435, "acc_stderr": 0.03915345408847835, "acc_norm": 0.2748091603053435, "acc_norm_stderr": 0.03915345408847835 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.24, "acc_stderr": 0.04292346959909283, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909283 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.2676767676767677, "acc_stderr": 0.03154449888270287, "acc_norm": 0.2676767676767677, "acc_norm_stderr": 0.03154449888270287 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.2827586206896552, "acc_stderr": 0.03752833958003336, "acc_norm": 0.2827586206896552, "acc_norm_stderr": 0.03752833958003336 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.2549019607843137, "acc_stderr": 0.04336432707993178, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.04336432707993178 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.02755361446786381, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.02755361446786381 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.2641025641025641, "acc_stderr": 0.022352193737453268, "acc_norm": 0.2641025641025641, "acc_norm_stderr": 0.022352193737453268 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.28, "acc_stderr": 0.045126085985421255, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421255 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.2037037037037037, "acc_stderr": 0.038935425188248475, "acc_norm": 0.2037037037037037, 
"acc_norm_stderr": 0.038935425188248475 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.26108374384236455, "acc_stderr": 0.030903796952114475, "acc_norm": 0.26108374384236455, "acc_norm_stderr": 0.030903796952114475 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.24838709677419354, "acc_stderr": 0.024580028921481006, "acc_norm": 0.24838709677419354, "acc_norm_stderr": 0.024580028921481006 }, "harness|ko_mmlu_marketing|5": { "acc": 0.21367521367521367, "acc_stderr": 0.026853450377009164, "acc_norm": 0.21367521367521367, "acc_norm_stderr": 0.026853450377009164 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.28679245283018867, "acc_stderr": 0.027834912527544057, "acc_norm": 0.28679245283018867, "acc_norm_stderr": 0.027834912527544057 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.24545454545454545, "acc_stderr": 0.04122066502878284, "acc_norm": 0.24545454545454545, "acc_norm_stderr": 0.04122066502878284 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2111111111111111, "acc_stderr": 0.024882116857655078, "acc_norm": 0.2111111111111111, "acc_norm_stderr": 0.024882116857655078 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2781456953642384, "acc_stderr": 0.03658603262763743, "acc_norm": 0.2781456953642384, "acc_norm_stderr": 0.03658603262763743 }, "harness|ko_mmlu_sociology|5": { "acc": 0.263681592039801, "acc_stderr": 0.03115715086935555, "acc_norm": 0.263681592039801, "acc_norm_stderr": 0.03115715086935555 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.35260115606936415, "acc_stderr": 0.03643037168958548, "acc_norm": 0.35260115606936415, "acc_norm_stderr": 0.03643037168958548 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2671957671957672, "acc_stderr": 0.022789673145776564, "acc_norm": 0.2671957671957672, "acc_norm_stderr": 0.022789673145776564 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2986111111111111, "acc_stderr": 0.038270523579507554, "acc_norm": 0.2986111111111111, "acc_norm_stderr": 0.038270523579507554 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.23699421965317918, "acc_stderr": 0.02289408248992599, "acc_norm": 0.23699421965317918, "acc_norm_stderr": 0.02289408248992599 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.25766871165644173, "acc_stderr": 0.03436150827846917, "acc_norm": 0.25766871165644173, "acc_norm_stderr": 0.03436150827846917 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.29012345679012347, "acc_stderr": 0.025251173936495026, "acc_norm": 0.29012345679012347, "acc_norm_stderr": 0.025251173936495026 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.24352331606217617, "acc_stderr": 0.030975436386845436, "acc_norm": 0.24352331606217617, "acc_norm_stderr": 0.030975436386845436 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.03999423879281336, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.03999423879281336 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.27522935779816515, "acc_stderr": 0.0191490937431552, "acc_norm": 0.27522935779816515, "acc_norm_stderr": 0.0191490937431552 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.23015873015873015, "acc_stderr": 0.03764950879790607, "acc_norm": 0.23015873015873015, "acc_norm_stderr": 0.03764950879790607 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.2581699346405229, "acc_stderr": 0.025058503316958157, "acc_norm": 0.2581699346405229, "acc_norm_stderr": 0.025058503316958157 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.22, "acc_stderr": 0.041633319989322674, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322674 }, "harness|ko_mmlu_international_law|5": { "acc": 0.30578512396694213, "acc_stderr": 0.04205953933884124, "acc_norm": 0.30578512396694213, "acc_norm_stderr": 0.04205953933884124 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.3092105263157895, "acc_stderr": 0.037610708698674805, "acc_norm": 0.3092105263157895, "acc_norm_stderr": 0.037610708698674805 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.24822695035460993, "acc_stderr": 0.025770015644290392, "acc_norm": 0.24822695035460993, "acc_norm_stderr": 0.025770015644290392 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.23214285714285715, "acc_stderr": 0.04007341809755806, "acc_norm": 0.23214285714285715, "acc_norm_stderr": 0.04007341809755806 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3888888888888889, "acc_stderr": 0.033247089118091176, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.033247089118091176 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.24134078212290502, "acc_stderr": 0.01431099954796146, "acc_norm": 0.24134078212290502, "acc_norm_stderr": 0.01431099954796146 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.34191176470588236, "acc_stderr": 0.02881472242225417, "acc_norm": 0.34191176470588236, "acc_norm_stderr": 0.02881472242225417 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.30612244897959184, "acc_stderr": 0.02950489645459596, "acc_norm": 0.30612244897959184, "acc_norm_stderr": 0.02950489645459596 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.2489451476793249, "acc_stderr": 0.028146970599422644, "acc_norm": 0.2489451476793249, "acc_norm_stderr": 0.028146970599422644 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.2457627118644068, "acc_stderr": 0.010996156635142692, "acc_norm": 0.2457627118644068, "acc_norm_stderr": 0.010996156635142692 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.27450980392156865, "acc_stderr": 0.03132179803083291, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.03132179803083291 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.2727272727272727, "acc_stderr": 0.03477691162163659, "acc_norm": 0.2727272727272727, "acc_norm_stderr": 0.03477691162163659 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2582619339045288, "mc1_stderr": 0.01532182168847619, "mc2": 0.4062486938859843, "mc2_stderr": 0.014871974864786166 }, "harness|ko_commongen_v2|2": { "acc": 0.31641086186540734, "acc_stderr": 0.015989617951065477, "acc_norm": 0.3778040141676505, "acc_norm_stderr": 0.016669082840694963 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "DILAB-HYU/koquality-polyglot-ko-12.8b", "model_sha": "8db9d0a47a6dc69b8fd405f4053c723a4c54696a", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }