{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.30802047781569963, "acc_stderr": 0.01349142951729204, "acc_norm": 0.3515358361774744, "acc_norm_stderr": 0.013952413699600938 }, "harness|ko_hellaswag|10": { "acc": 0.39533957379008167, "acc_stderr": 0.004879242848473461, "acc_norm": 0.5114519020115514, "acc_norm_stderr": 0.0049884724594180165 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|ko_mmlu_management|5": { "acc": 0.20388349514563106, "acc_stderr": 0.0398913985953177, "acc_norm": 0.20388349514563106, "acc_norm_stderr": 0.0398913985953177 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.2567049808429119, "acc_stderr": 0.015620480263064533, "acc_norm": 0.2567049808429119, "acc_norm_stderr": 0.015620480263064533 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.22962962962962963, "acc_stderr": 0.03633384414073465, "acc_norm": 0.22962962962962963, "acc_norm_stderr": 0.03633384414073465 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.2680851063829787, "acc_stderr": 0.02895734278834235, "acc_norm": 0.2680851063829787, "acc_norm_stderr": 0.02895734278834235 }, "harness|ko_mmlu_virology|5": { "acc": 0.2710843373493976, "acc_stderr": 0.034605799075530276, "acc_norm": 0.2710843373493976, "acc_norm_stderr": 0.034605799075530276 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.24437299035369775, "acc_stderr": 0.0244061620946689, "acc_norm": 0.24437299035369775, "acc_norm_stderr": 0.0244061620946689 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.22869955156950672, "acc_stderr": 0.028188240046929196, "acc_norm": 0.22869955156950672, "acc_norm_stderr": 0.028188240046929196 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.2824427480916031, "acc_stderr": 0.03948406125768361, "acc_norm": 0.2824427480916031, "acc_norm_stderr": 0.03948406125768361 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.18686868686868688, "acc_stderr": 0.02777253333421899, "acc_norm": 0.18686868686868688, "acc_norm_stderr": 0.02777253333421899 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.2620689655172414, "acc_stderr": 0.03664666337225256, "acc_norm": 0.2620689655172414, "acc_norm_stderr": 0.03664666337225256 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.14705882352941177, "acc_stderr": 0.035240689515674495, "acc_norm": 0.14705882352941177, "acc_norm_stderr": 0.035240689515674495 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.20588235294117646, "acc_stderr": 0.026265024608275882, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.026265024608275882 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.2076923076923077, "acc_stderr": 0.020567539567246787, "acc_norm": 0.2076923076923077, "acc_norm_stderr": 0.020567539567246787 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.26, "acc_stderr": 0.04408440022768077, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768077 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.19, "acc_stderr": 0.03942772444036623, "acc_norm": 0.19, "acc_norm_stderr": 0.03942772444036623 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.23148148148148148, "acc_stderr": 0.04077494709252627, "acc_norm": 
0.23148148148148148, "acc_norm_stderr": 0.04077494709252627 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.1724137931034483, "acc_stderr": 0.026577672183036572, "acc_norm": 0.1724137931034483, "acc_norm_stderr": 0.026577672183036572 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.20967741935483872, "acc_stderr": 0.02315787934908353, "acc_norm": 0.20967741935483872, "acc_norm_stderr": 0.02315787934908353 }, "harness|ko_mmlu_marketing|5": { "acc": 0.2863247863247863, "acc_stderr": 0.02961432369045665, "acc_norm": 0.2863247863247863, "acc_norm_stderr": 0.02961432369045665 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.21132075471698114, "acc_stderr": 0.025125766484827845, "acc_norm": 0.21132075471698114, "acc_norm_stderr": 0.025125766484827845 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.22727272727272727, "acc_stderr": 0.04013964554072776, "acc_norm": 0.22727272727272727, "acc_norm_stderr": 0.04013964554072776 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.22962962962962963, "acc_stderr": 0.025644108639267645, "acc_norm": 0.22962962962962963, "acc_norm_stderr": 0.025644108639267645 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2251655629139073, "acc_stderr": 0.03410435282008936, "acc_norm": 0.2251655629139073, "acc_norm_stderr": 0.03410435282008936 }, "harness|ko_mmlu_sociology|5": { "acc": 0.23383084577114427, "acc_stderr": 0.029929415408348377, "acc_norm": 0.23383084577114427, "acc_norm_stderr": 0.029929415408348377 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.21965317919075145, "acc_stderr": 0.031568093627031744, "acc_norm": 0.21965317919075145, "acc_norm_stderr": 0.031568093627031744 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.21693121693121692, "acc_stderr": 0.02122708244944504, "acc_norm": 0.21693121693121692, "acc_norm_stderr": 0.02122708244944504 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.25, "acc_stderr": 0.03621034121889507, "acc_norm": 0.25, "acc_norm_stderr": 0.03621034121889507 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.17, "acc_stderr": 0.03775251680686371, "acc_norm": 0.17, "acc_norm_stderr": 0.03775251680686371 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.2658959537572254, "acc_stderr": 0.02378620325550829, "acc_norm": 0.2658959537572254, "acc_norm_stderr": 0.02378620325550829 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.2331288343558282, "acc_stderr": 0.033220157957767414, "acc_norm": 0.2331288343558282, "acc_norm_stderr": 0.033220157957767414 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.25308641975308643, "acc_stderr": 0.024191808600712992, "acc_norm": 0.25308641975308643, "acc_norm_stderr": 0.024191808600712992 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.21243523316062177, "acc_stderr": 0.029519282616817247, "acc_norm": 0.21243523316062177, "acc_norm_stderr": 0.029519282616817247 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813344, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813344 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.21100917431192662, "acc_stderr": 0.017493922404112648, "acc_norm": 0.21100917431192662, "acc_norm_stderr": 0.017493922404112648 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.2222222222222222, "acc_stderr": 0.037184890068181146, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.037184890068181146 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.2549019607843137, "acc_stderr": 0.024954184324879905, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.024954184324879905 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_international_law|5": { "acc": 0.24793388429752067, "acc_stderr": 0.03941897526516304, "acc_norm": 0.24793388429752067, "acc_norm_stderr": 0.03941897526516304 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.19736842105263158, "acc_stderr": 0.03238981601699397, "acc_norm": 0.19736842105263158, "acc_norm_stderr": 0.03238981601699397 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.27124183006535946, "acc_stderr": 0.017986615304030312, "acc_norm": 0.27124183006535946, "acc_norm_stderr": 0.017986615304030312 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.23404255319148937, "acc_stderr": 0.025257861359432407, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432407 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.38392857142857145, "acc_stderr": 0.04616143075028547, "acc_norm": 0.38392857142857145, "acc_norm_stderr": 0.04616143075028547 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.16666666666666666, "acc_stderr": 0.025416428388767474, "acc_norm": 0.16666666666666666, "acc_norm_stderr": 0.025416428388767474 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.24134078212290502, "acc_stderr": 0.014310999547961459, "acc_norm": 0.24134078212290502, "acc_norm_stderr": 0.014310999547961459 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.24, "acc_stderr": 0.04292346959909282, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909282 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.29, "acc_stderr": 0.045604802157206824, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206824 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.20220588235294118, "acc_stderr": 0.024398192986654924, "acc_norm": 0.20220588235294118, "acc_norm_stderr": 0.024398192986654924 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.22040816326530613, "acc_stderr": 0.0265370453121453, "acc_norm": 0.22040816326530613, "acc_norm_stderr": 0.0265370453121453 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.2489451476793249, "acc_stderr": 0.028146970599422644, "acc_norm": 0.2489451476793249, "acc_norm_stderr": 0.028146970599422644 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.24641460234680573, "acc_stderr": 0.011005971399927234, "acc_norm": 0.24641460234680573, "acc_norm_stderr": 0.011005971399927234 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.22549019607843138, "acc_stderr": 0.029331162294251735, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.029331162294251735 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.19393939393939394, "acc_stderr": 0.030874145136562097, "acc_norm": 0.19393939393939394, "acc_norm_stderr": 0.030874145136562097 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2484700122399021, "mc1_stderr": 0.0151274270965207, "mc2": 0.40538205465914606, "mc2_stderr": 0.01537488137847706 }, "harness|ko_commongen_v2|2": { "acc": 0.3789846517119244, "acc_stderr": 0.01667926068422928, "acc_norm": 0.4734356552538371, "acc_norm_stderr": 0.017166075717577747 } }, "versions": { "all": 0, 
"harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "KRAFTON/KORani-v1-13B", "model_sha": "a699d0cebc4815f33854bc83065a03fc9008473c", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }