|
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.48293515358361777,
            "acc_stderr": 0.014602878388536598,
            "acc_norm": 0.5460750853242321,
            "acc_norm_stderr": 0.014549221105171864
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.46644094801832303,
            "acc_stderr": 0.004978529642140938,
            "acc_norm": 0.6319458275243975,
            "acc_norm_stderr": 0.00481290527906644
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.6783625730994152,
            "acc_stderr": 0.03582529442573122,
            "acc_norm": 0.6783625730994152,
            "acc_norm_stderr": 0.03582529442573122
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6893203883495146,
            "acc_stderr": 0.04582124160161551,
            "acc_norm": 0.6893203883495146,
            "acc_norm_stderr": 0.04582124160161551
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.7151979565772669,
            "acc_stderr": 0.016139174096522584,
            "acc_norm": 0.7151979565772669,
            "acc_norm_stderr": 0.016139174096522584
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4962962962962963,
            "acc_stderr": 0.043192236258113303,
            "acc_norm": 0.4962962962962963,
            "acc_norm_stderr": 0.043192236258113303
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.574468085106383,
            "acc_stderr": 0.032321469162244695,
            "acc_norm": 0.574468085106383,
            "acc_norm_stderr": 0.032321469162244695
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.5180722891566265,
            "acc_stderr": 0.03889951252827216,
            "acc_norm": 0.5180722891566265,
            "acc_norm_stderr": 0.03889951252827216
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.6430868167202572,
            "acc_stderr": 0.027210420375934023,
            "acc_norm": 0.6430868167202572,
            "acc_norm_stderr": 0.027210420375934023
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.6278026905829597,
            "acc_stderr": 0.03244305283008731,
            "acc_norm": 0.6278026905829597,
            "acc_norm_stderr": 0.03244305283008731
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.6335877862595419,
            "acc_stderr": 0.04225875451969639,
            "acc_norm": 0.6335877862595419,
            "acc_norm_stderr": 0.04225875451969639
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.46,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.46,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.7777777777777778,
            "acc_stderr": 0.029620227874790486,
            "acc_norm": 0.7777777777777778,
            "acc_norm_stderr": 0.029620227874790486
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.503448275862069,
            "acc_stderr": 0.04166567577101579,
            "acc_norm": 0.503448275862069,
            "acc_norm_stderr": 0.04166567577101579
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.3137254901960784,
            "acc_stderr": 0.04617034827006716,
            "acc_norm": 0.3137254901960784,
            "acc_norm_stderr": 0.04617034827006716
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.6428571428571429,
            "acc_stderr": 0.031124619309328177,
            "acc_norm": 0.6428571428571429,
            "acc_norm_stderr": 0.031124619309328177
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.5871794871794872,
            "acc_stderr": 0.02496268356433178,
            "acc_norm": 0.5871794871794872,
            "acc_norm_stderr": 0.02496268356433178
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.58,
            "acc_stderr": 0.04960449637488583,
            "acc_norm": 0.58,
            "acc_norm_stderr": 0.04960449637488583
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.6388888888888888,
            "acc_stderr": 0.04643454608906275,
            "acc_norm": 0.6388888888888888,
            "acc_norm_stderr": 0.04643454608906275
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.41379310344827586,
            "acc_stderr": 0.03465304488406795,
            "acc_norm": 0.41379310344827586,
            "acc_norm_stderr": 0.03465304488406795
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.667741935483871,
            "acc_stderr": 0.02679556084812279,
            "acc_norm": 0.667741935483871,
            "acc_norm_stderr": 0.02679556084812279
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.8205128205128205,
            "acc_stderr": 0.025140935950335435,
            "acc_norm": 0.8205128205128205,
            "acc_norm_stderr": 0.025140935950335435
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.5773584905660377,
            "acc_stderr": 0.03040233144576954,
            "acc_norm": 0.5773584905660377,
            "acc_norm_stderr": 0.03040233144576954
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.6363636363636364,
            "acc_stderr": 0.046075820907199756,
            "acc_norm": 0.6363636363636364,
            "acc_norm_stderr": 0.046075820907199756
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3148148148148148,
            "acc_stderr": 0.028317533496066485,
            "acc_norm": 0.3148148148148148,
            "acc_norm_stderr": 0.028317533496066485
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.37748344370860926,
            "acc_stderr": 0.0395802723112157,
            "acc_norm": 0.37748344370860926,
            "acc_norm_stderr": 0.0395802723112157
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.7611940298507462,
            "acc_stderr": 0.03014777593540922,
            "acc_norm": 0.7611940298507462,
            "acc_norm_stderr": 0.03014777593540922
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.5433526011560693,
            "acc_stderr": 0.03798106566014498,
            "acc_norm": 0.5433526011560693,
            "acc_norm_stderr": 0.03798106566014498
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.4126984126984127,
            "acc_stderr": 0.02535574126305526,
            "acc_norm": 0.4126984126984127,
            "acc_norm_stderr": 0.02535574126305526
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.5902777777777778,
            "acc_stderr": 0.04112490974670787,
            "acc_norm": 0.5902777777777778,
            "acc_norm_stderr": 0.04112490974670787
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.36,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.75,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.75,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.6098265895953757,
            "acc_stderr": 0.026261677607806646,
            "acc_norm": 0.6098265895953757,
            "acc_norm_stderr": 0.026261677607806646
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.6134969325153374,
            "acc_stderr": 0.03825825548848607,
            "acc_norm": 0.6134969325153374,
            "acc_norm_stderr": 0.03825825548848607
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.6604938271604939,
            "acc_stderr": 0.026348564412011628,
            "acc_norm": 0.6604938271604939,
            "acc_norm_stderr": 0.026348564412011628
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.43,
            "acc_stderr": 0.049756985195624284,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.049756985195624284
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.772020725388601,
            "acc_stderr": 0.03027690994517826,
            "acc_norm": 0.772020725388601,
            "acc_norm_stderr": 0.03027690994517826
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.4824561403508772,
            "acc_stderr": 0.04700708033551038,
            "acc_norm": 0.4824561403508772,
            "acc_norm_stderr": 0.04700708033551038
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.7412844036697248,
            "acc_stderr": 0.018776052319619624,
            "acc_norm": 0.7412844036697248,
            "acc_norm_stderr": 0.018776052319619624
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.4365079365079365,
            "acc_stderr": 0.04435932892851466,
            "acc_norm": 0.4365079365079365,
            "acc_norm_stderr": 0.04435932892851466
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.6405228758169934,
            "acc_stderr": 0.027475969910660952,
            "acc_norm": 0.6405228758169934,
            "acc_norm_stderr": 0.027475969910660952
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.64,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.64,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.7107438016528925,
            "acc_stderr": 0.041391127276354626,
            "acc_norm": 0.7107438016528925,
            "acc_norm_stderr": 0.041391127276354626
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.6118421052631579,
            "acc_stderr": 0.03965842097512744,
            "acc_norm": 0.6118421052631579,
            "acc_norm_stderr": 0.03965842097512744
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.5408496732026143,
            "acc_stderr": 0.020160213617222516,
            "acc_norm": 0.5408496732026143,
            "acc_norm_stderr": 0.020160213617222516
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.41843971631205673,
            "acc_stderr": 0.029427994039419994,
            "acc_norm": 0.41843971631205673,
            "acc_norm_stderr": 0.029427994039419994
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.4107142857142857,
            "acc_stderr": 0.04669510663875191,
            "acc_norm": 0.4107142857142857,
            "acc_norm_stderr": 0.04669510663875191
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.5277777777777778,
            "acc_stderr": 0.0340470532865388,
            "acc_norm": 0.5277777777777778,
            "acc_norm_stderr": 0.0340470532865388
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.23575418994413408,
            "acc_stderr": 0.014196375686290804,
            "acc_norm": 0.23575418994413408,
            "acc_norm_stderr": 0.014196375686290804
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.76,
            "acc_stderr": 0.04292346959909283,
            "acc_norm": 0.76,
            "acc_norm_stderr": 0.04292346959909283
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.5698529411764706,
            "acc_stderr": 0.030074971917302875,
            "acc_norm": 0.5698529411764706,
            "acc_norm_stderr": 0.030074971917302875
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.7591836734693878,
            "acc_stderr": 0.02737294220178816,
            "acc_norm": 0.7591836734693878,
            "acc_norm_stderr": 0.02737294220178816
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.8016877637130801,
            "acc_stderr": 0.0259550208416211,
            "acc_norm": 0.8016877637130801,
            "acc_norm_stderr": 0.0259550208416211
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.42503259452411996,
            "acc_stderr": 0.012625879884891994,
            "acc_norm": 0.42503259452411996,
            "acc_norm_stderr": 0.012625879884891994
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.7745098039215687,
            "acc_stderr": 0.029331162294251742,
            "acc_norm": 0.7745098039215687,
            "acc_norm_stderr": 0.029331162294251742
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.7696969696969697,
            "acc_stderr": 0.03287666758603488,
            "acc_norm": 0.7696969696969697,
            "acc_norm_stderr": 0.03287666758603488
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.4173806609547124,
            "mc1_stderr": 0.01726289106327218,
            "mc2": 0.5720704055079059,
            "mc2_stderr": 0.015554344775507314
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.577331759149941,
            "acc_stderr": 0.016983506079577607,
            "acc_norm": 0.5879574970484062,
            "acc_norm_stderr": 0.01692227673852836
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "JY623/KoSOLAR-10.7B-merge-v3.0",
        "model_sha": "b3d3709be14ca40db5d696a2b175f1e4d6c2ea6e",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}