|
{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.26109215017064846,
            "acc_stderr": 0.012835523909473855,
            "acc_norm": 0.3097269624573379,
            "acc_norm_stderr": 0.013512058415238361
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.36606253734315874,
            "acc_stderr": 0.00480742334322458,
            "acc_norm": 0.46016729735112527,
            "acc_norm_stderr": 0.004973922192982238
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.2573099415204678,
            "acc_stderr": 0.03352799844161865,
            "acc_norm": 0.2573099415204678,
            "acc_norm_stderr": 0.03352799844161865
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.21359223300970873,
            "acc_stderr": 0.04058042015646034,
            "acc_norm": 0.21359223300970873,
            "acc_norm_stderr": 0.04058042015646034
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.2707535121328225,
            "acc_stderr": 0.015889888362560486,
            "acc_norm": 0.2707535121328225,
            "acc_norm_stderr": 0.015889888362560486
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.3037037037037037,
            "acc_stderr": 0.03972552884785138,
            "acc_norm": 0.3037037037037037,
            "acc_norm_stderr": 0.03972552884785138
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.34,
            "acc_stderr": 0.047609522856952365,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.047609522856952365
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.225531914893617,
            "acc_stderr": 0.027321078417387533,
            "acc_norm": 0.225531914893617,
            "acc_norm_stderr": 0.027321078417387533
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.30120481927710846,
            "acc_stderr": 0.035716092300534796,
            "acc_norm": 0.30120481927710846,
            "acc_norm_stderr": 0.035716092300534796
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.3022508038585209,
            "acc_stderr": 0.02608270069539966,
            "acc_norm": 0.3022508038585209,
            "acc_norm_stderr": 0.02608270069539966
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.2062780269058296,
            "acc_stderr": 0.027157150479563824,
            "acc_norm": 0.2062780269058296,
            "acc_norm_stderr": 0.027157150479563824
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.2366412213740458,
            "acc_stderr": 0.037276735755969174,
            "acc_norm": 0.2366412213740458,
            "acc_norm_stderr": 0.037276735755969174
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.19,
            "acc_stderr": 0.03942772444036623,
            "acc_norm": 0.19,
            "acc_norm_stderr": 0.03942772444036623
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.29292929292929293,
            "acc_stderr": 0.03242497958178817,
            "acc_norm": 0.29292929292929293,
            "acc_norm_stderr": 0.03242497958178817
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.2482758620689655,
            "acc_stderr": 0.03600105692727772,
            "acc_norm": 0.2482758620689655,
            "acc_norm_stderr": 0.03600105692727772
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.2647058823529412,
            "acc_stderr": 0.04389869956808778,
            "acc_norm": 0.2647058823529412,
            "acc_norm_stderr": 0.04389869956808778
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.31932773109243695,
            "acc_stderr": 0.030283995525884403,
            "acc_norm": 0.31932773109243695,
            "acc_norm_stderr": 0.030283995525884403
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.2923076923076923,
            "acc_stderr": 0.023060438380857737,
            "acc_norm": 0.2923076923076923,
            "acc_norm_stderr": 0.023060438380857737
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.16,
            "acc_stderr": 0.03684529491774709,
            "acc_norm": 0.16,
            "acc_norm_stderr": 0.03684529491774709
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.21296296296296297,
            "acc_stderr": 0.03957835471980981,
            "acc_norm": 0.21296296296296297,
            "acc_norm_stderr": 0.03957835471980981
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.2955665024630542,
            "acc_stderr": 0.032104944337514575,
            "acc_norm": 0.2955665024630542,
            "acc_norm_stderr": 0.032104944337514575
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.2709677419354839,
            "acc_stderr": 0.025284416114900156,
            "acc_norm": 0.2709677419354839,
            "acc_norm_stderr": 0.025284416114900156
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.18376068376068377,
            "acc_stderr": 0.02537213967172293,
            "acc_norm": 0.18376068376068377,
            "acc_norm_stderr": 0.02537213967172293
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.22264150943396227,
            "acc_stderr": 0.025604233470899105,
            "acc_norm": 0.22264150943396227,
            "acc_norm_stderr": 0.025604233470899105
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.20909090909090908,
            "acc_stderr": 0.03895091015724136,
            "acc_norm": 0.20909090909090908,
            "acc_norm_stderr": 0.03895091015724136
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.26296296296296295,
            "acc_stderr": 0.026842057873833706,
            "acc_norm": 0.26296296296296295,
            "acc_norm_stderr": 0.026842057873833706
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.33112582781456956,
            "acc_stderr": 0.038425817186598696,
            "acc_norm": 0.33112582781456956,
            "acc_norm_stderr": 0.038425817186598696
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.2537313432835821,
            "acc_stderr": 0.030769444967296014,
            "acc_norm": 0.2537313432835821,
            "acc_norm_stderr": 0.030769444967296014
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.27167630057803466,
            "acc_stderr": 0.033917503223216586,
            "acc_norm": 0.27167630057803466,
            "acc_norm_stderr": 0.033917503223216586
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.25396825396825395,
            "acc_stderr": 0.022418042891113946,
            "acc_norm": 0.25396825396825395,
            "acc_norm_stderr": 0.022418042891113946
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.03745554791462457,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.03745554791462457
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.17,
            "acc_stderr": 0.03775251680686371,
            "acc_norm": 0.17,
            "acc_norm_stderr": 0.03775251680686371
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.24,
            "acc_stderr": 0.042923469599092816,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.042923469599092816
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.25722543352601157,
            "acc_stderr": 0.023532925431044287,
            "acc_norm": 0.25722543352601157,
            "acc_norm_stderr": 0.023532925431044287
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3067484662576687,
            "acc_stderr": 0.036230899157241474,
            "acc_norm": 0.3067484662576687,
            "acc_norm_stderr": 0.036230899157241474
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.3055555555555556,
            "acc_stderr": 0.025630824975621344,
            "acc_norm": 0.3055555555555556,
            "acc_norm_stderr": 0.025630824975621344
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.36787564766839376,
            "acc_stderr": 0.034801756684660366,
            "acc_norm": 0.36787564766839376,
            "acc_norm_stderr": 0.034801756684660366
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.24561403508771928,
            "acc_stderr": 0.04049339297748141,
            "acc_norm": 0.24561403508771928,
            "acc_norm_stderr": 0.04049339297748141
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.22752293577981653,
            "acc_stderr": 0.0179744635787765,
            "acc_norm": 0.22752293577981653,
            "acc_norm_stderr": 0.0179744635787765
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.15873015873015872,
            "acc_stderr": 0.032684540130117436,
            "acc_norm": 0.15873015873015872,
            "acc_norm_stderr": 0.032684540130117436
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.24183006535947713,
            "acc_stderr": 0.024518195641879334,
            "acc_norm": 0.24183006535947713,
            "acc_norm_stderr": 0.024518195641879334
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.19,
            "acc_stderr": 0.039427724440366234,
            "acc_norm": 0.19,
            "acc_norm_stderr": 0.039427724440366234
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.2231404958677686,
            "acc_stderr": 0.03800754475228733,
            "acc_norm": 0.2231404958677686,
            "acc_norm_stderr": 0.03800754475228733
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.29605263157894735,
            "acc_stderr": 0.03715062154998905,
            "acc_norm": 0.29605263157894735,
            "acc_norm_stderr": 0.03715062154998905
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.016819028375736386,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.016819028375736386
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.25177304964539005,
            "acc_stderr": 0.0258921511567094,
            "acc_norm": 0.25177304964539005,
            "acc_norm_stderr": 0.0258921511567094
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.22321428571428573,
            "acc_stderr": 0.03952301967702511,
            "acc_norm": 0.22321428571428573,
            "acc_norm_stderr": 0.03952301967702511
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.4722222222222222,
            "acc_stderr": 0.0340470532865388,
            "acc_norm": 0.4722222222222222,
            "acc_norm_stderr": 0.0340470532865388
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.24692737430167597,
            "acc_stderr": 0.014422292204808852,
            "acc_norm": 0.24692737430167597,
            "acc_norm_stderr": 0.014422292204808852
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909284,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909284
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252605,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252605
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.4411764705882353,
            "acc_stderr": 0.030161911930767102,
            "acc_norm": 0.4411764705882353,
            "acc_norm_stderr": 0.030161911930767102
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.31020408163265306,
            "acc_stderr": 0.029613459872484375,
            "acc_norm": 0.31020408163265306,
            "acc_norm_stderr": 0.029613459872484375
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.2489451476793249,
            "acc_stderr": 0.028146970599422644,
            "acc_norm": 0.2489451476793249,
            "acc_norm_stderr": 0.028146970599422644
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.24315514993481094,
            "acc_stderr": 0.010956556654417362,
            "acc_norm": 0.24315514993481094,
            "acc_norm_stderr": 0.010956556654417362
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.24019607843137256,
            "acc_stderr": 0.02998373305591361,
            "acc_norm": 0.24019607843137256,
            "acc_norm_stderr": 0.02998373305591361
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.24242424242424243,
            "acc_stderr": 0.03346409881055953,
            "acc_norm": 0.24242424242424243,
            "acc_norm_stderr": 0.03346409881055953
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2558139534883721,
            "mc1_stderr": 0.015274176219283335,
            "mc2": 0.4152993218865631,
            "mc2_stderr": 0.015196497707034719
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.3116883116883117,
            "acc_stderr": 0.015924567607358338,
            "acc_norm": 0.39433293978748524,
            "acc_norm_stderr": 0.016802090674893213
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "DILAB-HYU/koquality-polyglot-3.8b",
        "model_sha": "c07be8b24386d148dae0b95cf1beecfd5ce1b695",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}