{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.36860068259385664,
            "acc_stderr": 0.0140978106780422,
            "acc_norm": 0.4206484641638225,
            "acc_norm_stderr": 0.014426211252508394
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4047002589125672,
            "acc_stderr": 0.004898308167211838,
            "acc_norm": 0.5395339573790081,
            "acc_norm_stderr": 0.004974159561342694
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4853801169590643,
            "acc_stderr": 0.038331852752130205,
            "acc_norm": 0.4853801169590643,
            "acc_norm_stderr": 0.038331852752130205
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5533980582524272,
            "acc_stderr": 0.04922424153458934,
            "acc_norm": 0.5533980582524272,
            "acc_norm_stderr": 0.04922424153458934
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5095785440613027,
            "acc_stderr": 0.01787668227534086,
            "acc_norm": 0.5095785440613027,
            "acc_norm_stderr": 0.01787668227534086
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.42962962962962964,
            "acc_stderr": 0.04276349494376599,
            "acc_norm": 0.42962962962962964,
            "acc_norm_stderr": 0.04276349494376599
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.26,
            "acc_stderr": 0.044084400227680794,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.044084400227680794
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.33191489361702126,
            "acc_stderr": 0.030783736757745657,
            "acc_norm": 0.33191489361702126,
            "acc_norm_stderr": 0.030783736757745657
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3614457831325301,
            "acc_stderr": 0.037400593820293204,
            "acc_norm": 0.3614457831325301,
            "acc_norm_stderr": 0.037400593820293204
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4983922829581994,
            "acc_stderr": 0.02839794490780661,
            "acc_norm": 0.4983922829581994,
            "acc_norm_stderr": 0.02839794490780661
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.4977578475336323,
            "acc_stderr": 0.03355746535223263,
            "acc_norm": 0.4977578475336323,
            "acc_norm_stderr": 0.03355746535223263
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.44274809160305345,
            "acc_stderr": 0.0435644720266507,
            "acc_norm": 0.44274809160305345,
            "acc_norm_stderr": 0.0435644720266507
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5454545454545454,
            "acc_stderr": 0.03547601494006938,
            "acc_norm": 0.5454545454545454,
            "acc_norm_stderr": 0.03547601494006938
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4482758620689655,
            "acc_stderr": 0.04144311810878151,
            "acc_norm": 0.4482758620689655,
            "acc_norm_stderr": 0.04144311810878151
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.24509803921568626,
            "acc_stderr": 0.04280105837364396,
            "acc_norm": 0.24509803921568626,
            "acc_norm_stderr": 0.04280105837364396
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.453781512605042,
            "acc_stderr": 0.032339434681820885,
            "acc_norm": 0.453781512605042,
            "acc_norm_stderr": 0.032339434681820885
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4230769230769231,
            "acc_stderr": 0.025049197876042335,
            "acc_norm": 0.4230769230769231,
            "acc_norm_stderr": 0.025049197876042335
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.41,
            "acc_stderr": 0.04943110704237102,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.04943110704237102
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.46296296296296297,
            "acc_stderr": 0.04820403072760626,
            "acc_norm": 0.46296296296296297,
            "acc_norm_stderr": 0.04820403072760626
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3694581280788177,
            "acc_stderr": 0.03395970381998576,
            "acc_norm": 0.3694581280788177,
            "acc_norm_stderr": 0.03395970381998576
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.43548387096774194,
            "acc_stderr": 0.028206225591502744,
            "acc_norm": 0.43548387096774194,
            "acc_norm_stderr": 0.028206225591502744
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6452991452991453,
            "acc_stderr": 0.03134250486245402,
            "acc_norm": 0.6452991452991453,
            "acc_norm_stderr": 0.03134250486245402
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4226415094339623,
            "acc_stderr": 0.03040233144576954,
            "acc_norm": 0.4226415094339623,
            "acc_norm_stderr": 0.03040233144576954
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5,
            "acc_stderr": 0.04789131426105757,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.04789131426105757
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3148148148148148,
            "acc_stderr": 0.028317533496066475,
            "acc_norm": 0.3148148148148148,
            "acc_norm_stderr": 0.028317533496066475
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.304635761589404,
            "acc_stderr": 0.03757949922943343,
            "acc_norm": 0.304635761589404,
            "acc_norm_stderr": 0.03757949922943343
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5522388059701493,
            "acc_stderr": 0.035161847729521675,
            "acc_norm": 0.5522388059701493,
            "acc_norm_stderr": 0.035161847729521675
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3930635838150289,
            "acc_stderr": 0.0372424959581773,
            "acc_norm": 0.3930635838150289,
            "acc_norm_stderr": 0.0372424959581773
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.36772486772486773,
            "acc_stderr": 0.024833839825562427,
            "acc_norm": 0.36772486772486773,
            "acc_norm_stderr": 0.024833839825562427
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3541666666666667,
            "acc_stderr": 0.039994111357535424,
            "acc_norm": 0.3541666666666667,
            "acc_norm_stderr": 0.039994111357535424
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621504,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621504
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.59,
            "acc_stderr": 0.04943110704237102,
            "acc_norm": 0.59,
            "acc_norm_stderr": 0.04943110704237102
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.4624277456647399,
            "acc_stderr": 0.026842985519615375,
            "acc_norm": 0.4624277456647399,
            "acc_norm_stderr": 0.026842985519615375
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4601226993865031,
            "acc_stderr": 0.0391585729143697,
            "acc_norm": 0.4601226993865031,
            "acc_norm_stderr": 0.0391585729143697
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.4783950617283951,
            "acc_stderr": 0.027794760105008746,
            "acc_norm": 0.4783950617283951,
            "acc_norm_stderr": 0.027794760105008746
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.49222797927461137,
            "acc_stderr": 0.03608003225569654,
            "acc_norm": 0.49222797927461137,
            "acc_norm_stderr": 0.03608003225569654
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.21052631578947367,
            "acc_stderr": 0.0383515395439942,
            "acc_norm": 0.21052631578947367,
            "acc_norm_stderr": 0.0383515395439942
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5376146788990825,
            "acc_stderr": 0.021376575274397576,
            "acc_norm": 0.5376146788990825,
            "acc_norm_stderr": 0.021376575274397576
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.42063492063492064,
            "acc_stderr": 0.04415438226743744,
            "acc_norm": 0.42063492063492064,
            "acc_norm_stderr": 0.04415438226743744
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.42483660130718953,
            "acc_stderr": 0.028304576673141107,
            "acc_norm": 0.42483660130718953,
            "acc_norm_stderr": 0.028304576673141107
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.47,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.47,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5950413223140496,
            "acc_stderr": 0.04481137755942469,
            "acc_norm": 0.5950413223140496,
            "acc_norm_stderr": 0.04481137755942469
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.4407894736842105,
            "acc_stderr": 0.04040311062490436,
            "acc_norm": 0.4407894736842105,
            "acc_norm_stderr": 0.04040311062490436
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.36437908496732024,
            "acc_stderr": 0.019469518221573702,
            "acc_norm": 0.36437908496732024,
            "acc_norm_stderr": 0.019469518221573702
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3404255319148936,
            "acc_stderr": 0.028267657482650147,
            "acc_norm": 0.3404255319148936,
            "acc_norm_stderr": 0.028267657482650147
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.2767857142857143,
            "acc_stderr": 0.042466243366976256,
            "acc_norm": 0.2767857142857143,
            "acc_norm_stderr": 0.042466243366976256
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.375,
            "acc_stderr": 0.033016908987210894,
            "acc_norm": 0.375,
            "acc_norm_stderr": 0.033016908987210894
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.24804469273743016,
            "acc_stderr": 0.014444157808261453,
            "acc_norm": 0.24804469273743016,
            "acc_norm_stderr": 0.014444157808261453
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.46,
            "acc_stderr": 0.05009082659620333,
            "acc_norm": 0.46,
            "acc_norm_stderr": 0.05009082659620333
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3382352941176471,
            "acc_stderr": 0.028739328513983583,
            "acc_norm": 0.3382352941176471,
            "acc_norm_stderr": 0.028739328513983583
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.47346938775510206,
            "acc_stderr": 0.03196412734523272,
            "acc_norm": 0.47346938775510206,
            "acc_norm_stderr": 0.03196412734523272
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.569620253164557,
            "acc_stderr": 0.03223017195937599,
            "acc_norm": 0.569620253164557,
            "acc_norm_stderr": 0.03223017195937599
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.35658409387222945,
            "acc_stderr": 0.012233642989273891,
            "acc_norm": 0.35658409387222945,
            "acc_norm_stderr": 0.012233642989273891
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.45098039215686275,
            "acc_stderr": 0.03492406104163614,
            "acc_norm": 0.45098039215686275,
            "acc_norm_stderr": 0.03492406104163614
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.49696969696969695,
            "acc_stderr": 0.03904272341431857,
            "acc_norm": 0.49696969696969695,
            "acc_norm_stderr": 0.03904272341431857
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2766217870257038,
            "mc1_stderr": 0.015659605755326905,
            "mc2": 0.43550201857978377,
            "mc2_stderr": 0.015311053526638174
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4498229043683589,
            "acc_stderr": 0.017103573343825708,
            "acc_norm": 0.5112160566706021,
            "acc_norm_stderr": 0.017186028469489294
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "kyujinpy/KoR-Orca-Platypus-13B",
        "model_sha": "66063590ce01dc70a30bcf04f1f8addd7e72f73b",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}