{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.2960750853242321,
            "acc_stderr": 0.013340916085246263,
            "acc_norm": 0.3370307167235495,
            "acc_norm_stderr": 0.013813476652902265
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.35241983668591914,
            "acc_stderr": 0.004767475366689779,
            "acc_norm": 0.42252539334793865,
            "acc_norm_stderr": 0.004929517011508216
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4093567251461988,
            "acc_stderr": 0.037712831076265434,
            "acc_norm": 0.4093567251461988,
            "acc_norm_stderr": 0.037712831076265434
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.32038834951456313,
            "acc_stderr": 0.0462028408228004,
            "acc_norm": 0.32038834951456313,
            "acc_norm_stderr": 0.0462028408228004
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.37547892720306514,
            "acc_stderr": 0.01731661319718279,
            "acc_norm": 0.37547892720306514,
            "acc_norm_stderr": 0.01731661319718279
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.2740740740740741,
            "acc_stderr": 0.03853254836552003,
            "acc_norm": 0.2740740740740741,
            "acc_norm_stderr": 0.03853254836552003
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3446808510638298,
            "acc_stderr": 0.031068985963122145,
            "acc_norm": 0.3446808510638298,
            "acc_norm_stderr": 0.031068985963122145
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3192771084337349,
            "acc_stderr": 0.0362933532994786,
            "acc_norm": 0.3192771084337349,
            "acc_norm_stderr": 0.0362933532994786
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4115755627009646,
            "acc_stderr": 0.027950481494401266,
            "acc_norm": 0.4115755627009646,
            "acc_norm_stderr": 0.027950481494401266
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.37668161434977576,
            "acc_stderr": 0.03252113489929188,
            "acc_norm": 0.37668161434977576,
            "acc_norm_stderr": 0.03252113489929188
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.3893129770992366,
            "acc_stderr": 0.04276486542814591,
            "acc_norm": 0.3893129770992366,
            "acc_norm_stderr": 0.04276486542814591
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.03358618145732524,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.03358618145732524
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.3586206896551724,
            "acc_stderr": 0.039966295748767186,
            "acc_norm": 0.3586206896551724,
            "acc_norm_stderr": 0.039966295748767186
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.19607843137254902,
            "acc_stderr": 0.03950581861179962,
            "acc_norm": 0.19607843137254902,
            "acc_norm_stderr": 0.03950581861179962
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.3403361344537815,
            "acc_stderr": 0.03077805742293167,
            "acc_norm": 0.3403361344537815,
            "acc_norm_stderr": 0.03077805742293167
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.33589743589743587,
            "acc_stderr": 0.02394672474156397,
            "acc_norm": 0.33589743589743587,
            "acc_norm_stderr": 0.02394672474156397
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.54,
            "acc_stderr": 0.05009082659620333,
            "acc_norm": 0.54,
            "acc_norm_stderr": 0.05009082659620333
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.21,
            "acc_stderr": 0.040936018074033256,
            "acc_norm": 0.21,
            "acc_norm_stderr": 0.040936018074033256
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.4351851851851852,
            "acc_stderr": 0.04792898170907062,
            "acc_norm": 0.4351851851851852,
            "acc_norm_stderr": 0.04792898170907062
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.31527093596059114,
            "acc_stderr": 0.03269080871970187,
            "acc_norm": 0.31527093596059114,
            "acc_norm_stderr": 0.03269080871970187
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.3258064516129032,
            "acc_stderr": 0.026662010578567107,
            "acc_norm": 0.3258064516129032,
            "acc_norm_stderr": 0.026662010578567107
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.5683760683760684,
            "acc_stderr": 0.0324483553531149,
            "acc_norm": 0.5683760683760684,
            "acc_norm_stderr": 0.0324483553531149
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.37358490566037733,
            "acc_stderr": 0.029773082713319878,
            "acc_norm": 0.37358490566037733,
            "acc_norm_stderr": 0.029773082713319878
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.45454545454545453,
            "acc_stderr": 0.04769300568972743,
            "acc_norm": 0.45454545454545453,
            "acc_norm_stderr": 0.04769300568972743
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.24814814814814815,
            "acc_stderr": 0.0263357394040558,
            "acc_norm": 0.24814814814814815,
            "acc_norm_stderr": 0.0263357394040558
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2913907284768212,
            "acc_stderr": 0.037101857261199946,
            "acc_norm": 0.2913907284768212,
            "acc_norm_stderr": 0.037101857261199946
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.42786069651741293,
            "acc_stderr": 0.03498541988407795,
            "acc_norm": 0.42786069651741293,
            "acc_norm_stderr": 0.03498541988407795
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3236994219653179,
            "acc_stderr": 0.035676037996391685,
            "acc_norm": 0.3236994219653179,
            "acc_norm_stderr": 0.035676037996391685
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.02141168439369418,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.02141168439369418
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3263888888888889,
            "acc_stderr": 0.03921067198982266,
            "acc_norm": 0.3263888888888889,
            "acc_norm_stderr": 0.03921067198982266
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.44,
            "acc_stderr": 0.0498887651569859,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.0498887651569859
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.40173410404624277,
            "acc_stderr": 0.026394104177643634,
            "acc_norm": 0.40173410404624277,
            "acc_norm_stderr": 0.026394104177643634
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3128834355828221,
            "acc_stderr": 0.03642914578292404,
            "acc_norm": 0.3128834355828221,
            "acc_norm_stderr": 0.03642914578292404
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.3425925925925926,
            "acc_stderr": 0.026406145973625658,
            "acc_norm": 0.3425925925925926,
            "acc_norm_stderr": 0.026406145973625658
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.40932642487046633,
            "acc_stderr": 0.03548608168860806,
            "acc_norm": 0.40932642487046633,
            "acc_norm_stderr": 0.03548608168860806
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.24561403508771928,
            "acc_stderr": 0.04049339297748142,
            "acc_norm": 0.24561403508771928,
            "acc_norm_stderr": 0.04049339297748142
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.30642201834862387,
            "acc_stderr": 0.019765517220458523,
            "acc_norm": 0.30642201834862387,
            "acc_norm_stderr": 0.019765517220458523
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.30952380952380953,
            "acc_stderr": 0.04134913018303316,
            "acc_norm": 0.30952380952380953,
            "acc_norm_stderr": 0.04134913018303316
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4215686274509804,
            "acc_stderr": 0.028275490156791438,
            "acc_norm": 0.4215686274509804,
            "acc_norm_stderr": 0.028275490156791438
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.35,
            "acc_stderr": 0.047937248544110196,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.047937248544110196
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5041322314049587,
            "acc_stderr": 0.045641987674327526,
            "acc_norm": 0.5041322314049587,
            "acc_norm_stderr": 0.045641987674327526
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.23684210526315788,
            "acc_stderr": 0.034597776068105365,
            "acc_norm": 0.23684210526315788,
            "acc_norm_stderr": 0.034597776068105365
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3300653594771242,
            "acc_stderr": 0.019023726160724553,
            "acc_norm": 0.3300653594771242,
            "acc_norm_stderr": 0.019023726160724553
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2765957446808511,
            "acc_stderr": 0.026684564340460994,
            "acc_norm": 0.2765957446808511,
            "acc_norm_stderr": 0.026684564340460994
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.3482142857142857,
            "acc_stderr": 0.045218299028335865,
            "acc_norm": 0.3482142857142857,
            "acc_norm_stderr": 0.045218299028335865
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.24074074074074073,
            "acc_stderr": 0.029157522184605617,
            "acc_norm": 0.24074074074074073,
            "acc_norm_stderr": 0.029157522184605617
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.24134078212290502,
            "acc_stderr": 0.014310999547961443,
            "acc_norm": 0.24134078212290502,
            "acc_norm_stderr": 0.014310999547961443
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.27,
            "acc_stderr": 0.0446196043338474,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.0446196043338474
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.43,
            "acc_stderr": 0.04975698519562428,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.04975698519562428
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.2610294117647059,
            "acc_stderr": 0.02667925227010312,
            "acc_norm": 0.2610294117647059,
            "acc_norm_stderr": 0.02667925227010312
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.40816326530612246,
            "acc_stderr": 0.03146465712827424,
            "acc_norm": 0.40816326530612246,
            "acc_norm_stderr": 0.03146465712827424
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.39662447257383965,
            "acc_stderr": 0.03184399873811225,
            "acc_norm": 0.39662447257383965,
            "acc_norm_stderr": 0.03184399873811225
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.27640156453715775,
            "acc_stderr": 0.011422153194553567,
            "acc_norm": 0.27640156453715775,
            "acc_norm_stderr": 0.011422153194553567
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.3431372549019608,
            "acc_stderr": 0.033321399446680854,
            "acc_norm": 0.3431372549019608,
            "acc_norm_stderr": 0.033321399446680854
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.3515151515151515,
            "acc_stderr": 0.037282069986826503,
            "acc_norm": 0.3515151515151515,
            "acc_norm_stderr": 0.037282069986826503
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2692778457772338,
            "mc1_stderr": 0.015528566637087305,
            "mc2": 0.44326975161880294,
            "mc2_stderr": 0.015781962014868475
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.31995277449822906,
            "acc_stderr": 0.016037153840280538,
            "acc_norm": 0.3955135773317591,
            "acc_norm_stderr": 0.016810815902206042
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "KRAFTON/KORani-v2-13B",
        "model_sha": "12dbb4046d3fabb3b64c3eab2ecc91faec1af9e9",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}