{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.4590443686006826,
            "acc_stderr": 0.014562291073601234,
            "acc_norm": 0.5170648464163823,
            "acc_norm_stderr": 0.014602878388536591
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4457279426409082,
            "acc_stderr": 0.004960299952519394,
            "acc_norm": 0.6086436964748058,
            "acc_norm_stderr": 0.004870563921220625
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.6491228070175439,
            "acc_stderr": 0.03660298834049164,
            "acc_norm": 0.6491228070175439,
            "acc_norm_stderr": 0.03660298834049164
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.6893203883495146,
            "acc_stderr": 0.0458212416016155,
            "acc_norm": 0.6893203883495146,
            "acc_norm_stderr": 0.0458212416016155
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.669220945083014,
            "acc_stderr": 0.01682481846256376,
            "acc_norm": 0.669220945083014,
            "acc_norm_stderr": 0.01682481846256376
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.42962962962962964,
            "acc_stderr": 0.04276349494376599,
            "acc_norm": 0.42962962962962964,
            "acc_norm_stderr": 0.04276349494376599
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421255,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421255
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.48936170212765956,
            "acc_stderr": 0.03267862331014063,
            "acc_norm": 0.48936170212765956,
            "acc_norm_stderr": 0.03267862331014063
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.4759036144578313,
            "acc_stderr": 0.03887971849597264,
            "acc_norm": 0.4759036144578313,
            "acc_norm_stderr": 0.03887971849597264
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5852090032154341,
            "acc_stderr": 0.027982680459759567,
            "acc_norm": 0.5852090032154341,
            "acc_norm_stderr": 0.027982680459759567
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5829596412556054,
            "acc_stderr": 0.03309266936071721,
            "acc_norm": 0.5829596412556054,
            "acc_norm_stderr": 0.03309266936071721
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.5801526717557252,
            "acc_stderr": 0.04328577215262973,
            "acc_norm": 0.5801526717557252,
            "acc_norm_stderr": 0.04328577215262973
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.49,
            "acc_stderr": 0.05024183937956914,
            "acc_norm": 0.49,
            "acc_norm_stderr": 0.05024183937956914
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.7424242424242424,
            "acc_stderr": 0.031156269519646847,
            "acc_norm": 0.7424242424242424,
            "acc_norm_stderr": 0.031156269519646847
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4827586206896552,
            "acc_stderr": 0.04164188720169377,
            "acc_norm": 0.4827586206896552,
            "acc_norm_stderr": 0.04164188720169377
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.3627450980392157,
            "acc_stderr": 0.047840607041056527,
            "acc_norm": 0.3627450980392157,
            "acc_norm_stderr": 0.047840607041056527
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.6512605042016807,
            "acc_stderr": 0.030956636328566545,
            "acc_norm": 0.6512605042016807,
            "acc_norm_stderr": 0.030956636328566545
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.5358974358974359,
            "acc_stderr": 0.025285585990017862,
            "acc_norm": 0.5358974358974359,
            "acc_norm_stderr": 0.025285585990017862
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.59,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.59,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.35,
            "acc_stderr": 0.04793724854411018,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.04793724854411018
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.6388888888888888,
            "acc_stderr": 0.04643454608906275,
            "acc_norm": 0.6388888888888888,
            "acc_norm_stderr": 0.04643454608906275
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.39408866995073893,
            "acc_stderr": 0.03438157967036543,
            "acc_norm": 0.39408866995073893,
            "acc_norm_stderr": 0.03438157967036543
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.6258064516129033,
            "acc_stderr": 0.027528904299845683,
            "acc_norm": 0.6258064516129033,
            "acc_norm_stderr": 0.027528904299845683
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.8034188034188035,
            "acc_stderr": 0.02603538609895129,
            "acc_norm": 0.8034188034188035,
            "acc_norm_stderr": 0.02603538609895129
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.5509433962264151,
            "acc_stderr": 0.030612730713641092,
            "acc_norm": 0.5509433962264151,
            "acc_norm_stderr": 0.030612730713641092
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5727272727272728,
            "acc_stderr": 0.047381987035454834,
            "acc_norm": 0.5727272727272728,
            "acc_norm_stderr": 0.047381987035454834
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3592592592592593,
            "acc_stderr": 0.02925290592725198,
            "acc_norm": 0.3592592592592593,
            "acc_norm_stderr": 0.02925290592725198
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.3443708609271523,
            "acc_stderr": 0.03879687024073327,
            "acc_norm": 0.3443708609271523,
            "acc_norm_stderr": 0.03879687024073327
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.7064676616915423,
            "acc_stderr": 0.032200241045342054,
            "acc_norm": 0.7064676616915423,
            "acc_norm_stderr": 0.032200241045342054
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4682080924855491,
            "acc_stderr": 0.03804749744364764,
            "acc_norm": 0.4682080924855491,
            "acc_norm_stderr": 0.03804749744364764
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.43915343915343913,
            "acc_stderr": 0.02555992055053101,
            "acc_norm": 0.43915343915343913,
            "acc_norm_stderr": 0.02555992055053101
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.5625,
            "acc_stderr": 0.04148415739394154,
            "acc_norm": 0.5625,
            "acc_norm_stderr": 0.04148415739394154
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.4,
            "acc_stderr": 0.04923659639173309,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.04923659639173309
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.75,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.75,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5895953757225434,
            "acc_stderr": 0.02648339204209818,
            "acc_norm": 0.5895953757225434,
            "acc_norm_stderr": 0.02648339204209818
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.49079754601226994,
            "acc_stderr": 0.039277056007874414,
            "acc_norm": 0.49079754601226994,
            "acc_norm_stderr": 0.039277056007874414
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.6203703703703703,
            "acc_stderr": 0.027002521034516468,
            "acc_norm": 0.6203703703703703,
            "acc_norm_stderr": 0.027002521034516468
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.7202072538860104,
            "acc_stderr": 0.03239637046735703,
            "acc_norm": 0.7202072538860104,
            "acc_norm_stderr": 0.03239637046735703
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.45614035087719296,
            "acc_stderr": 0.046854730419077895,
            "acc_norm": 0.45614035087719296,
            "acc_norm_stderr": 0.046854730419077895
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.7119266055045872,
            "acc_stderr": 0.019416445892636025,
            "acc_norm": 0.7119266055045872,
            "acc_norm_stderr": 0.019416445892636025
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.4444444444444444,
            "acc_stderr": 0.044444444444444495,
            "acc_norm": 0.4444444444444444,
            "acc_norm_stderr": 0.044444444444444495
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.5490196078431373,
            "acc_stderr": 0.02849199358617156,
            "acc_norm": 0.5490196078431373,
            "acc_norm_stderr": 0.02849199358617156
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.61,
            "acc_stderr": 0.049020713000019756,
            "acc_norm": 0.61,
            "acc_norm_stderr": 0.049020713000019756
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.743801652892562,
            "acc_stderr": 0.03984979653302871,
            "acc_norm": 0.743801652892562,
            "acc_norm_stderr": 0.03984979653302871
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.5789473684210527,
            "acc_stderr": 0.040179012759817494,
            "acc_norm": 0.5789473684210527,
            "acc_norm_stderr": 0.040179012759817494
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.5098039215686274,
            "acc_stderr": 0.02022394600507431,
            "acc_norm": 0.5098039215686274,
            "acc_norm_stderr": 0.02022394600507431
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.41843971631205673,
            "acc_stderr": 0.02942799403941999,
            "acc_norm": 0.41843971631205673,
            "acc_norm_stderr": 0.02942799403941999
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.41964285714285715,
            "acc_stderr": 0.04684099321077106,
            "acc_norm": 0.41964285714285715,
            "acc_norm_stderr": 0.04684099321077106
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.5231481481481481,
            "acc_stderr": 0.034063153607115065,
            "acc_norm": 0.5231481481481481,
            "acc_norm_stderr": 0.034063153607115065
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.1877094972067039,
            "acc_stderr": 0.013059605303257065,
            "acc_norm": 0.1877094972067039,
            "acc_norm_stderr": 0.013059605303257065
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.69,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.69,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.5183823529411765,
            "acc_stderr": 0.03035230339535196,
            "acc_norm": 0.5183823529411765,
            "acc_norm_stderr": 0.03035230339535196
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5591836734693878,
            "acc_stderr": 0.03178419114175363,
            "acc_norm": 0.5591836734693878,
            "acc_norm_stderr": 0.03178419114175363
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.7679324894514767,
            "acc_stderr": 0.02747974455080851,
            "acc_norm": 0.7679324894514767,
            "acc_norm_stderr": 0.02747974455080851
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.41851368970013036,
            "acc_stderr": 0.01259950560833648,
            "acc_norm": 0.41851368970013036,
            "acc_norm_stderr": 0.01259950560833648
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.6519607843137255,
            "acc_stderr": 0.03343311240488418,
            "acc_norm": 0.6519607843137255,
            "acc_norm_stderr": 0.03343311240488418
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.6848484848484848,
            "acc_stderr": 0.0362773057502241,
            "acc_norm": 0.6848484848484848,
            "acc_norm_stderr": 0.0362773057502241
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.30966952264381886,
            "mc1_stderr": 0.0161857443551449,
            "mc2": 0.47119481512690015,
            "mc2_stderr": 0.0153427666032473
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.5749704840613932,
            "acc_stderr": 0.016996016308362887,
            "acc_norm": 0.6233766233766234,
            "acc_norm_stderr": 0.016658799874051985
        }
    },
"versions": { |
|
"all": 0, |
|
"harness|ko_arc_challenge|25": 0, |
|
"harness|ko_hellaswag|10": 0, |
|
"harness|ko_mmlu_world_religions|5": 1, |
|
"harness|ko_mmlu_management|5": 1, |
|
"harness|ko_mmlu_miscellaneous|5": 1, |
|
"harness|ko_mmlu_anatomy|5": 1, |
|
"harness|ko_mmlu_abstract_algebra|5": 1, |
|
"harness|ko_mmlu_conceptual_physics|5": 1, |
|
"harness|ko_mmlu_virology|5": 1, |
|
"harness|ko_mmlu_philosophy|5": 1, |
|
"harness|ko_mmlu_human_aging|5": 1, |
|
"harness|ko_mmlu_human_sexuality|5": 1, |
|
"harness|ko_mmlu_medical_genetics|5": 1, |
|
"harness|ko_mmlu_high_school_geography|5": 1, |
|
"harness|ko_mmlu_electrical_engineering|5": 1, |
|
"harness|ko_mmlu_college_physics|5": 1, |
|
"harness|ko_mmlu_high_school_microeconomics|5": 1, |
|
"harness|ko_mmlu_high_school_macroeconomics|5": 1, |
|
"harness|ko_mmlu_computer_security|5": 1, |
|
"harness|ko_mmlu_global_facts|5": 1, |
|
"harness|ko_mmlu_jurisprudence|5": 1, |
|
"harness|ko_mmlu_high_school_chemistry|5": 1, |
|
"harness|ko_mmlu_high_school_biology|5": 1, |
|
"harness|ko_mmlu_marketing|5": 1, |
|
"harness|ko_mmlu_clinical_knowledge|5": 1, |
|
"harness|ko_mmlu_public_relations|5": 1, |
|
"harness|ko_mmlu_high_school_mathematics|5": 1, |
|
"harness|ko_mmlu_high_school_physics|5": 1, |
|
"harness|ko_mmlu_sociology|5": 1, |
|
"harness|ko_mmlu_college_medicine|5": 1, |
|
"harness|ko_mmlu_elementary_mathematics|5": 1, |
|
"harness|ko_mmlu_college_biology|5": 1, |
|
"harness|ko_mmlu_college_chemistry|5": 1, |
|
"harness|ko_mmlu_us_foreign_policy|5": 1, |
|
"harness|ko_mmlu_moral_disputes|5": 1, |
|
"harness|ko_mmlu_logical_fallacies|5": 1, |
|
"harness|ko_mmlu_prehistory|5": 1, |
|
"harness|ko_mmlu_college_mathematics|5": 1, |
|
"harness|ko_mmlu_high_school_government_and_politics|5": 1, |
|
"harness|ko_mmlu_econometrics|5": 1, |
|
"harness|ko_mmlu_high_school_psychology|5": 1, |
|
"harness|ko_mmlu_formal_logic|5": 1, |
|
"harness|ko_mmlu_nutrition|5": 1, |
|
"harness|ko_mmlu_business_ethics|5": 1, |
|
"harness|ko_mmlu_international_law|5": 1, |
|
"harness|ko_mmlu_astronomy|5": 1, |
|
"harness|ko_mmlu_professional_psychology|5": 1, |
|
"harness|ko_mmlu_professional_accounting|5": 1, |
|
"harness|ko_mmlu_machine_learning|5": 1, |
|
"harness|ko_mmlu_high_school_statistics|5": 1, |
|
"harness|ko_mmlu_moral_scenarios|5": 1, |
|
"harness|ko_mmlu_college_computer_science|5": 1, |
|
"harness|ko_mmlu_high_school_computer_science|5": 1, |
|
"harness|ko_mmlu_professional_medicine|5": 1, |
|
"harness|ko_mmlu_security_studies|5": 1, |
|
"harness|ko_mmlu_high_school_world_history|5": 1, |
|
"harness|ko_mmlu_professional_law|5": 1, |
|
"harness|ko_mmlu_high_school_us_history|5": 1, |
|
"harness|ko_mmlu_high_school_european_history|5": 1, |
|
"harness|ko_truthfulqa_mc|0": 0, |
|
"harness|ko_commongen_v2|2": 1 |
|
}, |
|
"config_general": { |
|
"model_name": "Megastudy/M-SOLAR-10.7B-v1.1-beta", |
|
"model_sha": "2bdd9a00217c3fdd9ec6f4d966e5383a529bd0f5", |
|
"model_dtype": "torch.float16", |
|
"lighteval_sha": "", |
|
"num_few_shot_default": 0, |
|
"num_fewshot_seeds": 1, |
|
"override_batch_size": 1, |
|
"max_samples": null |
|
} |
|
} |