{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.31569965870307165,
            "acc_stderr": 0.013582571095815291,
            "acc_norm": 0.3660409556313993,
            "acc_norm_stderr": 0.01407722310847014
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3599880501892053,
            "acc_stderr": 0.004790155370993447,
            "acc_norm": 0.45956980681139215,
            "acc_norm_stderr": 0.004973442060741622
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.43859649122807015,
            "acc_stderr": 0.038057975055904594,
            "acc_norm": 0.43859649122807015,
            "acc_norm_stderr": 0.038057975055904594
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.4368932038834951,
            "acc_stderr": 0.04911147107365777,
            "acc_norm": 0.4368932038834951,
            "acc_norm_stderr": 0.04911147107365777
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.48659003831417624,
            "acc_stderr": 0.017873531736510385,
            "acc_norm": 0.48659003831417624,
            "acc_norm_stderr": 0.017873531736510385
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.37037037037037035,
            "acc_stderr": 0.041716541613545426,
            "acc_norm": 0.37037037037037035,
            "acc_norm_stderr": 0.041716541613545426
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.37872340425531914,
            "acc_stderr": 0.03170995606040655,
            "acc_norm": 0.37872340425531914,
            "acc_norm_stderr": 0.03170995606040655
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3433734939759036,
            "acc_stderr": 0.03696584317010602,
            "acc_norm": 0.3433734939759036,
            "acc_norm_stderr": 0.03696584317010602
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4565916398713826,
            "acc_stderr": 0.028290869054197598,
            "acc_norm": 0.4565916398713826,
            "acc_norm_stderr": 0.028290869054197598
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.3721973094170404,
            "acc_stderr": 0.03244305283008732,
            "acc_norm": 0.3721973094170404,
            "acc_norm_stderr": 0.03244305283008732
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4580152671755725,
            "acc_stderr": 0.04369802690578757,
            "acc_norm": 0.4580152671755725,
            "acc_norm_stderr": 0.04369802690578757
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.4595959595959596,
            "acc_stderr": 0.035507024651313425,
            "acc_norm": 0.4595959595959596,
            "acc_norm_stderr": 0.035507024651313425
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.3931034482758621,
            "acc_stderr": 0.040703290137070705,
            "acc_norm": 0.3931034482758621,
            "acc_norm_stderr": 0.040703290137070705
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.21568627450980393,
            "acc_stderr": 0.04092563958237655,
            "acc_norm": 0.21568627450980393,
            "acc_norm_stderr": 0.04092563958237655
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.38235294117647056,
            "acc_stderr": 0.031566630992154156,
            "acc_norm": 0.38235294117647056,
            "acc_norm_stderr": 0.031566630992154156
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.38461538461538464,
            "acc_stderr": 0.02466674491518724,
            "acc_norm": 0.38461538461538464,
            "acc_norm_stderr": 0.02466674491518724
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.49,
            "acc_stderr": 0.05024183937956911,
            "acc_norm": 0.49,
            "acc_norm_stderr": 0.05024183937956911
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.33,
            "acc_stderr": 0.047258156262526045,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.047258156262526045
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.46296296296296297,
            "acc_stderr": 0.04820403072760628,
            "acc_norm": 0.46296296296296297,
            "acc_norm_stderr": 0.04820403072760628
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3694581280788177,
            "acc_stderr": 0.03395970381998574,
            "acc_norm": 0.3694581280788177,
            "acc_norm_stderr": 0.03395970381998574
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.4483870967741935,
            "acc_stderr": 0.028292056830112735,
            "acc_norm": 0.4483870967741935,
            "acc_norm_stderr": 0.028292056830112735
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.594017094017094,
            "acc_stderr": 0.03217180182641086,
            "acc_norm": 0.594017094017094,
            "acc_norm_stderr": 0.03217180182641086
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.3886792452830189,
            "acc_stderr": 0.030000485448675986,
            "acc_norm": 0.3886792452830189,
            "acc_norm_stderr": 0.030000485448675986
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4909090909090909,
            "acc_stderr": 0.0478833976870286,
            "acc_norm": 0.4909090909090909,
            "acc_norm_stderr": 0.0478833976870286
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.24074074074074073,
            "acc_stderr": 0.02606715922227578,
            "acc_norm": 0.24074074074074073,
            "acc_norm_stderr": 0.02606715922227578
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.25165562913907286,
            "acc_stderr": 0.03543304234389985,
            "acc_norm": 0.25165562913907286,
            "acc_norm_stderr": 0.03543304234389985
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.572139303482587,
            "acc_stderr": 0.03498541988407795,
            "acc_norm": 0.572139303482587,
            "acc_norm_stderr": 0.03498541988407795
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.32947976878612717,
            "acc_stderr": 0.03583901754736411,
            "acc_norm": 0.32947976878612717,
            "acc_norm_stderr": 0.03583901754736411
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2751322751322751,
            "acc_stderr": 0.023000086859068642,
            "acc_norm": 0.2751322751322751,
            "acc_norm_stderr": 0.023000086859068642
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3541666666666667,
            "acc_stderr": 0.03999411135753543,
            "acc_norm": 0.3541666666666667,
            "acc_norm_stderr": 0.03999411135753543
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.35,
            "acc_stderr": 0.047937248544110196,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.047937248544110196
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.57,
            "acc_stderr": 0.04975698519562427,
            "acc_norm": 0.57,
            "acc_norm_stderr": 0.04975698519562427
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.43641618497109824,
            "acc_stderr": 0.026700545424943684,
            "acc_norm": 0.43641618497109824,
            "acc_norm_stderr": 0.026700545424943684
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.36809815950920244,
            "acc_stderr": 0.03789213935838396,
            "acc_norm": 0.36809815950920244,
            "acc_norm_stderr": 0.03789213935838396
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.404320987654321,
            "acc_stderr": 0.027306625297327688,
            "acc_norm": 0.404320987654321,
            "acc_norm_stderr": 0.027306625297327688
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421276,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421276
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.47150259067357514,
            "acc_stderr": 0.03602573571288442,
            "acc_norm": 0.47150259067357514,
            "acc_norm_stderr": 0.03602573571288442
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.3157894736842105,
            "acc_stderr": 0.04372748290278009,
            "acc_norm": 0.3157894736842105,
            "acc_norm_stderr": 0.04372748290278009
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.42935779816513764,
            "acc_stderr": 0.021222286397236518,
            "acc_norm": 0.42935779816513764,
            "acc_norm_stderr": 0.021222286397236518
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2698412698412698,
            "acc_stderr": 0.03970158273235173,
            "acc_norm": 0.2698412698412698,
            "acc_norm_stderr": 0.03970158273235173
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4444444444444444,
            "acc_stderr": 0.028452639985088006,
            "acc_norm": 0.4444444444444444,
            "acc_norm_stderr": 0.028452639985088006
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.628099173553719,
            "acc_stderr": 0.044120158066245044,
            "acc_norm": 0.628099173553719,
            "acc_norm_stderr": 0.044120158066245044
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.4473684210526316,
            "acc_stderr": 0.04046336883978251,
            "acc_norm": 0.4473684210526316,
            "acc_norm_stderr": 0.04046336883978251
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3300653594771242,
            "acc_stderr": 0.019023726160724553,
            "acc_norm": 0.3300653594771242,
            "acc_norm_stderr": 0.019023726160724553
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3120567375886525,
            "acc_stderr": 0.027640120545169938,
            "acc_norm": 0.3120567375886525,
            "acc_norm_stderr": 0.027640120545169938
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.25,
            "acc_stderr": 0.04109974682633932,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04109974682633932
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.30092592592592593,
            "acc_stderr": 0.031280390843298825,
            "acc_norm": 0.30092592592592593,
            "acc_norm_stderr": 0.031280390843298825
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2424581005586592,
            "acc_stderr": 0.01433352205921789,
            "acc_norm": 0.2424581005586592,
            "acc_norm_stderr": 0.01433352205921789
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.33088235294117646,
            "acc_stderr": 0.02858270975389843,
            "acc_norm": 0.33088235294117646,
            "acc_norm_stderr": 0.02858270975389843
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.3673469387755102,
            "acc_stderr": 0.030862144921087565,
            "acc_norm": 0.3673469387755102,
            "acc_norm_stderr": 0.030862144921087565
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.4641350210970464,
            "acc_stderr": 0.03246338898055659,
            "acc_norm": 0.4641350210970464,
            "acc_norm_stderr": 0.03246338898055659
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.29726205997392435,
            "acc_stderr": 0.011673346173086034,
            "acc_norm": 0.29726205997392435,
            "acc_norm_stderr": 0.011673346173086034
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.35784313725490197,
            "acc_stderr": 0.033644872860882996,
            "acc_norm": 0.35784313725490197,
            "acc_norm_stderr": 0.033644872860882996
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.43636363636363634,
            "acc_stderr": 0.03872592983524754,
            "acc_norm": 0.43636363636363634,
            "acc_norm_stderr": 0.03872592983524754
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2741738066095471,
            "mc1_stderr": 0.015616518497219374,
            "mc2": 0.4349357238291092,
            "mc2_stderr": 0.015145789899523338
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.42502951593860683,
            "acc_stderr": 0.016996016308362887,
            "acc_norm": 0.5395513577331759,
            "acc_norm_stderr": 0.017136487626049846
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "AIdenU/LLAMA-2-13b-koen-Y24_v1.0",
        "model_sha": "29322c0eaa54ff261284806e15aba5ecb93edcab",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}