{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.47696245733788395,
            "acc_stderr": 0.014595873205358269,
            "acc_norm": 0.5324232081911263,
            "acc_norm_stderr": 0.014580637569995426
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.46773551085441145,
            "acc_stderr": 0.004979381876712608,
            "acc_norm": 0.6227843059151563,
            "acc_norm_stderr": 0.004836990373261561
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5263157894736842,
            "acc_stderr": 0.03829509868994727,
            "acc_norm": 0.5263157894736842,
            "acc_norm_stderr": 0.03829509868994727
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.47572815533980584,
            "acc_stderr": 0.049449010929737795,
            "acc_norm": 0.47572815533980584,
            "acc_norm_stderr": 0.049449010929737795
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5632183908045977,
            "acc_stderr": 0.017736470837800694,
            "acc_norm": 0.5632183908045977,
            "acc_norm_stderr": 0.017736470837800694
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4740740740740741,
            "acc_stderr": 0.04313531696750574,
            "acc_norm": 0.4740740740740741,
            "acc_norm_stderr": 0.04313531696750574
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768077,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768077
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.40425531914893614,
            "acc_stderr": 0.03208115750788684,
            "acc_norm": 0.40425531914893614,
            "acc_norm_stderr": 0.03208115750788684
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3855421686746988,
            "acc_stderr": 0.03789134424611548,
            "acc_norm": 0.3855421686746988,
            "acc_norm_stderr": 0.03789134424611548
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5176848874598071,
            "acc_stderr": 0.02838032284907713,
            "acc_norm": 0.5176848874598071,
            "acc_norm_stderr": 0.02838032284907713
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5291479820627802,
            "acc_stderr": 0.03350073248773404,
            "acc_norm": 0.5291479820627802,
            "acc_norm_stderr": 0.03350073248773404
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.44274809160305345,
            "acc_stderr": 0.043564472026650695,
            "acc_norm": 0.44274809160305345,
            "acc_norm_stderr": 0.043564472026650695
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.4,
            "acc_stderr": 0.04923659639173309,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.04923659639173309
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5959595959595959,
            "acc_stderr": 0.03496130972056126,
            "acc_norm": 0.5959595959595959,
            "acc_norm_stderr": 0.03496130972056126
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4206896551724138,
            "acc_stderr": 0.0411391498118926,
            "acc_norm": 0.4206896551724138,
            "acc_norm_stderr": 0.0411391498118926
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.23529411764705882,
            "acc_stderr": 0.04220773659171453,
            "acc_norm": 0.23529411764705882,
            "acc_norm_stderr": 0.04220773659171453
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.5084033613445378,
            "acc_stderr": 0.0324739027656967,
            "acc_norm": 0.5084033613445378,
            "acc_norm_stderr": 0.0324739027656967
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4564102564102564,
            "acc_stderr": 0.02525448542479961,
            "acc_norm": 0.4564102564102564,
            "acc_norm_stderr": 0.02525448542479961
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.51,
            "acc_stderr": 0.05024183937956912,
            "acc_norm": 0.51,
            "acc_norm_stderr": 0.05024183937956912
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.48148148148148145,
            "acc_stderr": 0.04830366024635331,
            "acc_norm": 0.48148148148148145,
            "acc_norm_stderr": 0.04830366024635331
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3842364532019704,
            "acc_stderr": 0.034223985656575515,
            "acc_norm": 0.3842364532019704,
            "acc_norm_stderr": 0.034223985656575515
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.4612903225806452,
            "acc_stderr": 0.028358634859836925,
            "acc_norm": 0.4612903225806452,
            "acc_norm_stderr": 0.028358634859836925
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6452991452991453,
            "acc_stderr": 0.031342504862454025,
            "acc_norm": 0.6452991452991453,
            "acc_norm_stderr": 0.031342504862454025
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.46037735849056605,
            "acc_stderr": 0.03067609659938918,
            "acc_norm": 0.46037735849056605,
            "acc_norm_stderr": 0.03067609659938918
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5818181818181818,
            "acc_stderr": 0.04724577405731572,
            "acc_norm": 0.5818181818181818,
            "acc_norm_stderr": 0.04724577405731572
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.24814814814814815,
            "acc_stderr": 0.0263357394040558,
            "acc_norm": 0.24814814814814815,
            "acc_norm_stderr": 0.0263357394040558
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2781456953642384,
            "acc_stderr": 0.036586032627637426,
            "acc_norm": 0.2781456953642384,
            "acc_norm_stderr": 0.036586032627637426
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5920398009950248,
            "acc_stderr": 0.03475116365194092,
            "acc_norm": 0.5920398009950248,
            "acc_norm_stderr": 0.03475116365194092
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.42196531791907516,
            "acc_stderr": 0.0376574669386515,
            "acc_norm": 0.42196531791907516,
            "acc_norm_stderr": 0.0376574669386515
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.30687830687830686,
            "acc_stderr": 0.023752928712112147,
            "acc_norm": 0.30687830687830686,
            "acc_norm_stderr": 0.023752928712112147
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3958333333333333,
            "acc_stderr": 0.04089465449325583,
            "acc_norm": 0.3958333333333333,
            "acc_norm_stderr": 0.04089465449325583
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.6,
            "acc_stderr": 0.04923659639173309,
            "acc_norm": 0.6,
            "acc_norm_stderr": 0.04923659639173309
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5144508670520231,
            "acc_stderr": 0.026907849856282542,
            "acc_norm": 0.5144508670520231,
            "acc_norm_stderr": 0.026907849856282542
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5398773006134969,
            "acc_stderr": 0.03915857291436972,
            "acc_norm": 0.5398773006134969,
            "acc_norm_stderr": 0.03915857291436972
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.49074074074074076,
            "acc_stderr": 0.027815973433878014,
            "acc_norm": 0.49074074074074076,
            "acc_norm_stderr": 0.027815973433878014
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.26,
            "acc_stderr": 0.044084400227680794,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.044084400227680794
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.6062176165803109,
            "acc_stderr": 0.035260770955482405,
            "acc_norm": 0.6062176165803109,
            "acc_norm_stderr": 0.035260770955482405
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.21929824561403508,
            "acc_stderr": 0.03892431106518754,
            "acc_norm": 0.21929824561403508,
            "acc_norm_stderr": 0.03892431106518754
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.6055045871559633,
            "acc_stderr": 0.020954642108587506,
            "acc_norm": 0.6055045871559633,
            "acc_norm_stderr": 0.020954642108587506
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.31746031746031744,
            "acc_stderr": 0.04163453031302859,
            "acc_norm": 0.31746031746031744,
            "acc_norm_stderr": 0.04163453031302859
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4150326797385621,
            "acc_stderr": 0.028213504177824093,
            "acc_norm": 0.4150326797385621,
            "acc_norm_stderr": 0.028213504177824093
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.43,
            "acc_stderr": 0.049756985195624284,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.049756985195624284
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6198347107438017,
            "acc_stderr": 0.04431324501968431,
            "acc_norm": 0.6198347107438017,
            "acc_norm_stderr": 0.04431324501968431
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3881578947368421,
            "acc_stderr": 0.03965842097512744,
            "acc_norm": 0.3881578947368421,
            "acc_norm_stderr": 0.03965842097512744
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.41013071895424835,
            "acc_stderr": 0.01989841271763589,
            "acc_norm": 0.41013071895424835,
            "acc_norm_stderr": 0.01989841271763589
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.32269503546099293,
            "acc_stderr": 0.02788913930053478,
            "acc_norm": 0.32269503546099293,
            "acc_norm_stderr": 0.02788913930053478
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.2767857142857143,
            "acc_stderr": 0.042466243366976235,
            "acc_norm": 0.2767857142857143,
            "acc_norm_stderr": 0.042466243366976235
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3287037037037037,
            "acc_stderr": 0.03203614084670058,
            "acc_norm": 0.3287037037037037,
            "acc_norm_stderr": 0.03203614084670058
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.26927374301675977,
            "acc_stderr": 0.014835616582882618,
            "acc_norm": 0.26927374301675977,
            "acc_norm_stderr": 0.014835616582882618
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.38,
            "acc_stderr": 0.04878317312145632,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.04878317312145632
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.39338235294117646,
            "acc_stderr": 0.02967428828131117,
            "acc_norm": 0.39338235294117646,
            "acc_norm_stderr": 0.02967428828131117
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.46938775510204084,
            "acc_stderr": 0.031949171367580624,
            "acc_norm": 0.46938775510204084,
            "acc_norm_stderr": 0.031949171367580624
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6455696202531646,
            "acc_stderr": 0.0311373042971858,
            "acc_norm": 0.6455696202531646,
            "acc_norm_stderr": 0.0311373042971858
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.37222946544980445,
            "acc_stderr": 0.012346241297204366,
            "acc_norm": 0.37222946544980445,
            "acc_norm_stderr": 0.012346241297204366
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5196078431372549,
            "acc_stderr": 0.03506612560524866,
            "acc_norm": 0.5196078431372549,
            "acc_norm_stderr": 0.03506612560524866
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5515151515151515,
            "acc_stderr": 0.038835659779569286,
            "acc_norm": 0.5515151515151515,
            "acc_norm_stderr": 0.038835659779569286
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3292533659730722,
            "mc1_stderr": 0.01645126444006824,
            "mc2": 0.4865420269226251,
            "mc2_stderr": 0.016014497778680654
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4675324675324675,
            "acc_stderr": 0.01715407371668286,
            "acc_norm": 0.5029515938606848,
            "acc_norm_stderr": 0.017190054580194694
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "HumanF-MarkrAI/COKAL-DPO-13b-v3",
        "model_sha": "64a95028cd730b0453dba44259b776a455f86049",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
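
A minimal sketch of how a results file with this structure can be read: each entry under "results" is keyed by "harness|<task>|<num_fewshot>" and reports either acc/acc_norm (ARC, HellaSwag, MMLU, CommonGen) or mc1/mc2 (TruthfulQA). The file name "results.json" and the helper below are illustrative assumptions, not part of the evaluation output.

```python
import json
from statistics import mean

# Assumed file name; point this at wherever the JSON above is stored.
with open("results.json", encoding="utf-8") as f:
    data = json.load(f)

results = data["results"]

def score(entry: dict) -> float:
    # Prefer the length-normalized accuracy; fall back to mc2 for TruthfulQA.
    return entry.get("acc_norm", entry.get("mc2"))

# Macro-average acc_norm over the Korean MMLU subtasks.
mmlu_scores = [score(v) for k, v in results.items() if "ko_mmlu" in k]
print(f"ko_mmlu macro-average acc_norm: {mean(mmlu_scores):.4f}")

# Headline scores for the remaining benchmarks.
print("ko_arc_challenge acc_norm:", results["harness|ko_arc_challenge|25"]["acc_norm"])
print("ko_hellaswag acc_norm:", results["harness|ko_hellaswag|10"]["acc_norm"])
print("ko_truthfulqa mc2:", results["harness|ko_truthfulqa_mc|0"]["mc2"])
print("ko_commongen_v2 acc_norm:", results["harness|ko_commongen_v2|2"]["acc_norm"])
```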