{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.34897610921501704,
            "acc_stderr": 0.0139289334613825,
            "acc_norm": 0.38310580204778155,
            "acc_norm_stderr": 0.01420647266167288
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3973312089225254,
            "acc_stderr": 0.00488345518890897,
            "acc_norm": 0.518621788488349,
            "acc_norm_stderr": 0.004986319587524962
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.3508771929824561,
            "acc_stderr": 0.036602988340491624,
            "acc_norm": 0.3508771929824561,
            "acc_norm_stderr": 0.036602988340491624
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.2524271844660194,
            "acc_stderr": 0.04301250399690879,
            "acc_norm": 0.2524271844660194,
            "acc_norm_stderr": 0.04301250399690879
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.35759897828863346,
            "acc_stderr": 0.017139488998803288,
            "acc_norm": 0.35759897828863346,
            "acc_norm_stderr": 0.017139488998803288
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.362962962962963,
            "acc_stderr": 0.041539484047424,
            "acc_norm": 0.362962962962963,
            "acc_norm_stderr": 0.041539484047424
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.29,
            "acc_stderr": 0.04560480215720683,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.04560480215720683
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.28936170212765955,
            "acc_stderr": 0.029644006577009618,
            "acc_norm": 0.28936170212765955,
            "acc_norm_stderr": 0.029644006577009618
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.25301204819277107,
            "acc_stderr": 0.033844291552331346,
            "acc_norm": 0.25301204819277107,
            "acc_norm_stderr": 0.033844291552331346
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.3729903536977492,
            "acc_stderr": 0.027466610213140105,
            "acc_norm": 0.3729903536977492,
            "acc_norm_stderr": 0.027466610213140105
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.26905829596412556,
            "acc_stderr": 0.029763779406874975,
            "acc_norm": 0.26905829596412556,
            "acc_norm_stderr": 0.029763779406874975
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.33587786259541985,
            "acc_stderr": 0.04142313771996664,
            "acc_norm": 0.33587786259541985,
            "acc_norm_stderr": 0.04142313771996664
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.33,
            "acc_stderr": 0.047258156262526045,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.047258156262526045
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.3888888888888889,
            "acc_stderr": 0.0347327959083696,
            "acc_norm": 0.3888888888888889,
            "acc_norm_stderr": 0.0347327959083696
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.38620689655172413,
            "acc_stderr": 0.04057324734419036,
            "acc_norm": 0.38620689655172413,
            "acc_norm_stderr": 0.04057324734419036
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.18627450980392157,
            "acc_stderr": 0.038739587141493524,
            "acc_norm": 0.18627450980392157,
            "acc_norm_stderr": 0.038739587141493524
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.25210084033613445,
            "acc_stderr": 0.028205545033277723,
            "acc_norm": 0.25210084033613445,
            "acc_norm_stderr": 0.028205545033277723
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.23076923076923078,
            "acc_stderr": 0.021362027725222738,
            "acc_norm": 0.23076923076923078,
            "acc_norm_stderr": 0.021362027725222738
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252605,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252605
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.19,
            "acc_stderr": 0.03942772444036623,
            "acc_norm": 0.19,
            "acc_norm_stderr": 0.03942772444036623
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.35185185185185186,
            "acc_stderr": 0.04616631111801713,
            "acc_norm": 0.35185185185185186,
            "acc_norm_stderr": 0.04616631111801713
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.270935960591133,
            "acc_stderr": 0.031270907132976984,
            "acc_norm": 0.270935960591133,
            "acc_norm_stderr": 0.031270907132976984
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.2806451612903226,
            "acc_stderr": 0.025560604721022877,
            "acc_norm": 0.2806451612903226,
            "acc_norm_stderr": 0.025560604721022877
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.3974358974358974,
            "acc_stderr": 0.03205953453789293,
            "acc_norm": 0.3974358974358974,
            "acc_norm_stderr": 0.03205953453789293
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.32452830188679244,
            "acc_stderr": 0.028815615713432118,
            "acc_norm": 0.32452830188679244,
            "acc_norm_stderr": 0.028815615713432118
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.2818181818181818,
            "acc_stderr": 0.04309118709946458,
            "acc_norm": 0.2818181818181818,
            "acc_norm_stderr": 0.04309118709946458
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.26666666666666666,
            "acc_stderr": 0.026962424325073838,
            "acc_norm": 0.26666666666666666,
            "acc_norm_stderr": 0.026962424325073838
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2781456953642384,
            "acc_stderr": 0.03658603262763743,
            "acc_norm": 0.2781456953642384,
            "acc_norm_stderr": 0.03658603262763743
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.3880597014925373,
            "acc_stderr": 0.034457899643627506,
            "acc_norm": 0.3880597014925373,
            "acc_norm_stderr": 0.034457899643627506
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.27167630057803466,
            "acc_stderr": 0.03391750322321658,
            "acc_norm": 0.27167630057803466,
            "acc_norm_stderr": 0.03391750322321658
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2671957671957672,
            "acc_stderr": 0.022789673145776578,
            "acc_norm": 0.2671957671957672,
            "acc_norm_stderr": 0.022789673145776578
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.037455547914624576,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.037455547914624576
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.16,
            "acc_stderr": 0.0368452949177471,
            "acc_norm": 0.16,
            "acc_norm_stderr": 0.0368452949177471
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.34,
            "acc_stderr": 0.047609522856952365,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.047609522856952365
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.33815028901734107,
            "acc_stderr": 0.02546977014940017,
            "acc_norm": 0.33815028901734107,
            "acc_norm_stderr": 0.02546977014940017
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.2392638036809816,
            "acc_stderr": 0.03351953879521269,
            "acc_norm": 0.2392638036809816,
            "acc_norm_stderr": 0.03351953879521269
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.3549382716049383,
            "acc_stderr": 0.026624152478845853,
            "acc_norm": 0.3549382716049383,
            "acc_norm_stderr": 0.026624152478845853
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.27,
            "acc_stderr": 0.0446196043338474,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.0446196043338474
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.23316062176165803,
            "acc_stderr": 0.03051611137147601,
            "acc_norm": 0.23316062176165803,
            "acc_norm_stderr": 0.03051611137147601
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.23684210526315788,
            "acc_stderr": 0.039994238792813344,
            "acc_norm": 0.23684210526315788,
            "acc_norm_stderr": 0.039994238792813344
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3211009174311927,
            "acc_stderr": 0.020018149772733747,
            "acc_norm": 0.3211009174311927,
            "acc_norm_stderr": 0.020018149772733747
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.18253968253968253,
            "acc_stderr": 0.03455071019102149,
            "acc_norm": 0.18253968253968253,
            "acc_norm_stderr": 0.03455071019102149
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.3235294117647059,
            "acc_stderr": 0.026787453111906532,
            "acc_norm": 0.3235294117647059,
            "acc_norm_stderr": 0.026787453111906532
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768079,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768079
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.4628099173553719,
            "acc_stderr": 0.045517111961042175,
            "acc_norm": 0.4628099173553719,
            "acc_norm_stderr": 0.045517111961042175
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3355263157894737,
            "acc_stderr": 0.03842498559395268,
            "acc_norm": 0.3355263157894737,
            "acc_norm_stderr": 0.03842498559395268
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3006535947712418,
            "acc_stderr": 0.018550634502952964,
            "acc_norm": 0.3006535947712418,
            "acc_norm_stderr": 0.018550634502952964
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2730496453900709,
            "acc_stderr": 0.026577860943307854,
            "acc_norm": 0.2730496453900709,
            "acc_norm_stderr": 0.026577860943307854
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.22321428571428573,
            "acc_stderr": 0.03952301967702511,
            "acc_norm": 0.22321428571428573,
            "acc_norm_stderr": 0.03952301967702511
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.25462962962962965,
            "acc_stderr": 0.02971127586000534,
            "acc_norm": 0.25462962962962965,
            "acc_norm_stderr": 0.02971127586000534
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.25251396648044694,
            "acc_stderr": 0.014530330201468638,
            "acc_norm": 0.25251396648044694,
            "acc_norm_stderr": 0.014530330201468638
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.36,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.27941176470588236,
            "acc_stderr": 0.027257202606114948,
            "acc_norm": 0.27941176470588236,
            "acc_norm_stderr": 0.027257202606114948
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.2816326530612245,
            "acc_stderr": 0.0287951855742913,
            "acc_norm": 0.2816326530612245,
            "acc_norm_stderr": 0.0287951855742913
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.35864978902953587,
            "acc_stderr": 0.031219569445301843,
            "acc_norm": 0.35864978902953587,
            "acc_norm_stderr": 0.031219569445301843
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2907431551499348,
            "acc_stderr": 0.011598062372851981,
            "acc_norm": 0.2907431551499348,
            "acc_norm_stderr": 0.011598062372851981
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.25980392156862747,
            "acc_stderr": 0.030778554678693268,
            "acc_norm": 0.25980392156862747,
            "acc_norm_stderr": 0.030778554678693268
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.28484848484848485,
            "acc_stderr": 0.03524390844511784,
            "acc_norm": 0.28484848484848485,
            "acc_norm_stderr": 0.03524390844511784
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.24112607099143207,
            "mc1_stderr": 0.014974827279752329,
            "mc2": 0.3762518297834469,
            "mc2_stderr": 0.015197001689915996
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.31759149940968123,
            "acc_stderr": 0.016005581876229306,
            "acc_norm": 0.3990554899645809,
            "acc_norm_stderr": 0.0168363772928493
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "BM-K/llama-2-ko-7b-it-v1.0.0",
        "model_sha": "d77fd44b31382f84fa4b8b9afd63a92ded7bde93",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}