{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.37627986348122866,
            "acc_stderr": 0.014157022555407163,
            "acc_norm": 0.4274744027303754,
            "acc_norm_stderr": 0.014456862944650649
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.40310695080661224,
            "acc_stderr": 0.0048951941438926784,
            "acc_norm": 0.536247759410476,
            "acc_norm_stderr": 0.004976651989757641
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5321637426900585,
            "acc_stderr": 0.03826882417660368,
            "acc_norm": 0.5321637426900585,
            "acc_norm_stderr": 0.03826882417660368
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.47572815533980584,
            "acc_stderr": 0.049449010929737795,
            "acc_norm": 0.47572815533980584,
            "acc_norm_stderr": 0.049449010929737795
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5006385696040868,
            "acc_stderr": 0.01787994891443169,
            "acc_norm": 0.5006385696040868,
            "acc_norm_stderr": 0.01787994891443169
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4074074074074074,
            "acc_stderr": 0.04244633238353229,
            "acc_norm": 0.4074074074074074,
            "acc_norm_stderr": 0.04244633238353229
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.36,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.35319148936170214,
            "acc_stderr": 0.03124532520276193,
            "acc_norm": 0.35319148936170214,
            "acc_norm_stderr": 0.03124532520276193
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3674698795180723,
            "acc_stderr": 0.03753267402120574,
            "acc_norm": 0.3674698795180723,
            "acc_norm_stderr": 0.03753267402120574
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.43086816720257237,
            "acc_stderr": 0.02812534098397271,
            "acc_norm": 0.43086816720257237,
            "acc_norm_stderr": 0.02812534098397271
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.4170403587443946,
            "acc_stderr": 0.03309266936071721,
            "acc_norm": 0.4170403587443946,
            "acc_norm_stderr": 0.03309266936071721
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.48854961832061067,
            "acc_stderr": 0.043841400240780176,
            "acc_norm": 0.48854961832061067,
            "acc_norm_stderr": 0.043841400240780176
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.4797979797979798,
            "acc_stderr": 0.03559443565563919,
            "acc_norm": 0.4797979797979798,
            "acc_norm_stderr": 0.03559443565563919
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4482758620689655,
            "acc_stderr": 0.04144311810878151,
            "acc_norm": 0.4482758620689655,
            "acc_norm_stderr": 0.04144311810878151
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.21568627450980393,
            "acc_stderr": 0.04092563958237655,
            "acc_norm": 0.21568627450980393,
            "acc_norm_stderr": 0.04092563958237655
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.42857142857142855,
            "acc_stderr": 0.032145368597886394,
            "acc_norm": 0.42857142857142855,
            "acc_norm_stderr": 0.032145368597886394
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4307692307692308,
            "acc_stderr": 0.02510682066053975,
            "acc_norm": 0.4307692307692308,
            "acc_norm_stderr": 0.02510682066053975
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.46,
            "acc_stderr": 0.05009082659620333,
            "acc_norm": 0.46,
            "acc_norm_stderr": 0.05009082659620333
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.33,
            "acc_stderr": 0.047258156262526045,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.047258156262526045
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5,
            "acc_stderr": 0.04833682445228318,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.04833682445228318
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.35960591133004927,
            "acc_stderr": 0.03376458246509567,
            "acc_norm": 0.35960591133004927,
            "acc_norm_stderr": 0.03376458246509567
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.4258064516129032,
            "acc_stderr": 0.028129112709165894,
            "acc_norm": 0.4258064516129032,
            "acc_norm_stderr": 0.028129112709165894
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6239316239316239,
            "acc_stderr": 0.03173393632969481,
            "acc_norm": 0.6239316239316239,
            "acc_norm_stderr": 0.03173393632969481
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.43018867924528303,
            "acc_stderr": 0.030471445867183235,
            "acc_norm": 0.43018867924528303,
            "acc_norm_stderr": 0.030471445867183235
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4636363636363636,
            "acc_stderr": 0.04776449162396197,
            "acc_norm": 0.4636363636363636,
            "acc_norm_stderr": 0.04776449162396197
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.2740740740740741,
            "acc_stderr": 0.027195934804085626,
            "acc_norm": 0.2740740740740741,
            "acc_norm_stderr": 0.027195934804085626
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.271523178807947,
            "acc_stderr": 0.03631329803969654,
            "acc_norm": 0.271523178807947,
            "acc_norm_stderr": 0.03631329803969654
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5572139303482587,
            "acc_stderr": 0.03512310964123936,
            "acc_norm": 0.5572139303482587,
            "acc_norm_stderr": 0.03512310964123936
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.34104046242774566,
            "acc_stderr": 0.03614665424180826,
            "acc_norm": 0.34104046242774566,
            "acc_norm_stderr": 0.03614665424180826
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.30423280423280424,
            "acc_stderr": 0.023695415009463087,
            "acc_norm": 0.30423280423280424,
            "acc_norm_stderr": 0.023695415009463087
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3263888888888889,
            "acc_stderr": 0.03921067198982266,
            "acc_norm": 0.3263888888888889,
            "acc_norm_stderr": 0.03921067198982266
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.63,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.63,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.4277456647398844,
            "acc_stderr": 0.026636539741116076,
            "acc_norm": 0.4277456647398844,
            "acc_norm_stderr": 0.026636539741116076
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.37423312883435583,
            "acc_stderr": 0.03802068102899616,
            "acc_norm": 0.37423312883435583,
            "acc_norm_stderr": 0.03802068102899616
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.44135802469135804,
            "acc_stderr": 0.02762873715566878,
            "acc_norm": 0.44135802469135804,
            "acc_norm_stderr": 0.02762873715566878
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.44041450777202074,
            "acc_stderr": 0.035827245300360945,
            "acc_norm": 0.44041450777202074,
            "acc_norm_stderr": 0.035827245300360945
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.22807017543859648,
            "acc_stderr": 0.03947152782669415,
            "acc_norm": 0.22807017543859648,
            "acc_norm_stderr": 0.03947152782669415
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.42385321100917434,
            "acc_stderr": 0.021187263209087533,
            "acc_norm": 0.42385321100917434,
            "acc_norm_stderr": 0.021187263209087533
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2619047619047619,
            "acc_stderr": 0.039325376803928704,
            "acc_norm": 0.2619047619047619,
            "acc_norm_stderr": 0.039325376803928704
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.40522875816993464,
            "acc_stderr": 0.02811092849280907,
            "acc_norm": 0.40522875816993464,
            "acc_norm_stderr": 0.02811092849280907
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5619834710743802,
            "acc_stderr": 0.04529146804435792,
            "acc_norm": 0.5619834710743802,
            "acc_norm_stderr": 0.04529146804435792
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.4407894736842105,
            "acc_stderr": 0.04040311062490436,
            "acc_norm": 0.4407894736842105,
            "acc_norm_stderr": 0.04040311062490436
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.32516339869281047,
            "acc_stderr": 0.01895088677080631,
            "acc_norm": 0.32516339869281047,
            "acc_norm_stderr": 0.01895088677080631
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.31560283687943264,
            "acc_stderr": 0.027724989449509317,
            "acc_norm": 0.31560283687943264,
            "acc_norm_stderr": 0.027724989449509317
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.21428571428571427,
            "acc_stderr": 0.03894641120044793,
            "acc_norm": 0.21428571428571427,
            "acc_norm_stderr": 0.03894641120044793
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.28703703703703703,
            "acc_stderr": 0.03085199299325701,
            "acc_norm": 0.28703703703703703,
            "acc_norm_stderr": 0.03085199299325701
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.24022346368715083,
            "acc_stderr": 0.014288343803925295,
            "acc_norm": 0.24022346368715083,
            "acc_norm_stderr": 0.014288343803925295
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.43,
            "acc_stderr": 0.04975698519562428,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.04975698519562428
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.23161764705882354,
            "acc_stderr": 0.025626533803777562,
            "acc_norm": 0.23161764705882354,
            "acc_norm_stderr": 0.025626533803777562
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.4489795918367347,
            "acc_stderr": 0.03184213866687579,
            "acc_norm": 0.4489795918367347,
            "acc_norm_stderr": 0.03184213866687579
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.46835443037974683,
            "acc_stderr": 0.03248197400511075,
            "acc_norm": 0.46835443037974683,
            "acc_norm_stderr": 0.03248197400511075
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2920469361147327,
            "acc_stderr": 0.011613349136271824,
            "acc_norm": 0.2920469361147327,
            "acc_norm_stderr": 0.011613349136271824
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.39705882352941174,
            "acc_stderr": 0.03434131164719129,
            "acc_norm": 0.39705882352941174,
            "acc_norm_stderr": 0.03434131164719129
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.47878787878787876,
            "acc_stderr": 0.03900828913737301,
            "acc_norm": 0.47878787878787876,
            "acc_norm_stderr": 0.03900828913737301
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2864137086903305,
            "mc1_stderr": 0.015826142439502342,
            "mc2": 0.45610675413247587,
            "mc2_stderr": 0.01508637089874796
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.41440377804014167,
            "acc_stderr": 0.016936583383943625,
            "acc_norm": 0.45454545454545453,
            "acc_norm_stderr": 0.017119172208061504
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "cepiloth/ko-en-llama2-13b-finetune",
        "model_sha": "966347fa24706fb7265c1967e3212504ad0f32da",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}