{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3302047781569966,
            "acc_stderr": 0.013743085603760424,
            "acc_norm": 0.37627986348122866,
            "acc_norm_stderr": 0.014157022555407175
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3580959968133838,
            "acc_stderr": 0.004784607222774628,
            "acc_norm": 0.448814977096196,
            "acc_norm_stderr": 0.004963567029129058
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4444444444444444,
            "acc_stderr": 0.03811079669833531,
            "acc_norm": 0.4444444444444444,
            "acc_norm_stderr": 0.03811079669833531
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5048543689320388,
            "acc_stderr": 0.04950504382128921,
            "acc_norm": 0.5048543689320388,
            "acc_norm_stderr": 0.04950504382128921
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.4521072796934866,
            "acc_stderr": 0.017797751493865626,
            "acc_norm": 0.4521072796934866,
            "acc_norm_stderr": 0.017797751493865626
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.32592592592592595,
            "acc_stderr": 0.040491220417025055,
            "acc_norm": 0.32592592592592595,
            "acc_norm_stderr": 0.040491220417025055
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816506,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816506
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.34893617021276596,
            "acc_stderr": 0.031158522131357766,
            "acc_norm": 0.34893617021276596,
            "acc_norm_stderr": 0.031158522131357766
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3855421686746988,
            "acc_stderr": 0.037891344246115496,
            "acc_norm": 0.3855421686746988,
            "acc_norm_stderr": 0.037891344246115496
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.3890675241157556,
            "acc_stderr": 0.027690337536485376,
            "acc_norm": 0.3890675241157556,
            "acc_norm_stderr": 0.027690337536485376
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.4170403587443946,
            "acc_stderr": 0.03309266936071721,
            "acc_norm": 0.4170403587443946,
            "acc_norm_stderr": 0.03309266936071721
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.37404580152671757,
            "acc_stderr": 0.04243869242230524,
            "acc_norm": 0.37404580152671757,
            "acc_norm_stderr": 0.04243869242230524
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939098,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939098
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.4494949494949495,
            "acc_stderr": 0.03544132491947969,
            "acc_norm": 0.4494949494949495,
            "acc_norm_stderr": 0.03544132491947969
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4413793103448276,
            "acc_stderr": 0.04137931034482758,
            "acc_norm": 0.4413793103448276,
            "acc_norm_stderr": 0.04137931034482758
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.20588235294117646,
            "acc_stderr": 0.04023382273617749,
            "acc_norm": 0.20588235294117646,
            "acc_norm_stderr": 0.04023382273617749
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.37815126050420167,
            "acc_stderr": 0.031499305777849054,
            "acc_norm": 0.37815126050420167,
            "acc_norm_stderr": 0.031499305777849054
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.40512820512820513,
            "acc_stderr": 0.024890471769938152,
            "acc_norm": 0.40512820512820513,
            "acc_norm_stderr": 0.024890471769938152
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.52,
            "acc_stderr": 0.05021167315686779,
            "acc_norm": 0.52,
            "acc_norm_stderr": 0.05021167315686779
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.23,
            "acc_stderr": 0.04229525846816505,
            "acc_norm": 0.23,
            "acc_norm_stderr": 0.04229525846816505
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.42592592592592593,
            "acc_stderr": 0.0478034362693679,
            "acc_norm": 0.42592592592592593,
            "acc_norm_stderr": 0.0478034362693679
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3497536945812808,
            "acc_stderr": 0.033554009049695646,
            "acc_norm": 0.3497536945812808,
            "acc_norm_stderr": 0.033554009049695646
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.4064516129032258,
            "acc_stderr": 0.027941727346256315,
            "acc_norm": 0.4064516129032258,
            "acc_norm_stderr": 0.027941727346256315
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6581196581196581,
            "acc_stderr": 0.031075028526507748,
            "acc_norm": 0.6581196581196581,
            "acc_norm_stderr": 0.031075028526507748
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.3886792452830189,
            "acc_stderr": 0.030000485448675986,
            "acc_norm": 0.3886792452830189,
            "acc_norm_stderr": 0.030000485448675986
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4090909090909091,
            "acc_stderr": 0.04709306978661896,
            "acc_norm": 0.4090909090909091,
            "acc_norm_stderr": 0.04709306978661896
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3111111111111111,
            "acc_stderr": 0.028226446749683515,
            "acc_norm": 0.3111111111111111,
            "acc_norm_stderr": 0.028226446749683515
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.33112582781456956,
            "acc_stderr": 0.038425817186598696,
            "acc_norm": 0.33112582781456956,
            "acc_norm_stderr": 0.038425817186598696
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.572139303482587,
            "acc_stderr": 0.03498541988407795,
            "acc_norm": 0.572139303482587,
            "acc_norm_stderr": 0.03498541988407795
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3872832369942196,
            "acc_stderr": 0.03714325906302065,
            "acc_norm": 0.3872832369942196,
            "acc_norm_stderr": 0.03714325906302065
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3888888888888889,
            "acc_stderr": 0.025107425481137285,
            "acc_norm": 0.3888888888888889,
            "acc_norm_stderr": 0.025107425481137285
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3263888888888889,
            "acc_stderr": 0.03921067198982266,
            "acc_norm": 0.3263888888888889,
            "acc_norm_stderr": 0.03921067198982266
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.32,
            "acc_stderr": 0.04688261722621505,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.04688261722621505
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.62,
            "acc_stderr": 0.04878317312145633,
            "acc_norm": 0.62,
            "acc_norm_stderr": 0.04878317312145633
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.3930635838150289,
            "acc_stderr": 0.02629622791561367,
            "acc_norm": 0.3930635838150289,
            "acc_norm_stderr": 0.02629622791561367
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4110429447852761,
            "acc_stderr": 0.038656978537853624,
            "acc_norm": 0.4110429447852761,
            "acc_norm_stderr": 0.038656978537853624
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.39814814814814814,
            "acc_stderr": 0.027237415094592474,
            "acc_norm": 0.39814814814814814,
            "acc_norm_stderr": 0.027237415094592474
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.35,
            "acc_stderr": 0.04793724854411021,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.04793724854411021
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.44041450777202074,
            "acc_stderr": 0.03582724530036094,
            "acc_norm": 0.44041450777202074,
            "acc_norm_stderr": 0.03582724530036094
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2894736842105263,
            "acc_stderr": 0.04266339443159394,
            "acc_norm": 0.2894736842105263,
            "acc_norm_stderr": 0.04266339443159394
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.42018348623853213,
            "acc_stderr": 0.021162420048273515,
            "acc_norm": 0.42018348623853213,
            "acc_norm_stderr": 0.021162420048273515
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3492063492063492,
            "acc_stderr": 0.04263906892795133,
            "acc_norm": 0.3492063492063492,
            "acc_norm_stderr": 0.04263906892795133
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.3790849673202614,
            "acc_stderr": 0.027780141207023344,
            "acc_norm": 0.3790849673202614,
            "acc_norm_stderr": 0.027780141207023344
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5950413223140496,
            "acc_stderr": 0.04481137755942469,
            "acc_norm": 0.5950413223140496,
            "acc_norm_stderr": 0.04481137755942469
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3815789473684211,
            "acc_stderr": 0.03953173377749194,
            "acc_norm": 0.3815789473684211,
            "acc_norm_stderr": 0.03953173377749194
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3611111111111111,
            "acc_stderr": 0.01943177567703731,
            "acc_norm": 0.3611111111111111,
            "acc_norm_stderr": 0.01943177567703731
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3404255319148936,
            "acc_stderr": 0.02826765748265016,
            "acc_norm": 0.3404255319148936,
            "acc_norm_stderr": 0.02826765748265016
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.3392857142857143,
            "acc_stderr": 0.0449394906861354,
            "acc_norm": 0.3392857142857143,
            "acc_norm_stderr": 0.0449394906861354
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3101851851851852,
            "acc_stderr": 0.03154696285656627,
            "acc_norm": 0.3101851851851852,
            "acc_norm_stderr": 0.03154696285656627
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.23687150837988827,
            "acc_stderr": 0.014219570788103986,
            "acc_norm": 0.23687150837988827,
            "acc_norm_stderr": 0.014219570788103986
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.49,
            "acc_stderr": 0.05024183937956911,
            "acc_norm": 0.49,
            "acc_norm_stderr": 0.05024183937956911
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.3272058823529412,
            "acc_stderr": 0.028501452860396573,
            "acc_norm": 0.3272058823529412,
            "acc_norm_stderr": 0.028501452860396573
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.47346938775510206,
            "acc_stderr": 0.03196412734523272,
            "acc_norm": 0.47346938775510206,
            "acc_norm_stderr": 0.03196412734523272
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5569620253164557,
            "acc_stderr": 0.03233532777533484,
            "acc_norm": 0.5569620253164557,
            "acc_norm_stderr": 0.03233532777533484
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2920469361147327,
            "acc_stderr": 0.011613349136271803,
            "acc_norm": 0.2920469361147327,
            "acc_norm_stderr": 0.011613349136271803
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.3872549019607843,
            "acc_stderr": 0.03418931233833342,
            "acc_norm": 0.3872549019607843,
            "acc_norm_stderr": 0.03418931233833342
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.036810508691615486,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.036810508691615486
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2729498164014688,
            "mc1_stderr": 0.015594753632006506,
            "mc2": 0.45470265644306807,
            "mc2_stderr": 0.01566955975434091
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.3600944510035419,
            "acc_stderr": 0.016503686720440065,
            "acc_norm": 0.4604486422668241,
            "acc_norm_stderr": 0.017136487626049846
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "LI-ST/Mistral-7B-ko-v0.001",
        "model_sha": "7fa29dd55c6d480bd1dd023d04bbc351d9c465c2",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}