{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3660409556313993,
            "acc_stderr": 0.014077223108470137,
            "acc_norm": 0.4035836177474403,
            "acc_norm_stderr": 0.01433715891426844
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3948416650069707,
            "acc_stderr": 0.004878176541703574,
            "acc_norm": 0.5118502290380402,
            "acc_norm_stderr": 0.004988379805261165
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4678362573099415,
            "acc_stderr": 0.038268824176603704,
            "acc_norm": 0.4678362573099415,
            "acc_norm_stderr": 0.038268824176603704
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.4077669902912621,
            "acc_stderr": 0.048657775704107696,
            "acc_norm": 0.4077669902912621,
            "acc_norm_stderr": 0.048657775704107696
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.44189016602809705,
            "acc_stderr": 0.017758800534214417,
            "acc_norm": 0.44189016602809705,
            "acc_norm_stderr": 0.017758800534214417
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.3037037037037037,
            "acc_stderr": 0.03972552884785139,
            "acc_norm": 0.3037037037037037,
            "acc_norm_stderr": 0.03972552884785139
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.28,
            "acc_stderr": 0.045126085985421255,
            "acc_norm": 0.28,
            "acc_norm_stderr": 0.045126085985421255
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.2553191489361702,
            "acc_stderr": 0.02850485647051419,
            "acc_norm": 0.2553191489361702,
            "acc_norm_stderr": 0.02850485647051419
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.2891566265060241,
            "acc_stderr": 0.03529486801511115,
            "acc_norm": 0.2891566265060241,
            "acc_norm_stderr": 0.03529486801511115
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.40836012861736337,
            "acc_stderr": 0.02791705074848462,
            "acc_norm": 0.40836012861736337,
            "acc_norm_stderr": 0.02791705074848462
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.4080717488789238,
            "acc_stderr": 0.03298574607842822,
            "acc_norm": 0.4080717488789238,
            "acc_norm_stderr": 0.03298574607842822
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.35877862595419846,
            "acc_stderr": 0.04206739313864908,
            "acc_norm": 0.35877862595419846,
            "acc_norm_stderr": 0.04206739313864908
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.26,
            "acc_stderr": 0.0440844002276808,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.0440844002276808
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.398989898989899,
            "acc_stderr": 0.03488901616852731,
            "acc_norm": 0.398989898989899,
            "acc_norm_stderr": 0.03488901616852731
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.36551724137931035,
            "acc_stderr": 0.040131241954243856,
            "acc_norm": 0.36551724137931035,
            "acc_norm_stderr": 0.040131241954243856
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.27450980392156865,
            "acc_stderr": 0.044405219061793275,
            "acc_norm": 0.27450980392156865,
            "acc_norm_stderr": 0.044405219061793275
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.3697478991596639,
            "acc_stderr": 0.03135709599613591,
            "acc_norm": 0.3697478991596639,
            "acc_norm_stderr": 0.03135709599613591
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.34102564102564104,
            "acc_stderr": 0.024035489676335065,
            "acc_norm": 0.34102564102564104,
            "acc_norm_stderr": 0.024035489676335065
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.41,
            "acc_stderr": 0.04943110704237102,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.04943110704237102
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.4351851851851852,
            "acc_stderr": 0.04792898170907062,
            "acc_norm": 0.4351851851851852,
            "acc_norm_stderr": 0.04792898170907062
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.33497536945812806,
            "acc_stderr": 0.033208527423483104,
            "acc_norm": 0.33497536945812806,
            "acc_norm_stderr": 0.033208527423483104
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.3419354838709677,
            "acc_stderr": 0.02698528957655274,
            "acc_norm": 0.3419354838709677,
            "acc_norm_stderr": 0.02698528957655274
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.5128205128205128,
            "acc_stderr": 0.032745319388423504,
            "acc_norm": 0.5128205128205128,
            "acc_norm_stderr": 0.032745319388423504
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.39622641509433965,
            "acc_stderr": 0.03010279378179119,
            "acc_norm": 0.39622641509433965,
            "acc_norm_stderr": 0.03010279378179119
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.2818181818181818,
            "acc_stderr": 0.043091187099464585,
            "acc_norm": 0.2818181818181818,
            "acc_norm_stderr": 0.043091187099464585
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.32592592592592595,
            "acc_stderr": 0.028578348365473075,
            "acc_norm": 0.32592592592592595,
            "acc_norm_stderr": 0.028578348365473075
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.24503311258278146,
            "acc_stderr": 0.035118075718047245,
            "acc_norm": 0.24503311258278146,
            "acc_norm_stderr": 0.035118075718047245
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.3681592039800995,
            "acc_stderr": 0.03410410565495302,
            "acc_norm": 0.3681592039800995,
            "acc_norm_stderr": 0.03410410565495302
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.2658959537572254,
            "acc_stderr": 0.0336876293225943,
            "acc_norm": 0.2658959537572254,
            "acc_norm_stderr": 0.0336876293225943
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.29894179894179895,
            "acc_stderr": 0.023577604791655805,
            "acc_norm": 0.29894179894179895,
            "acc_norm_stderr": 0.023577604791655805
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2847222222222222,
            "acc_stderr": 0.03773809990686934,
            "acc_norm": 0.2847222222222222,
            "acc_norm_stderr": 0.03773809990686934
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.3583815028901734,
            "acc_stderr": 0.0258167567915842,
            "acc_norm": 0.3583815028901734,
            "acc_norm_stderr": 0.0258167567915842
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3374233128834356,
            "acc_stderr": 0.03714908409935574,
            "acc_norm": 0.3374233128834356,
            "acc_norm_stderr": 0.03714908409935574
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.3734567901234568,
            "acc_stderr": 0.026915003011380147,
            "acc_norm": 0.3734567901234568,
            "acc_norm_stderr": 0.026915003011380147
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.39896373056994816,
            "acc_stderr": 0.03533999094065696,
            "acc_norm": 0.39896373056994816,
            "acc_norm_stderr": 0.03533999094065696
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2543859649122807,
            "acc_stderr": 0.040969851398436716,
            "acc_norm": 0.2543859649122807,
            "acc_norm_stderr": 0.040969851398436716
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3926605504587156,
            "acc_stderr": 0.020937505161201093,
            "acc_norm": 0.3926605504587156,
            "acc_norm_stderr": 0.020937505161201093
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.037184890068181146,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.037184890068181146
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.3562091503267974,
            "acc_stderr": 0.027420477662629245,
            "acc_norm": 0.3562091503267974,
            "acc_norm_stderr": 0.027420477662629245
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.48760330578512395,
            "acc_stderr": 0.045629515481807666,
            "acc_norm": 0.48760330578512395,
            "acc_norm_stderr": 0.045629515481807666
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.39473684210526316,
            "acc_stderr": 0.039777499346220734,
            "acc_norm": 0.39473684210526316,
            "acc_norm_stderr": 0.039777499346220734
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.24509803921568626,
            "acc_stderr": 0.017401816711427653,
            "acc_norm": 0.24509803921568626,
            "acc_norm_stderr": 0.017401816711427653
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.26595744680851063,
            "acc_stderr": 0.026358065698880592,
            "acc_norm": 0.26595744680851063,
            "acc_norm_stderr": 0.026358065698880592
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.1875,
            "acc_stderr": 0.0370468111477387,
            "acc_norm": 0.1875,
            "acc_norm_stderr": 0.0370468111477387
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.3148148148148148,
            "acc_stderr": 0.0316746870682898,
            "acc_norm": 0.3148148148148148,
            "acc_norm_stderr": 0.0316746870682898
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2424581005586592,
            "acc_stderr": 0.01433352205921789,
            "acc_norm": 0.2424581005586592,
            "acc_norm_stderr": 0.01433352205921789
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.38,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.28308823529411764,
            "acc_stderr": 0.02736586113151381,
            "acc_norm": 0.28308823529411764,
            "acc_norm_stderr": 0.02736586113151381
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.4122448979591837,
            "acc_stderr": 0.03151236044674281,
            "acc_norm": 0.4122448979591837,
            "acc_norm_stderr": 0.03151236044674281
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.3881856540084388,
            "acc_stderr": 0.031722950043323296,
            "acc_norm": 0.3881856540084388,
            "acc_norm_stderr": 0.031722950043323296
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.28552803129074317,
            "acc_stderr": 0.011535751586665668,
            "acc_norm": 0.28552803129074317,
            "acc_norm_stderr": 0.011535751586665668
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.28431372549019607,
            "acc_stderr": 0.03166009679399811,
            "acc_norm": 0.28431372549019607,
            "acc_norm_stderr": 0.03166009679399811
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.036810508691615486,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.036810508691615486
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3047735618115055,
            "mc1_stderr": 0.016114124156882466,
            "mc2": 0.48334405699140953,
            "mc2_stderr": 0.015932530840786423
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.33884297520661155,
            "acc_stderr": 0.01627295299701912,
            "acc_norm": 0.3789846517119244,
            "acc_norm_stderr": 0.016679260684229282
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "cepiloth/ko-en-llama2-13b-finetune-ex",
        "model_sha": "ee6a38bb61742af106567d743b3d87458a303f60",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}