{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.29948805460750855,
            "acc_stderr": 0.01338502163731356,
            "acc_norm": 0.35494880546075086,
            "acc_norm_stderr": 0.013983036904094094
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.38627763393746267,
            "acc_stderr": 0.004859004184694615,
            "acc_norm": 0.4993029277036447,
            "acc_norm_stderr": 0.00498977656227611
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.28654970760233917,
            "acc_stderr": 0.034678266857038245,
            "acc_norm": 0.28654970760233917,
            "acc_norm_stderr": 0.034678266857038245
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.1941747572815534,
            "acc_stderr": 0.03916667762822584,
            "acc_norm": 0.1941747572815534,
            "acc_norm_stderr": 0.03916667762822584
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.280970625798212,
            "acc_stderr": 0.016073127851221225,
            "acc_norm": 0.280970625798212,
            "acc_norm_stderr": 0.016073127851221225
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.3851851851851852,
            "acc_stderr": 0.042039210401562783,
            "acc_norm": 0.3851851851851852,
            "acc_norm_stderr": 0.042039210401562783
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.20851063829787234,
            "acc_stderr": 0.02655698211783875,
            "acc_norm": 0.20851063829787234,
            "acc_norm_stderr": 0.02655698211783875
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.19879518072289157,
            "acc_stderr": 0.031069390260789413,
            "acc_norm": 0.19879518072289157,
            "acc_norm_stderr": 0.031069390260789413
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.3054662379421222,
            "acc_stderr": 0.026160584450140474,
            "acc_norm": 0.3054662379421222,
            "acc_norm_stderr": 0.026160584450140474
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.2062780269058296,
            "acc_stderr": 0.02715715047956382,
            "acc_norm": 0.2062780269058296,
            "acc_norm_stderr": 0.02715715047956382
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.24427480916030533,
            "acc_stderr": 0.037683359597287434,
            "acc_norm": 0.24427480916030533,
            "acc_norm_stderr": 0.037683359597287434
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.2,
            "acc_stderr": 0.04020151261036845,
            "acc_norm": 0.2,
            "acc_norm_stderr": 0.04020151261036845
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.26262626262626265,
            "acc_stderr": 0.03135305009533084,
            "acc_norm": 0.26262626262626265,
            "acc_norm_stderr": 0.03135305009533084
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.3103448275862069,
            "acc_stderr": 0.03855289616378949,
            "acc_norm": 0.3103448275862069,
            "acc_norm_stderr": 0.03855289616378949
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.21568627450980393,
            "acc_stderr": 0.04092563958237657,
            "acc_norm": 0.21568627450980393,
            "acc_norm_stderr": 0.04092563958237657
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.2184873949579832,
            "acc_stderr": 0.026841514322958924,
            "acc_norm": 0.2184873949579832,
            "acc_norm_stderr": 0.026841514322958924
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.2153846153846154,
            "acc_stderr": 0.020843034557462878,
            "acc_norm": 0.2153846153846154,
            "acc_norm_stderr": 0.020843034557462878
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.23148148148148148,
            "acc_stderr": 0.04077494709252627,
            "acc_norm": 0.23148148148148148,
            "acc_norm_stderr": 0.04077494709252627
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.2955665024630542,
            "acc_stderr": 0.032104944337514575,
            "acc_norm": 0.2955665024630542,
            "acc_norm_stderr": 0.032104944337514575
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.2645161290322581,
            "acc_stderr": 0.02509189237885928,
            "acc_norm": 0.2645161290322581,
            "acc_norm_stderr": 0.02509189237885928
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.2606837606837607,
            "acc_stderr": 0.028760348956523414,
            "acc_norm": 0.2606837606837607,
            "acc_norm_stderr": 0.028760348956523414
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.2339622641509434,
            "acc_stderr": 0.02605529690115292,
            "acc_norm": 0.2339622641509434,
            "acc_norm_stderr": 0.02605529690115292
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.20909090909090908,
            "acc_stderr": 0.03895091015724135,
            "acc_norm": 0.20909090909090908,
            "acc_norm_stderr": 0.03895091015724135
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.26296296296296295,
            "acc_stderr": 0.026842057873833706,
            "acc_norm": 0.26296296296296295,
            "acc_norm_stderr": 0.026842057873833706
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.271523178807947,
            "acc_stderr": 0.03631329803969653,
            "acc_norm": 0.271523178807947,
            "acc_norm_stderr": 0.03631329803969653
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.24875621890547264,
            "acc_stderr": 0.030567675938916714,
            "acc_norm": 0.24875621890547264,
            "acc_norm_stderr": 0.030567675938916714
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.24855491329479767,
            "acc_stderr": 0.03295304696818317,
            "acc_norm": 0.24855491329479767,
            "acc_norm_stderr": 0.03295304696818317
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2619047619047619,
            "acc_stderr": 0.02264421261552521,
            "acc_norm": 0.2619047619047619,
            "acc_norm_stderr": 0.02264421261552521
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2569444444444444,
            "acc_stderr": 0.03653946969442099,
            "acc_norm": 0.2569444444444444,
            "acc_norm_stderr": 0.03653946969442099
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.17,
            "acc_stderr": 0.0377525168068637,
            "acc_norm": 0.17,
            "acc_norm_stderr": 0.0377525168068637
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768079,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768079
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.3092485549132948,
            "acc_stderr": 0.02488314057007176,
            "acc_norm": 0.3092485549132948,
            "acc_norm_stderr": 0.02488314057007176
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.3006134969325153,
            "acc_stderr": 0.03602511318806771,
            "acc_norm": 0.3006134969325153,
            "acc_norm_stderr": 0.03602511318806771
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.30246913580246915,
            "acc_stderr": 0.025557653981868038,
            "acc_norm": 0.30246913580246915,
            "acc_norm_stderr": 0.025557653981868038
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.22797927461139897,
            "acc_stderr": 0.030276909945178256,
            "acc_norm": 0.22797927461139897,
            "acc_norm_stderr": 0.030276909945178256
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.24561403508771928,
            "acc_stderr": 0.04049339297748141,
            "acc_norm": 0.24561403508771928,
            "acc_norm_stderr": 0.04049339297748141
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.22568807339449543,
            "acc_stderr": 0.01792308766780305,
            "acc_norm": 0.22568807339449543,
            "acc_norm_stderr": 0.01792308766780305
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.14285714285714285,
            "acc_stderr": 0.03129843185743809,
            "acc_norm": 0.14285714285714285,
            "acc_norm_stderr": 0.03129843185743809
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.26143790849673204,
            "acc_stderr": 0.025160998214292456,
            "acc_norm": 0.26143790849673204,
            "acc_norm_stderr": 0.025160998214292456
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.22,
            "acc_stderr": 0.041633319989322674,
            "acc_norm": 0.22,
            "acc_norm_stderr": 0.041633319989322674
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.38016528925619836,
            "acc_stderr": 0.04431324501968431,
            "acc_norm": 0.38016528925619836,
            "acc_norm_stderr": 0.04431324501968431
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.3223684210526316,
            "acc_stderr": 0.038035102483515854,
            "acc_norm": 0.3223684210526316,
            "acc_norm_stderr": 0.038035102483515854
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.2761437908496732,
            "acc_stderr": 0.018087276935663133,
            "acc_norm": 0.2761437908496732,
            "acc_norm_stderr": 0.018087276935663133
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2730496453900709,
            "acc_stderr": 0.026577860943307854,
            "acc_norm": 0.2730496453900709,
            "acc_norm_stderr": 0.026577860943307854
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.24107142857142858,
            "acc_stderr": 0.04059867246952689,
            "acc_norm": 0.24107142857142858,
            "acc_norm_stderr": 0.04059867246952689
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.20833333333333334,
            "acc_stderr": 0.027696910713093936,
            "acc_norm": 0.20833333333333334,
            "acc_norm_stderr": 0.027696910713093936
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.24692737430167597,
            "acc_stderr": 0.014422292204808852,
            "acc_norm": 0.24692737430167597,
            "acc_norm_stderr": 0.014422292204808852
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.26,
            "acc_stderr": 0.04408440022768079,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.04408440022768079
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252606,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252606
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.16544117647058823,
            "acc_stderr": 0.02257177102549475,
            "acc_norm": 0.16544117647058823,
            "acc_norm_stderr": 0.02257177102549475
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.24081632653061225,
            "acc_stderr": 0.027372942201788167,
            "acc_norm": 0.24081632653061225,
            "acc_norm_stderr": 0.027372942201788167
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.31645569620253167,
            "acc_stderr": 0.030274974880218977,
            "acc_norm": 0.31645569620253167,
            "acc_norm_stderr": 0.030274974880218977
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2692307692307692,
            "acc_stderr": 0.011328734403140327,
            "acc_norm": 0.2692307692307692,
            "acc_norm_stderr": 0.011328734403140327
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.27941176470588236,
            "acc_stderr": 0.03149328104507956,
            "acc_norm": 0.27941176470588236,
            "acc_norm_stderr": 0.03149328104507956
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.3515151515151515,
            "acc_stderr": 0.037282069986826503,
            "acc_norm": 0.3515151515151515,
            "acc_norm_stderr": 0.037282069986826503
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.24112607099143207,
            "mc1_stderr": 0.014974827279752337,
            "mc2": 0.3942593710384486,
            "mc2_stderr": 0.014811018314989769
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.7312206572769953,
            "acc_stderr": 0.015196983421381498,
            "acc_norm": 0.7769953051643192,
            "acc_norm_stderr": 0.014269258984221404
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "momo/polyglot-ko-12.8b-Chat-QLoRA-Merge",
        "model_sha": "793d22f37f5945b22fbc33c447f8cdcaa4a50221",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}