{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.2883959044368601,
            "acc_stderr": 0.013238394422428164,
            "acc_norm": 0.3515358361774744,
            "acc_norm_stderr": 0.013952413699600933
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3881696873132842,
            "acc_stderr": 0.004863375698153872,
            "acc_norm": 0.4946225851424019,
            "acc_norm_stderr": 0.004989492828168542
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.26900584795321636,
            "acc_stderr": 0.0340105262010409,
            "acc_norm": 0.26900584795321636,
            "acc_norm_stderr": 0.0340105262010409
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.2524271844660194,
            "acc_stderr": 0.04301250399690877,
            "acc_norm": 0.2524271844660194,
            "acc_norm_stderr": 0.04301250399690877
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.24521072796934865,
            "acc_stderr": 0.01538435228454394,
            "acc_norm": 0.24521072796934865,
            "acc_norm_stderr": 0.01538435228454394
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.25925925925925924,
            "acc_stderr": 0.03785714465066653,
            "acc_norm": 0.25925925925925924,
            "acc_norm_stderr": 0.03785714465066653
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.20851063829787234,
            "acc_stderr": 0.026556982117838707,
            "acc_norm": 0.20851063829787234,
            "acc_norm_stderr": 0.026556982117838707
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.25301204819277107,
            "acc_stderr": 0.03384429155233135,
            "acc_norm": 0.25301204819277107,
            "acc_norm_stderr": 0.03384429155233135
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.2861736334405145,
            "acc_stderr": 0.025670259242188933,
            "acc_norm": 0.2861736334405145,
            "acc_norm_stderr": 0.025670259242188933
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.16143497757847533,
            "acc_stderr": 0.02469395789912846,
            "acc_norm": 0.16143497757847533,
            "acc_norm_stderr": 0.02469395789912846
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.2748091603053435,
            "acc_stderr": 0.039153454088478354,
            "acc_norm": 0.2748091603053435,
            "acc_norm_stderr": 0.039153454088478354
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.35,
            "acc_stderr": 0.04793724854411022,
            "acc_norm": 0.35,
            "acc_norm_stderr": 0.04793724854411022
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.30808080808080807,
            "acc_stderr": 0.03289477330098615,
            "acc_norm": 0.30808080808080807,
            "acc_norm_stderr": 0.03289477330098615
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.25517241379310346,
            "acc_stderr": 0.03632984052707842,
            "acc_norm": 0.25517241379310346,
            "acc_norm_stderr": 0.03632984052707842
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.2549019607843137,
            "acc_stderr": 0.043364327079931785,
            "acc_norm": 0.2549019607843137,
            "acc_norm_stderr": 0.043364327079931785
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.2773109243697479,
            "acc_stderr": 0.02907937453948001,
            "acc_norm": 0.2773109243697479,
            "acc_norm_stderr": 0.02907937453948001
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.26153846153846155,
            "acc_stderr": 0.02228214120420442,
            "acc_norm": 0.26153846153846155,
            "acc_norm_stderr": 0.02228214120420442
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.22,
            "acc_stderr": 0.041633319989322695,
            "acc_norm": 0.22,
            "acc_norm_stderr": 0.041633319989322695
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.18,
            "acc_stderr": 0.038612291966536955,
            "acc_norm": 0.18,
            "acc_norm_stderr": 0.038612291966536955
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.26851851851851855,
            "acc_stderr": 0.04284467968052191,
            "acc_norm": 0.26851851851851855,
            "acc_norm_stderr": 0.04284467968052191
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.22167487684729065,
            "acc_stderr": 0.029225575892489593,
            "acc_norm": 0.22167487684729065,
            "acc_norm_stderr": 0.029225575892489593
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.23870967741935484,
            "acc_stderr": 0.024251071262208834,
            "acc_norm": 0.23870967741935484,
            "acc_norm_stderr": 0.024251071262208834
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.3034188034188034,
            "acc_stderr": 0.030118210106942652,
            "acc_norm": 0.3034188034188034,
            "acc_norm_stderr": 0.030118210106942652
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.27169811320754716,
            "acc_stderr": 0.027377706624670713,
            "acc_norm": 0.27169811320754716,
            "acc_norm_stderr": 0.027377706624670713
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.2545454545454545,
            "acc_stderr": 0.041723430387053825,
            "acc_norm": 0.2545454545454545,
            "acc_norm_stderr": 0.041723430387053825
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.28888888888888886,
            "acc_stderr": 0.027634907264178544,
            "acc_norm": 0.28888888888888886,
            "acc_norm_stderr": 0.027634907264178544
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.26490066225165565,
            "acc_stderr": 0.036030385453603826,
            "acc_norm": 0.26490066225165565,
            "acc_norm_stderr": 0.036030385453603826
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.1791044776119403,
            "acc_stderr": 0.027113286753111837,
            "acc_norm": 0.1791044776119403,
            "acc_norm_stderr": 0.027113286753111837
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3352601156069364,
            "acc_stderr": 0.03599586301247078,
            "acc_norm": 0.3352601156069364,
            "acc_norm_stderr": 0.03599586301247078
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2222222222222222,
            "acc_stderr": 0.02141168439369418,
            "acc_norm": 0.2222222222222222,
            "acc_norm_stderr": 0.02141168439369418
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2777777777777778,
            "acc_stderr": 0.037455547914624576,
            "acc_norm": 0.2777777777777778,
            "acc_norm_stderr": 0.037455547914624576
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.22,
            "acc_stderr": 0.041633319989322695,
            "acc_norm": 0.22,
            "acc_norm_stderr": 0.041633319989322695
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.24277456647398843,
            "acc_stderr": 0.0230836585869842,
            "acc_norm": 0.24277456647398843,
            "acc_norm_stderr": 0.0230836585869842
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.2331288343558282,
            "acc_stderr": 0.0332201579577674,
            "acc_norm": 0.2331288343558282,
            "acc_norm_stderr": 0.0332201579577674
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.29012345679012347,
            "acc_stderr": 0.02525117393649502,
            "acc_norm": 0.29012345679012347,
            "acc_norm_stderr": 0.02525117393649502
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909284,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909284
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.29533678756476683,
            "acc_stderr": 0.03292296639155141,
            "acc_norm": 0.29533678756476683,
            "acc_norm_stderr": 0.03292296639155141
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2894736842105263,
            "acc_stderr": 0.04266339443159394,
            "acc_norm": 0.2894736842105263,
            "acc_norm_stderr": 0.04266339443159394
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3137614678899083,
            "acc_stderr": 0.019894723341469127,
            "acc_norm": 0.3137614678899083,
            "acc_norm_stderr": 0.019894723341469127
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.1984126984126984,
            "acc_stderr": 0.035670166752768635,
            "acc_norm": 0.1984126984126984,
            "acc_norm_stderr": 0.035670166752768635
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.27450980392156865,
            "acc_stderr": 0.025553169991826524,
            "acc_norm": 0.27450980392156865,
            "acc_norm_stderr": 0.025553169991826524
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.256198347107438,
            "acc_stderr": 0.03984979653302872,
            "acc_norm": 0.256198347107438,
            "acc_norm_stderr": 0.03984979653302872
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.23026315789473684,
            "acc_stderr": 0.03426059424403165,
            "acc_norm": 0.23026315789473684,
            "acc_norm_stderr": 0.03426059424403165
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.23366013071895425,
            "acc_stderr": 0.017119158496044506,
            "acc_norm": 0.23366013071895425,
            "acc_norm_stderr": 0.017119158496044506
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.23404255319148937,
            "acc_stderr": 0.025257861359432407,
            "acc_norm": 0.23404255319148937,
            "acc_norm_stderr": 0.025257861359432407
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.29464285714285715,
            "acc_stderr": 0.04327040932578728,
            "acc_norm": 0.29464285714285715,
            "acc_norm_stderr": 0.04327040932578728
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.39351851851851855,
            "acc_stderr": 0.03331747876370312,
            "acc_norm": 0.39351851851851855,
            "acc_norm_stderr": 0.03331747876370312
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.27150837988826815,
            "acc_stderr": 0.014874252168095278,
            "acc_norm": 0.27150837988826815,
            "acc_norm_stderr": 0.014874252168095278
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.39705882352941174,
            "acc_stderr": 0.02972215209928006,
            "acc_norm": 0.39705882352941174,
            "acc_norm_stderr": 0.02972215209928006
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.2693877551020408,
            "acc_stderr": 0.02840125202902294,
            "acc_norm": 0.2693877551020408,
            "acc_norm_stderr": 0.02840125202902294
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.25738396624472576,
            "acc_stderr": 0.028458820991460288,
            "acc_norm": 0.25738396624472576,
            "acc_norm_stderr": 0.028458820991460288
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.25097783572359844,
            "acc_stderr": 0.01107373029918721,
            "acc_norm": 0.25097783572359844,
            "acc_norm_stderr": 0.01107373029918721
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.23039215686274508,
            "acc_stderr": 0.02955429260569506,
            "acc_norm": 0.23039215686274508,
            "acc_norm_stderr": 0.02955429260569506
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.18787878787878787,
            "acc_stderr": 0.03050193405942914,
            "acc_norm": 0.18787878787878787,
            "acc_norm_stderr": 0.03050193405942914
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.27539779681762544,
            "mc1_stderr": 0.015638135667775523,
            "mc2": 0.42952087211843815,
            "mc2_stderr": 0.015198599249746652
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.3779342723004695,
            "acc_stderr": 0.016621166340849272,
            "acc_norm": 0.43896713615023475,
            "acc_norm_stderr": 0.017011608310486037
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "siryuon/polyglot-ko-12.8b-sryn",
        "model_sha": "5bc6d25dbc83bb1e2bcc656141316ae2cb079aff",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}