{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.38822525597269625, "acc_stderr": 0.01424161420741405, "acc_norm": 0.4283276450511945, "acc_norm_stderr": 0.01446049636759902 }, "harness|ko_hellaswag|10": { "acc": 0.40360485958972314, "acc_stderr": 0.004896173035943316, "acc_norm": 0.5388368850826528, "acc_norm_stderr": 0.0049747064284342835 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.5672514619883041, "acc_stderr": 0.03799978644370607, "acc_norm": 0.5672514619883041, "acc_norm_stderr": 0.03799978644370607 }, "harness|ko_mmlu_management|5": { "acc": 0.5145631067961165, "acc_stderr": 0.04948637324026637, "acc_norm": 0.5145631067961165, "acc_norm_stderr": 0.04948637324026637 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5300127713920817, "acc_stderr": 0.01784772308664907, "acc_norm": 0.5300127713920817, "acc_norm_stderr": 0.01784772308664907 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.4222222222222222, "acc_stderr": 0.04266763404099582, "acc_norm": 0.4222222222222222, "acc_norm_stderr": 0.04266763404099582 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.33191489361702126, "acc_stderr": 0.030783736757745667, "acc_norm": 0.33191489361702126, "acc_norm_stderr": 0.030783736757745667 }, "harness|ko_mmlu_virology|5": { "acc": 0.35542168674698793, "acc_stderr": 0.03726214354322415, "acc_norm": 0.35542168674698793, "acc_norm_stderr": 0.03726214354322415 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.5144694533762058, "acc_stderr": 0.02838619808417768, "acc_norm": 0.5144694533762058, "acc_norm_stderr": 0.02838619808417768 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.43946188340807174, "acc_stderr": 0.03331092511038179, "acc_norm": 0.43946188340807174, "acc_norm_stderr": 0.03331092511038179 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.5114503816793893, "acc_stderr": 0.043841400240780176, "acc_norm": 0.5114503816793893, "acc_norm_stderr": 0.043841400240780176 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5, "acc_stderr": 0.035623524993954825, "acc_norm": 0.5, "acc_norm_stderr": 0.035623524993954825 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.38620689655172413, "acc_stderr": 0.04057324734419034, "acc_norm": 0.38620689655172413, "acc_norm_stderr": 0.04057324734419034 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.22549019607843138, "acc_stderr": 0.04158307533083286, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.04158307533083286 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.453781512605042, "acc_stderr": 0.032339434681820885, "acc_norm": 0.453781512605042, "acc_norm_stderr": 0.032339434681820885 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.44358974358974357, "acc_stderr": 0.02518914989476419, "acc_norm": 0.44358974358974357, "acc_norm_stderr": 0.02518914989476419 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.49, "acc_stderr": 0.05024183937956911, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956911 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5092592592592593, "acc_stderr": 0.04832853553437056, "acc_norm": 0.5092592592592593, "acc_norm_stderr": 
0.04832853553437056 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3645320197044335, "acc_stderr": 0.0338640574606209, "acc_norm": 0.3645320197044335, "acc_norm_stderr": 0.0338640574606209 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.4612903225806452, "acc_stderr": 0.028358634859836918, "acc_norm": 0.4612903225806452, "acc_norm_stderr": 0.028358634859836918 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6410256410256411, "acc_stderr": 0.03142616993791924, "acc_norm": 0.6410256410256411, "acc_norm_stderr": 0.03142616993791924 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4, "acc_stderr": 0.030151134457776292, "acc_norm": 0.4, "acc_norm_stderr": 0.030151134457776292 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5181818181818182, "acc_stderr": 0.04785964010794916, "acc_norm": 0.5181818181818182, "acc_norm_stderr": 0.04785964010794916 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3, "acc_stderr": 0.02794045713622841, "acc_norm": 0.3, "acc_norm_stderr": 0.02794045713622841 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2781456953642384, "acc_stderr": 0.03658603262763743, "acc_norm": 0.2781456953642384, "acc_norm_stderr": 0.03658603262763743 }, "harness|ko_mmlu_sociology|5": { "acc": 0.5671641791044776, "acc_stderr": 0.03503490923673282, "acc_norm": 0.5671641791044776, "acc_norm_stderr": 0.03503490923673282 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.31213872832369943, "acc_stderr": 0.035331333893236574, "acc_norm": 0.31213872832369943, "acc_norm_stderr": 0.035331333893236574 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2698412698412698, "acc_stderr": 0.022860838309232072, "acc_norm": 0.2698412698412698, "acc_norm_stderr": 0.022860838309232072 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.4513888888888889, "acc_stderr": 0.04161402398403279, "acc_norm": 0.4513888888888889, "acc_norm_stderr": 0.04161402398403279 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.63, "acc_stderr": 0.048523658709391, "acc_norm": 0.63, "acc_norm_stderr": 0.048523658709391 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.4913294797687861, "acc_stderr": 0.026915047355369804, "acc_norm": 0.4913294797687861, "acc_norm_stderr": 0.026915047355369804 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.44171779141104295, "acc_stderr": 0.03901591825836184, "acc_norm": 0.44171779141104295, "acc_norm_stderr": 0.03901591825836184 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.42592592592592593, "acc_stderr": 0.027513747284379414, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.027513747284379414 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.538860103626943, "acc_stderr": 0.03597524411734578, "acc_norm": 0.538860103626943, "acc_norm_stderr": 0.03597524411734578 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.22807017543859648, "acc_stderr": 0.03947152782669415, "acc_norm": 0.22807017543859648, "acc_norm_stderr": 0.03947152782669415 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.5339449541284403, "acc_stderr": 0.02138786335035399, "acc_norm": 0.5339449541284403, "acc_norm_stderr": 0.02138786335035399 }, "harness|ko_mmlu_formal_logic|5": { "acc": 0.3492063492063492, "acc_stderr": 0.04263906892795133, "acc_norm": 
0.3492063492063492, "acc_norm_stderr": 0.04263906892795133 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.43790849673202614, "acc_stderr": 0.02840830202033269, "acc_norm": 0.43790849673202614, "acc_norm_stderr": 0.02840830202033269 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6446280991735537, "acc_stderr": 0.0436923632657398, "acc_norm": 0.6446280991735537, "acc_norm_stderr": 0.0436923632657398 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.4407894736842105, "acc_stderr": 0.04040311062490436, "acc_norm": 0.4407894736842105, "acc_norm_stderr": 0.04040311062490436 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3627450980392157, "acc_stderr": 0.019450768432505518, "acc_norm": 0.3627450980392157, "acc_norm_stderr": 0.019450768432505518 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.31560283687943264, "acc_stderr": 0.027724989449509317, "acc_norm": 0.31560283687943264, "acc_norm_stderr": 0.027724989449509317 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.24107142857142858, "acc_stderr": 0.04059867246952687, "acc_norm": 0.24107142857142858, "acc_norm_stderr": 0.04059867246952687 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.35648148148148145, "acc_stderr": 0.032664783315272714, "acc_norm": 0.35648148148148145, "acc_norm_stderr": 0.032664783315272714 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.24692737430167597, "acc_stderr": 0.014422292204808852, "acc_norm": 0.24692737430167597, "acc_norm_stderr": 0.014422292204808852 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.3713235294117647, "acc_stderr": 0.02934980313976587, "acc_norm": 0.3713235294117647, "acc_norm_stderr": 0.02934980313976587 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.40816326530612246, "acc_stderr": 0.03146465712827424, "acc_norm": 0.40816326530612246, "acc_norm_stderr": 0.03146465712827424 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.6329113924050633, "acc_stderr": 0.03137624072561619, "acc_norm": 0.6329113924050633, "acc_norm_stderr": 0.03137624072561619 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.37614080834419816, "acc_stderr": 0.012372214430599819, "acc_norm": 0.37614080834419816, "acc_norm_stderr": 0.012372214430599819 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.4852941176470588, "acc_stderr": 0.03507793834791324, "acc_norm": 0.4852941176470588, "acc_norm_stderr": 0.03507793834791324 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5333333333333333, "acc_stderr": 0.03895658065271847, "acc_norm": 0.5333333333333333, "acc_norm_stderr": 0.03895658065271847 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.27539779681762544, "mc1_stderr": 0.015638135667775523, "mc2": 0.4343237644069022, "mc2_stderr": 0.015029108040608447 }, "harness|ko_commongen_v2|2": { "acc": 0.3872491145218418, "acc_stderr": 0.016747577991642792, "acc_norm": 0.4628099173553719, "acc_norm_stderr": 0.0171427361176433 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, 
"harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "kiyoonyoo/ko-platypus-13b-control", "model_sha": "6cdc49b0713c6d4ad656fe98f5be7eccb1d8b4ef", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }