{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.2764505119453925, "acc_stderr": 0.013069662474252425, "acc_norm": 0.3216723549488055, "acc_norm_stderr": 0.013650488084494162 }, "harness|ko_hellaswag|10": { "acc": 0.3296156144194384, "acc_stderr": 0.004691128722535481, "acc_norm": 0.4091814379605656, "acc_norm_stderr": 0.004906779523192671 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.4152046783625731, "acc_stderr": 0.03779275945503201, "acc_norm": 0.4152046783625731, "acc_norm_stderr": 0.03779275945503201 }, "harness|ko_mmlu_management|5": { "acc": 0.27184466019417475, "acc_stderr": 0.044052680241409216, "acc_norm": 0.27184466019417475, "acc_norm_stderr": 0.044052680241409216 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.39080459770114945, "acc_stderr": 0.01744836606706253, "acc_norm": 0.39080459770114945, "acc_norm_stderr": 0.01744836606706253 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.2962962962962963, "acc_stderr": 0.03944624162501117, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.03944624162501117 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.04560480215720683, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720683 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.3446808510638298, "acc_stderr": 0.031068985963122145, "acc_norm": 0.3446808510638298, "acc_norm_stderr": 0.031068985963122145 }, "harness|ko_mmlu_virology|5": { "acc": 0.26506024096385544, "acc_stderr": 0.03436024037944967, "acc_norm": 0.26506024096385544, "acc_norm_stderr": 0.03436024037944967 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.37942122186495175, "acc_stderr": 0.02755994980234782, "acc_norm": 0.37942122186495175, "acc_norm_stderr": 0.02755994980234782 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.34977578475336324, "acc_stderr": 0.03200736719484503, "acc_norm": 0.34977578475336324, "acc_norm_stderr": 0.03200736719484503 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.35877862595419846, "acc_stderr": 0.04206739313864908, "acc_norm": 0.35877862595419846, "acc_norm_stderr": 0.04206739313864908 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542129, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542129 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.29797979797979796, "acc_stderr": 0.03258630383836556, "acc_norm": 0.29797979797979796, "acc_norm_stderr": 0.03258630383836556 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3103448275862069, "acc_stderr": 0.038552896163789464, "acc_norm": 0.3103448275862069, "acc_norm_stderr": 0.038552896163789464 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.22549019607843138, "acc_stderr": 0.041583075330832865, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.041583075330832865 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.35294117647058826, "acc_stderr": 0.03104194130405927, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.03104194130405927 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.26153846153846155, "acc_stderr": 0.022282141204204426, "acc_norm": 0.26153846153846155, "acc_norm_stderr": 0.022282141204204426 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.32, "acc_stderr": 0.04688261722621505, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621505 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.4166666666666667, "acc_stderr": 0.04766075165356461, "acc_norm": 
0.4166666666666667, "acc_norm_stderr": 0.04766075165356461 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.31527093596059114, "acc_stderr": 0.03269080871970187, "acc_norm": 0.31527093596059114, "acc_norm_stderr": 0.03269080871970187 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.36451612903225805, "acc_stderr": 0.027379871229943245, "acc_norm": 0.36451612903225805, "acc_norm_stderr": 0.027379871229943245 }, "harness|ko_mmlu_marketing|5": { "acc": 0.4700854700854701, "acc_stderr": 0.03269741106812443, "acc_norm": 0.4700854700854701, "acc_norm_stderr": 0.03269741106812443 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.32452830188679244, "acc_stderr": 0.02881561571343211, "acc_norm": 0.32452830188679244, "acc_norm_stderr": 0.02881561571343211 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.32727272727272727, "acc_stderr": 0.04494290866252088, "acc_norm": 0.32727272727272727, "acc_norm_stderr": 0.04494290866252088 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3111111111111111, "acc_stderr": 0.028226446749683522, "acc_norm": 0.3111111111111111, "acc_norm_stderr": 0.028226446749683522 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.271523178807947, "acc_stderr": 0.03631329803969653, "acc_norm": 0.271523178807947, "acc_norm_stderr": 0.03631329803969653 }, "harness|ko_mmlu_sociology|5": { "acc": 0.43283582089552236, "acc_stderr": 0.03503490923673281, "acc_norm": 0.43283582089552236, "acc_norm_stderr": 0.03503490923673281 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.2543352601156069, "acc_stderr": 0.0332055644308557, "acc_norm": 0.2543352601156069, "acc_norm_stderr": 0.0332055644308557 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2619047619047619, "acc_stderr": 0.022644212615525218, "acc_norm": 0.2619047619047619, "acc_norm_stderr": 0.022644212615525218 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3194444444444444, "acc_stderr": 0.03899073687357335, "acc_norm": 0.3194444444444444, "acc_norm_stderr": 0.03899073687357335 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.3468208092485549, "acc_stderr": 0.025624723994030457, "acc_norm": 0.3468208092485549, "acc_norm_stderr": 0.025624723994030457 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.31901840490797545, "acc_stderr": 0.03661997551073836, "acc_norm": 0.31901840490797545, "acc_norm_stderr": 0.03661997551073836 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.3888888888888889, "acc_stderr": 0.02712511551316686, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.02712511551316686 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.3316062176165803, "acc_stderr": 0.03397636541089117, "acc_norm": 0.3316062176165803, "acc_norm_stderr": 0.03397636541089117 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2719298245614035, "acc_stderr": 0.04185774424022056, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.04185774424022056 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.30275229357798167, "acc_stderr": 0.019698711434756357, "acc_norm": 0.30275229357798167, "acc_norm_stderr": 0.019698711434756357 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.23015873015873015, "acc_stderr": 0.037649508797906066, "acc_norm": 0.23015873015873015, "acc_norm_stderr": 0.037649508797906066 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.3431372549019608, "acc_stderr": 0.027184498909941613, "acc_norm": 0.3431372549019608, "acc_norm_stderr": 0.027184498909941613 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|ko_mmlu_international_law|5": { "acc": 0.5289256198347108, "acc_stderr": 0.04556710331269498, "acc_norm": 0.5289256198347108, "acc_norm_stderr": 0.04556710331269498 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.2894736842105263, "acc_stderr": 0.036906779861372814, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.036906779861372814 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.28921568627450983, "acc_stderr": 0.018342529845275915, "acc_norm": 0.28921568627450983, "acc_norm_stderr": 0.018342529845275915 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2624113475177305, "acc_stderr": 0.02624492034984302, "acc_norm": 0.2624113475177305, "acc_norm_stderr": 0.02624492034984302 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.2857142857142857, "acc_stderr": 0.04287858751340455, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.04287858751340455 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.2962962962962963, "acc_stderr": 0.031141447823536027, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.031141447823536027 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.24804469273743016, "acc_stderr": 0.014444157808261446, "acc_norm": 0.24804469273743016, "acc_norm_stderr": 0.014444157808261446 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.22426470588235295, "acc_stderr": 0.02533684856333236, "acc_norm": 0.22426470588235295, "acc_norm_stderr": 0.02533684856333236 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.39183673469387753, "acc_stderr": 0.03125127591089165, "acc_norm": 0.39183673469387753, "acc_norm_stderr": 0.03125127591089165 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.350210970464135, "acc_stderr": 0.031052391937584353, "acc_norm": 0.350210970464135, "acc_norm_stderr": 0.031052391937584353 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.25945241199478486, "acc_stderr": 0.011195262076350314, "acc_norm": 0.25945241199478486, "acc_norm_stderr": 0.011195262076350314 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.30392156862745096, "acc_stderr": 0.03228210387037892, "acc_norm": 0.30392156862745096, "acc_norm_stderr": 0.03228210387037892 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.32727272727272727, "acc_stderr": 0.03663974994391242, "acc_norm": 0.32727272727272727, "acc_norm_stderr": 0.03663974994391242 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.28886168910648713, "mc1_stderr": 0.015866346401384304, "mc2": 0.4547120708605401, "mc2_stderr": 0.015426627135169792 }, "harness|ko_commongen_v2|2": { "acc": 0.2632821723730815, "acc_stderr": 0.015141752199573205, "acc_norm": 0.3530106257378985, "acc_norm_stderr": 0.016430745982427126 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 
0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "cepiloth/ko-llama2-finetune-ex2", "model_sha": "ab3114ee91616a692eee5bfa8e238f6f821e89b8", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }