{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.32764505119453924, "acc_stderr": 0.013715847940719346, "acc_norm": 0.3890784982935154, "acc_norm_stderr": 0.014247309976045605 }, "harness|ko_hellaswag|10": { "acc": 0.3777136028679546, "acc_stderr": 0.0048382464107862766, "acc_norm": 0.48894642501493724, "acc_norm_stderr": 0.004988561944277391 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.4678362573099415, "acc_stderr": 0.03826882417660368, "acc_norm": 0.4678362573099415, "acc_norm_stderr": 0.03826882417660368 }, "harness|ko_mmlu_management|5": { "acc": 0.5339805825242718, "acc_stderr": 0.0493929144727348, "acc_norm": 0.5339805825242718, "acc_norm_stderr": 0.0493929144727348 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.4227330779054917, "acc_stderr": 0.017665180351954066, "acc_norm": 0.4227330779054917, "acc_norm_stderr": 0.017665180351954066 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.3925925925925926, "acc_stderr": 0.042185062153688786, "acc_norm": 0.3925925925925926, "acc_norm_stderr": 0.042185062153688786 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.28085106382978725, "acc_stderr": 0.02937917046412482, "acc_norm": 0.28085106382978725, "acc_norm_stderr": 0.02937917046412482 }, "harness|ko_mmlu_virology|5": { "acc": 0.28313253012048195, "acc_stderr": 0.03507295431370519, "acc_norm": 0.28313253012048195, "acc_norm_stderr": 0.03507295431370519 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4180064308681672, "acc_stderr": 0.02801365189199507, "acc_norm": 0.4180064308681672, "acc_norm_stderr": 0.02801365189199507 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.35874439461883406, "acc_stderr": 0.03219079200419996, "acc_norm": 0.35874439461883406, "acc_norm_stderr": 0.03219079200419996 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.4580152671755725, "acc_stderr": 0.04369802690578756, "acc_norm": 0.4580152671755725, "acc_norm_stderr": 0.04369802690578756 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.46464646464646464, "acc_stderr": 0.03553436368828063, "acc_norm": 0.46464646464646464, "acc_norm_stderr": 0.03553436368828063 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.35172413793103446, "acc_stderr": 0.0397923663749741, "acc_norm": 0.35172413793103446, "acc_norm_stderr": 0.0397923663749741 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.2647058823529412, "acc_stderr": 0.043898699568087785, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.043898699568087785 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.3319327731092437, "acc_stderr": 0.030588697013783663, "acc_norm": 0.3319327731092437, "acc_norm_stderr": 0.030588697013783663 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.3435897435897436, "acc_stderr": 0.024078696580635463, "acc_norm": 0.3435897435897436, "acc_norm_stderr": 0.024078696580635463 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.42, "acc_stderr": 0.04960449637488584, "acc_norm": 0.42, "acc_norm_stderr": 0.04960449637488584 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.37962962962962965, "acc_stderr": 0.04691521224077742, "acc_norm": 0.37962962962962965, 
"acc_norm_stderr": 0.04691521224077742 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4039408866995074, "acc_stderr": 0.0345245390382204, "acc_norm": 0.4039408866995074, "acc_norm_stderr": 0.0345245390382204 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.3903225806451613, "acc_stderr": 0.027751256636969573, "acc_norm": 0.3903225806451613, "acc_norm_stderr": 0.027751256636969573 }, "harness|ko_mmlu_marketing|5": { "acc": 0.44871794871794873, "acc_stderr": 0.032583346493868806, "acc_norm": 0.44871794871794873, "acc_norm_stderr": 0.032583346493868806 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.3283018867924528, "acc_stderr": 0.028901593612411784, "acc_norm": 0.3283018867924528, "acc_norm_stderr": 0.028901593612411784 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.33636363636363636, "acc_stderr": 0.04525393596302505, "acc_norm": 0.33636363636363636, "acc_norm_stderr": 0.04525393596302505 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.27037037037037037, "acc_stderr": 0.02708037281514566, "acc_norm": 0.27037037037037037, "acc_norm_stderr": 0.02708037281514566 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.03757949922943343, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943343 }, "harness|ko_mmlu_sociology|5": { "acc": 0.48258706467661694, "acc_stderr": 0.035333892347392454, "acc_norm": 0.48258706467661694, "acc_norm_stderr": 0.035333892347392454 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3236994219653179, "acc_stderr": 0.035676037996391706, "acc_norm": 0.3236994219653179, "acc_norm_stderr": 0.035676037996391706 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.29894179894179895, "acc_stderr": 0.023577604791655802, "acc_norm": 0.29894179894179895, "acc_norm_stderr": 0.023577604791655802 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3125, "acc_stderr": 0.038760854559127644, "acc_norm": 0.3125, "acc_norm_stderr": 0.038760854559127644 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.3670520231213873, "acc_stderr": 0.025950054337654085, "acc_norm": 0.3670520231213873, "acc_norm_stderr": 0.025950054337654085 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.4294478527607362, "acc_stderr": 0.038890666191127216, "acc_norm": 0.4294478527607362, "acc_norm_stderr": 0.038890666191127216 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.41975308641975306, "acc_stderr": 0.027460099557005138, "acc_norm": 0.41975308641975306, "acc_norm_stderr": 0.027460099557005138 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.3471502590673575, "acc_stderr": 0.03435696168361355, "acc_norm": 0.3471502590673575, "acc_norm_stderr": 0.03435696168361355 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.21052631578947367, "acc_stderr": 0.03835153954399421, "acc_norm": 0.21052631578947367, "acc_norm_stderr": 0.03835153954399421 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.4018348623853211, "acc_stderr": 0.02102010617299701, "acc_norm": 0.4018348623853211, "acc_norm_stderr": 0.02102010617299701 }, "harness|ko_mmlu_formal_logic|5": { "acc": 0.21428571428571427, 
"acc_stderr": 0.03670066451047181, "acc_norm": 0.21428571428571427, "acc_norm_stderr": 0.03670066451047181 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.3627450980392157, "acc_stderr": 0.027530078447110307, "acc_norm": 0.3627450980392157, "acc_norm_stderr": 0.027530078447110307 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|ko_mmlu_international_law|5": { "acc": 0.5785123966942148, "acc_stderr": 0.04507732278775087, "acc_norm": 0.5785123966942148, "acc_norm_stderr": 0.04507732278775087 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.39473684210526316, "acc_stderr": 0.039777499346220734, "acc_norm": 0.39473684210526316, "acc_norm_stderr": 0.039777499346220734 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.315359477124183, "acc_stderr": 0.018798086284886897, "acc_norm": 0.315359477124183, "acc_norm_stderr": 0.018798086284886897 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2978723404255319, "acc_stderr": 0.027281608344469414, "acc_norm": 0.2978723404255319, "acc_norm_stderr": 0.027281608344469414 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.23214285714285715, "acc_stderr": 0.04007341809755808, "acc_norm": 0.23214285714285715, "acc_norm_stderr": 0.04007341809755808 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.30092592592592593, "acc_stderr": 0.031280390843298825, "acc_norm": 0.30092592592592593, "acc_norm_stderr": 0.031280390843298825 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2446927374301676, "acc_stderr": 0.014378169884098423, "acc_norm": 0.2446927374301676, "acc_norm_stderr": 0.014378169884098423 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.20220588235294118, "acc_stderr": 0.02439819298665492, "acc_norm": 0.20220588235294118, "acc_norm_stderr": 0.02439819298665492 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.4775510204081633, "acc_stderr": 0.03197694118713673, "acc_norm": 0.4775510204081633, "acc_norm_stderr": 0.03197694118713673 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.4472573839662447, "acc_stderr": 0.03236564251614192, "acc_norm": 0.4472573839662447, "acc_norm_stderr": 0.03236564251614192 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.3070404172099087, "acc_stderr": 0.01178095911451376, "acc_norm": 0.3070404172099087, "acc_norm_stderr": 0.01178095911451376 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.4264705882352941, "acc_stderr": 0.03471157907953426, "acc_norm": 0.4264705882352941, "acc_norm_stderr": 0.03471157907953426 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.4666666666666667, "acc_stderr": 0.038956580652718446, "acc_norm": 0.4666666666666667, "acc_norm_stderr": 0.038956580652718446 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.23255813953488372, "mc1_stderr": 0.01478915753108054, "mc2": 0.40330820076202195, "mc2_stderr": 0.01545475941668547 }, "harness|ko_commongen_v2|2": { "acc": 0.3380281690140845, "acc_stderr": 0.016215540194273195, "acc_norm": 0.3873239436619718, "acc_norm_stderr": 0.016698899332843718 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 
1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "Teddysum/bllossom-1.0-13b", "model_sha": "8d117668b35c61b1d0720a244531cf8123be0bc5", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }