{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.31313993174061433, "acc_stderr": 0.013552671543623506, "acc_norm": 0.36177474402730375, "acc_norm_stderr": 0.014041957945038076 }, "harness|ko_hellaswag|10": { "acc": 0.3577972515435172, "acc_stderr": 0.004783723798286501, "acc_norm": 0.4493128858793069, "acc_norm_stderr": 0.00496407587012034 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.3567251461988304, "acc_stderr": 0.03674013002860954, "acc_norm": 0.3567251461988304, "acc_norm_stderr": 0.03674013002860954 }, "harness|ko_mmlu_management|5": { "acc": 0.44660194174757284, "acc_stderr": 0.04922424153458933, "acc_norm": 0.44660194174757284, "acc_norm_stderr": 0.04922424153458933 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.41890166028097064, "acc_stderr": 0.01764320505237717, "acc_norm": 0.41890166028097064, "acc_norm_stderr": 0.01764320505237717 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.26666666666666666, "acc_stderr": 0.038201699145179055, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.038201699145179055 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.27, "acc_stderr": 0.044619604333847415, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847415 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.39148936170212767, "acc_stderr": 0.031907012423268113, "acc_norm": 0.39148936170212767, "acc_norm_stderr": 0.031907012423268113 }, "harness|ko_mmlu_virology|5": { "acc": 0.35542168674698793, "acc_stderr": 0.03726214354322415, "acc_norm": 0.35542168674698793, "acc_norm_stderr": 0.03726214354322415 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.3408360128617363, "acc_stderr": 0.026920841260776155, "acc_norm": 0.3408360128617363, "acc_norm_stderr": 0.026920841260776155 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.4170403587443946, "acc_stderr": 0.03309266936071721, "acc_norm": 0.4170403587443946, "acc_norm_stderr": 0.03309266936071721 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.2595419847328244, "acc_stderr": 0.03844876139785271, "acc_norm": 0.2595419847328244, "acc_norm_stderr": 0.03844876139785271 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.398989898989899, "acc_stderr": 0.03488901616852731, "acc_norm": 0.398989898989899, "acc_norm_stderr": 0.03488901616852731 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.32413793103448274, "acc_stderr": 0.03900432069185553, "acc_norm": 0.32413793103448274, "acc_norm_stderr": 0.03900432069185553 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.04280105837364395, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.04280105837364395 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.39915966386554624, "acc_stderr": 0.031811100324139245, "acc_norm": 0.39915966386554624, "acc_norm_stderr": 0.031811100324139245 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.33589743589743587, "acc_stderr": 0.023946724741563976, "acc_norm": 0.33589743589743587, "acc_norm_stderr": 0.023946724741563976 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.46296296296296297, "acc_stderr": 0.04820403072760627, "acc_norm": 
0.46296296296296297, "acc_norm_stderr": 0.04820403072760627 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.35467980295566504, "acc_stderr": 0.03366124489051449, "acc_norm": 0.35467980295566504, "acc_norm_stderr": 0.03366124489051449 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.3548387096774194, "acc_stderr": 0.027218889773308753, "acc_norm": 0.3548387096774194, "acc_norm_stderr": 0.027218889773308753 }, "harness|ko_mmlu_marketing|5": { "acc": 0.594017094017094, "acc_stderr": 0.03217180182641087, "acc_norm": 0.594017094017094, "acc_norm_stderr": 0.03217180182641087 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.39622641509433965, "acc_stderr": 0.030102793781791194, "acc_norm": 0.39622641509433965, "acc_norm_stderr": 0.030102793781791194 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.44545454545454544, "acc_stderr": 0.047605488214603246, "acc_norm": 0.44545454545454544, "acc_norm_stderr": 0.047605488214603246 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.27037037037037037, "acc_stderr": 0.02708037281514565, "acc_norm": 0.27037037037037037, "acc_norm_stderr": 0.02708037281514565 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.25165562913907286, "acc_stderr": 0.035433042343899844, "acc_norm": 0.25165562913907286, "acc_norm_stderr": 0.035433042343899844 }, "harness|ko_mmlu_sociology|5": { "acc": 0.43283582089552236, "acc_stderr": 0.03503490923673282, "acc_norm": 0.43283582089552236, "acc_norm_stderr": 0.03503490923673282 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3352601156069364, "acc_stderr": 0.03599586301247079, "acc_norm": 0.3352601156069364, "acc_norm_stderr": 0.03599586301247079 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.30158730158730157, "acc_stderr": 0.023636975996101813, "acc_norm": 0.30158730158730157, "acc_norm_stderr": 0.023636975996101813 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2847222222222222, "acc_stderr": 0.03773809990686934, "acc_norm": 0.2847222222222222, "acc_norm_stderr": 0.03773809990686934 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.33, "acc_stderr": 0.04725815626252606, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252606 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.3468208092485549, "acc_stderr": 0.025624723994030454, "acc_norm": 0.3468208092485549, "acc_norm_stderr": 0.025624723994030454 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.39263803680981596, "acc_stderr": 0.03836740907831027, "acc_norm": 0.39263803680981596, "acc_norm_stderr": 0.03836740907831027 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.35185185185185186, "acc_stderr": 0.026571483480719967, "acc_norm": 0.35185185185185186, "acc_norm_stderr": 0.026571483480719967 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.38341968911917096, "acc_stderr": 0.03508984236295341, "acc_norm": 0.38341968911917096, "acc_norm_stderr": 0.03508984236295341 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.30701754385964913, "acc_stderr": 0.043391383225798594, "acc_norm": 0.30701754385964913, "acc_norm_stderr": 0.043391383225798594 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.3743119266055046, "acc_stderr": 0.020748959408988313, "acc_norm": 0.3743119266055046, "acc_norm_stderr": 
0.020748959408988313 }, "harness|ko_mmlu_formal_logic|5": { "acc": 0.23015873015873015, "acc_stderr": 0.037649508797906066, "acc_norm": 0.23015873015873015, "acc_norm_stderr": 0.037649508797906066 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.32679738562091504, "acc_stderr": 0.02685729466328142, "acc_norm": 0.32679738562091504, "acc_norm_stderr": 0.02685729466328142 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|ko_mmlu_international_law|5": { "acc": 0.47107438016528924, "acc_stderr": 0.04556710331269498, "acc_norm": 0.47107438016528924, "acc_norm_stderr": 0.04556710331269498 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.23684210526315788, "acc_stderr": 0.03459777606810536, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.03459777606810536 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.32679738562091504, "acc_stderr": 0.018975427920507226, "acc_norm": 0.32679738562091504, "acc_norm_stderr": 0.018975427920507226 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.30141843971631205, "acc_stderr": 0.027374128882631157, "acc_norm": 0.30141843971631205, "acc_norm_stderr": 0.027374128882631157 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.3392857142857143, "acc_stderr": 0.044939490686135404, "acc_norm": 0.3392857142857143, "acc_norm_stderr": 0.044939490686135404 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3055555555555556, "acc_stderr": 0.03141554629402543, "acc_norm": 0.3055555555555556, "acc_norm_stderr": 0.03141554629402543 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.23, "acc_stderr": 0.042295258468165065, "acc_norm": 0.23, "acc_norm_stderr": 0.042295258468165065 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.2867647058823529, "acc_stderr": 0.02747227447323382, "acc_norm": 0.2867647058823529, "acc_norm_stderr": 0.02747227447323382 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.23673469387755103, "acc_stderr": 0.02721283588407316, "acc_norm": 0.23673469387755103, "acc_norm_stderr": 0.02721283588407316 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.39662447257383965, "acc_stderr": 0.031843998738112264, "acc_norm": 0.39662447257383965, "acc_norm_stderr": 0.031843998738112264 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.2926988265971317, "acc_stderr": 0.011620949195849535, "acc_norm": 0.2926988265971317, "acc_norm_stderr": 0.011620949195849535 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.3333333333333333, "acc_stderr": 0.03308611113236435, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.03308611113236435 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.3090909090909091, "acc_stderr": 0.036085410115739666, "acc_norm": 0.3090909090909091, "acc_norm_stderr": 0.036085410115739666 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.29008567931456547, "mc1_stderr": 0.01588623687420952, "mc2": 0.45430591754746863, "mc2_stderr": 0.015765896646627423 }, "harness|ko_commongen_v2|2": { "acc": 0.3069657615112161, "acc_stderr": 0.015857588095362824, "acc_norm": 0.35182998819362454, "acc_norm_stderr": 0.01641820645121805 } }, "versions": { "all": 0, 
"harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "BM-K/stupid_model", "model_sha": "880d642a665380933fb7c9a975649188c51be2f8", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }