{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.378839590443686, "acc_stderr": 0.014175915490000324, "acc_norm": 0.4300341296928328, "acc_norm_stderr": 0.014467631559137998 }, "harness|ko_hellaswag|10": { "acc": 0.4086835291774547, "acc_stderr": 0.004905859114942308, "acc_norm": 0.5462059350726947, "acc_norm_stderr": 0.0049684294763450345 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.5146198830409356, "acc_stderr": 0.038331852752130254, "acc_norm": 0.5146198830409356, "acc_norm_stderr": 0.038331852752130254 }, "harness|ko_mmlu_management|5": { "acc": 0.46601941747572817, "acc_stderr": 0.0493929144727348, "acc_norm": 0.46601941747572817, "acc_norm_stderr": 0.0493929144727348 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.49808429118773945, "acc_stderr": 0.017879832259026677, "acc_norm": 0.49808429118773945, "acc_norm_stderr": 0.017879832259026677 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.362962962962963, "acc_stderr": 0.041539484047424, "acc_norm": 0.362962962962963, "acc_norm_stderr": 0.041539484047424 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.3617021276595745, "acc_stderr": 0.03141082197596239, "acc_norm": 0.3617021276595745, "acc_norm_stderr": 0.03141082197596239 }, "harness|ko_mmlu_virology|5": { "acc": 0.4397590361445783, "acc_stderr": 0.03864139923699122, "acc_norm": 0.4397590361445783, "acc_norm_stderr": 0.03864139923699122 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4694533762057878, "acc_stderr": 0.02834504586484068, "acc_norm": 0.4694533762057878, "acc_norm_stderr": 0.02834504586484068 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.3632286995515695, "acc_stderr": 0.03227790442850499, "acc_norm": 0.3632286995515695, "acc_norm_stderr": 0.03227790442850499 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.4732824427480916, "acc_stderr": 0.04379024936553894, "acc_norm": 0.4732824427480916, "acc_norm_stderr": 0.04379024936553894 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5202020202020202, "acc_stderr": 0.03559443565563918, "acc_norm": 0.5202020202020202, "acc_norm_stderr": 0.03559443565563918 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.41379310344827586, "acc_stderr": 0.04104269211806232, "acc_norm": 0.41379310344827586, "acc_norm_stderr": 0.04104269211806232 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.27450980392156865, "acc_stderr": 0.04440521906179327, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.04440521906179327 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.37815126050420167, "acc_stderr": 0.031499305777849054, "acc_norm": 0.37815126050420167, "acc_norm_stderr": 0.031499305777849054 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.36153846153846153, "acc_stderr": 0.024359581465396955, "acc_norm": 0.36153846153846153, "acc_norm_stderr": 0.024359581465396955 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.39814814814814814, "acc_stderr": 0.04732332615978813, "acc_norm": 0.39814814814814814, 
"acc_norm_stderr": 0.04732332615978813 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.39408866995073893, "acc_stderr": 0.034381579670365446, "acc_norm": 0.39408866995073893, "acc_norm_stderr": 0.034381579670365446 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.45806451612903226, "acc_stderr": 0.02834378725054064, "acc_norm": 0.45806451612903226, "acc_norm_stderr": 0.02834378725054064 }, "harness|ko_mmlu_marketing|5": { "acc": 0.5854700854700855, "acc_stderr": 0.03227396567623779, "acc_norm": 0.5854700854700855, "acc_norm_stderr": 0.03227396567623779 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4377358490566038, "acc_stderr": 0.030533338430467512, "acc_norm": 0.4377358490566038, "acc_norm_stderr": 0.030533338430467512 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.4636363636363636, "acc_stderr": 0.04776449162396197, "acc_norm": 0.4636363636363636, "acc_norm_stderr": 0.04776449162396197 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.26666666666666666, "acc_stderr": 0.026962424325073828, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.026962424325073828 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2913907284768212, "acc_stderr": 0.037101857261199946, "acc_norm": 0.2913907284768212, "acc_norm_stderr": 0.037101857261199946 }, "harness|ko_mmlu_sociology|5": { "acc": 0.572139303482587, "acc_stderr": 0.03498541988407795, "acc_norm": 0.572139303482587, "acc_norm_stderr": 0.03498541988407795 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.35260115606936415, "acc_stderr": 0.036430371689585496, "acc_norm": 0.35260115606936415, "acc_norm_stderr": 0.036430371689585496 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.29894179894179895, "acc_stderr": 0.023577604791655795, "acc_norm": 0.29894179894179895, "acc_norm_stderr": 0.023577604791655795 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3194444444444444, "acc_stderr": 0.03899073687357335, "acc_norm": 0.3194444444444444, "acc_norm_stderr": 0.03899073687357335 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562428 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.42196531791907516, "acc_stderr": 0.02658923114217426, "acc_norm": 0.42196531791907516, "acc_norm_stderr": 0.02658923114217426 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3496932515337423, "acc_stderr": 0.03746668325470021, "acc_norm": 0.3496932515337423, "acc_norm_stderr": 0.03746668325470021 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.44135802469135804, "acc_stderr": 0.027628737155668777, "acc_norm": 0.44135802469135804, "acc_norm_stderr": 0.027628737155668777 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.42487046632124353, "acc_stderr": 0.0356747133521254, "acc_norm": 0.42487046632124353, "acc_norm_stderr": 0.0356747133521254 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2719298245614035, "acc_stderr": 0.041857744240220575, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.041857744240220575 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.46788990825688076, "acc_stderr": 0.021393071222680814, "acc_norm": 0.46788990825688076, "acc_norm_stderr": 0.021393071222680814 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.2698412698412698, "acc_stderr": 0.03970158273235173, "acc_norm": 0.2698412698412698, "acc_norm_stderr": 0.03970158273235173 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.4019607843137255, "acc_stderr": 0.02807415894760066, "acc_norm": 0.4019607843137255, "acc_norm_stderr": 0.02807415894760066 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|ko_mmlu_international_law|5": { "acc": 0.5289256198347108, "acc_stderr": 0.04556710331269498, "acc_norm": 0.5289256198347108, "acc_norm_stderr": 0.04556710331269498 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.39473684210526316, "acc_stderr": 0.03977749934622074, "acc_norm": 0.39473684210526316, "acc_norm_stderr": 0.03977749934622074 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.31862745098039214, "acc_stderr": 0.01885008469646872, "acc_norm": 0.31862745098039214, "acc_norm_stderr": 0.01885008469646872 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3120567375886525, "acc_stderr": 0.027640120545169945, "acc_norm": 0.3120567375886525, "acc_norm_stderr": 0.027640120545169945 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.21428571428571427, "acc_stderr": 0.038946411200447915, "acc_norm": 0.21428571428571427, "acc_norm_stderr": 0.038946411200447915 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.28703703703703703, "acc_stderr": 0.03085199299325701, "acc_norm": 0.28703703703703703, "acc_norm_stderr": 0.03085199299325701 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.22058823529411764, "acc_stderr": 0.025187786660227276, "acc_norm": 0.22058823529411764, "acc_norm_stderr": 0.025187786660227276 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.43673469387755104, "acc_stderr": 0.031751952375833226, "acc_norm": 0.43673469387755104, "acc_norm_stderr": 0.031751952375833226 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.4978902953586498, "acc_stderr": 0.032546938018020076, "acc_norm": 0.4978902953586498, "acc_norm_stderr": 0.032546938018020076 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.29595827900912647, "acc_stderr": 0.011658518525277039, "acc_norm": 0.29595827900912647, "acc_norm_stderr": 0.011658518525277039 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.4068627450980392, "acc_stderr": 0.03447891136353382, "acc_norm": 0.4068627450980392, "acc_norm_stderr": 0.03447891136353382 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.4727272727272727, "acc_stderr": 0.03898531605579419, "acc_norm": 0.4727272727272727, "acc_norm_stderr": 0.03898531605579419 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2582619339045288, "mc1_stderr": 0.0153218216884762, "mc2": 0.4217472836360241, "mc2_stderr": 0.014796357378387609 }, "harness|ko_commongen_v2|2": { "acc": 0.4769775678866588, "acc_stderr": 0.017172121546727637, "acc_norm": 0.5655253837072018, "acc_norm_stderr": 0.017042098620824928 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "OMK510/omk_mixed2", "model_sha": "8fb8a29ecba1b69a023885fcf11ea223b491bbac", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }