{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.34897610921501704, "acc_stderr": 0.013928933461382494, "acc_norm": 0.3984641638225256, "acc_norm_stderr": 0.014306946052735567 }, "harness|ko_hellaswag|10": { "acc": 0.37502489543915557, "acc_stderr": 0.004831399218500244, "acc_norm": 0.47849034056960765, "acc_norm_stderr": 0.00498516207433611 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.5146198830409356, "acc_stderr": 0.038331852752130254, "acc_norm": 0.5146198830409356, "acc_norm_stderr": 0.038331852752130254 }, "harness|ko_mmlu_management|5": { "acc": 0.5631067961165048, "acc_stderr": 0.04911147107365777, "acc_norm": 0.5631067961165048, "acc_norm_stderr": 0.04911147107365777 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.45977011494252873, "acc_stderr": 0.01782199409693353, "acc_norm": 0.45977011494252873, "acc_norm_stderr": 0.01782199409693353 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.45185185185185184, "acc_stderr": 0.042992689054808624, "acc_norm": 0.45185185185185184, "acc_norm_stderr": 0.042992689054808624 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.39574468085106385, "acc_stderr": 0.031967586978353627, "acc_norm": 0.39574468085106385, "acc_norm_stderr": 0.031967586978353627 }, "harness|ko_mmlu_virology|5": { "acc": 0.4578313253012048, "acc_stderr": 0.03878626771002361, "acc_norm": 0.4578313253012048, "acc_norm_stderr": 0.03878626771002361 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.45016077170418006, "acc_stderr": 0.028256660723360184, "acc_norm": 0.45016077170418006, "acc_norm_stderr": 0.028256660723360184 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.3991031390134529, "acc_stderr": 0.03286745312567961, "acc_norm": 0.3991031390134529, "acc_norm_stderr": 0.03286745312567961 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.4732824427480916, "acc_stderr": 0.04379024936553894, "acc_norm": 0.4732824427480916, "acc_norm_stderr": 0.04379024936553894 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5303030303030303, "acc_stderr": 0.03555804051763929, "acc_norm": 0.5303030303030303, "acc_norm_stderr": 0.03555804051763929 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.46206896551724136, "acc_stderr": 0.04154659671707546, "acc_norm": 0.46206896551724136, "acc_norm_stderr": 0.04154659671707546 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.22549019607843138, "acc_stderr": 0.04158307533083286, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.04158307533083286 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.4579831932773109, "acc_stderr": 0.03236361111951941, "acc_norm": 0.4579831932773109, "acc_norm_stderr": 0.03236361111951941 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.4025641025641026, "acc_stderr": 0.02486499515976776, "acc_norm": 0.4025641025641026, "acc_norm_stderr": 0.02486499515976776 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.4722222222222222, "acc_stderr": 0.04826217294139894, "acc_norm": 0.4722222222222222, 
"acc_norm_stderr": 0.04826217294139894 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.41379310344827586, "acc_stderr": 0.03465304488406796, "acc_norm": 0.41379310344827586, "acc_norm_stderr": 0.03465304488406796 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.45806451612903226, "acc_stderr": 0.028343787250540636, "acc_norm": 0.45806451612903226, "acc_norm_stderr": 0.028343787250540636 }, "harness|ko_mmlu_marketing|5": { "acc": 0.7008547008547008, "acc_stderr": 0.02999695185834948, "acc_norm": 0.7008547008547008, "acc_norm_stderr": 0.02999695185834948 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4490566037735849, "acc_stderr": 0.030612730713641095, "acc_norm": 0.4490566037735849, "acc_norm_stderr": 0.030612730713641095 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5363636363636364, "acc_stderr": 0.04776449162396197, "acc_norm": 0.5363636363636364, "acc_norm_stderr": 0.04776449162396197 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3111111111111111, "acc_stderr": 0.028226446749683515, "acc_norm": 0.3111111111111111, "acc_norm_stderr": 0.028226446749683515 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.24503311258278146, "acc_stderr": 0.03511807571804724, "acc_norm": 0.24503311258278146, "acc_norm_stderr": 0.03511807571804724 }, "harness|ko_mmlu_sociology|5": { "acc": 0.5422885572139303, "acc_stderr": 0.035228658640995975, "acc_norm": 0.5422885572139303, "acc_norm_stderr": 0.035228658640995975 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3699421965317919, "acc_stderr": 0.03681229633394319, "acc_norm": 0.3699421965317919, "acc_norm_stderr": 0.03681229633394319 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.34656084656084657, "acc_stderr": 0.024508777521028424, "acc_norm": 0.34656084656084657, "acc_norm_stderr": 0.024508777521028424 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2638888888888889, "acc_stderr": 0.03685651095897532, "acc_norm": 0.2638888888888889, "acc_norm_stderr": 0.03685651095897532 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.63, "acc_stderr": 0.048523658709391, "acc_norm": 0.63, "acc_norm_stderr": 0.048523658709391 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.476878612716763, "acc_stderr": 0.026890297881303125, "acc_norm": 0.476878612716763, "acc_norm_stderr": 0.026890297881303125 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.4171779141104294, "acc_stderr": 0.038741028598180814, "acc_norm": 0.4171779141104294, "acc_norm_stderr": 0.038741028598180814 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.44135802469135804, "acc_stderr": 0.027628737155668777, "acc_norm": 0.44135802469135804, "acc_norm_stderr": 0.027628737155668777 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.04793724854411021, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411021 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5025906735751295, "acc_stderr": 0.03608390745384487, "acc_norm": 0.5025906735751295, "acc_norm_stderr": 0.03608390745384487 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2982456140350877, "acc_stderr": 0.043036840335373173, "acc_norm": 0.2982456140350877, "acc_norm_stderr": 0.043036840335373173 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.46055045871559636, "acc_stderr": 0.0213704946099951, "acc_norm": 0.46055045871559636, "acc_norm_stderr": 0.0213704946099951 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.35714285714285715, "acc_stderr": 0.04285714285714281, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.04285714285714281 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.4869281045751634, "acc_stderr": 0.028620130800700246, "acc_norm": 0.4869281045751634, "acc_norm_stderr": 0.028620130800700246 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6198347107438017, "acc_stderr": 0.044313245019684304, "acc_norm": 0.6198347107438017, "acc_norm_stderr": 0.044313245019684304 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.3815789473684211, "acc_stderr": 0.03953173377749194, "acc_norm": 0.3815789473684211, "acc_norm_stderr": 0.03953173377749194 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3660130718954248, "acc_stderr": 0.01948802574552967, "acc_norm": 0.3660130718954248, "acc_norm_stderr": 0.01948802574552967 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.33687943262411346, "acc_stderr": 0.02819553487396673, "acc_norm": 0.33687943262411346, "acc_norm_stderr": 0.02819553487396673 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.35714285714285715, "acc_stderr": 0.04547960999764376, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.04547960999764376 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.33796296296296297, "acc_stderr": 0.03225941352631295, "acc_norm": 0.33796296296296297, "acc_norm_stderr": 0.03225941352631295 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2569832402234637, "acc_stderr": 0.014614465821966346, "acc_norm": 0.2569832402234637, "acc_norm_stderr": 0.014614465821966346 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.33455882352941174, "acc_stderr": 0.028661996202335307, "acc_norm": 0.33455882352941174, "acc_norm_stderr": 0.028661996202335307 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.5102040816326531, "acc_stderr": 0.03200255347893783, "acc_norm": 0.5102040816326531, "acc_norm_stderr": 0.03200255347893783 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5316455696202531, "acc_stderr": 0.032481974005110756, "acc_norm": 0.5316455696202531, "acc_norm_stderr": 0.032481974005110756 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.29921773142112124, "acc_stderr": 0.011695374630696047, "acc_norm": 0.29921773142112124, "acc_norm_stderr": 0.011695374630696047 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.45588235294117646, "acc_stderr": 0.03495624522015474, "acc_norm": 0.45588235294117646, "acc_norm_stderr": 0.03495624522015474 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.4484848484848485, "acc_stderr": 0.038835659779569286, "acc_norm": 0.4484848484848485, "acc_norm_stderr": 0.038835659779569286 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2802937576499388, "mc1_stderr": 0.015723139524608742, "mc2": 0.47047609010515296, "mc2_stderr": 0.016013828931677482 }, "harness|ko_commongen_v2|2": { "acc": 0.4025974025974026, "acc_stderr": 0.016861020486407776, "acc_norm": 0.42384887839433294, "acc_norm_stderr": 0.016989810834628253 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 
0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "MNC-Jihun/Mistral-11B-Omni-OP-u1k-ver0.5", "model_sha": "8c58d63d92483624ec8b73e6b3ba93338d1abf86", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }