{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.4872013651877133, "acc_stderr": 0.014606603181012546, "acc_norm": 0.5273037542662116, "acc_norm_stderr": 0.014589589101985994 }, "harness|ko_hellaswag|10": { "acc": 0.4403505277833101, "acc_stderr": 0.004954146286513353, "acc_norm": 0.55646285600478, "acc_norm_stderr": 0.004957863944093124 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.5321637426900585, "acc_stderr": 0.03826882417660368, "acc_norm": 0.5321637426900585, "acc_norm_stderr": 0.03826882417660368 }, "harness|ko_mmlu_management|5": { "acc": 0.6116504854368932, "acc_stderr": 0.04825729337356389, "acc_norm": 0.6116504854368932, "acc_norm_stderr": 0.04825729337356389 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.4929757343550447, "acc_stderr": 0.01787819900343221, "acc_norm": 0.4929757343550447, "acc_norm_stderr": 0.01787819900343221 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.34074074074074073, "acc_stderr": 0.040943762699967946, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.040943762699967946 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.42127659574468085, "acc_stderr": 0.03227834510146268, "acc_norm": 0.42127659574468085, "acc_norm_stderr": 0.03227834510146268 }, "harness|ko_mmlu_virology|5": { "acc": 0.3855421686746988, "acc_stderr": 0.037891344246115496, "acc_norm": 0.3855421686746988, "acc_norm_stderr": 0.037891344246115496 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4437299035369775, "acc_stderr": 0.02821768355665231, "acc_norm": 0.4437299035369775, "acc_norm_stderr": 0.02821768355665231 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.43946188340807174, "acc_stderr": 0.03331092511038179, "acc_norm": 0.43946188340807174, "acc_norm_stderr": 0.03331092511038179 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.40458015267175573, "acc_stderr": 0.043046937953806645, "acc_norm": 0.40458015267175573, "acc_norm_stderr": 0.043046937953806645 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5353535353535354, "acc_stderr": 0.03553436368828063, "acc_norm": 0.5353535353535354, "acc_norm_stderr": 0.03553436368828063 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3793103448275862, "acc_stderr": 0.04043461861916747, "acc_norm": 0.3793103448275862, "acc_norm_stderr": 0.04043461861916747 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.04220773659171453, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.04220773659171453 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.46218487394957986, "acc_stderr": 0.032385469487589795, "acc_norm": 0.46218487394957986, "acc_norm_stderr": 0.032385469487589795 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.3974358974358974, "acc_stderr": 0.024811920017903836, "acc_norm": 0.3974358974358974, "acc_norm_stderr": 0.024811920017903836 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.4444444444444444, "acc_stderr": 0.04803752235190193, "acc_norm": 0.4444444444444444, 
"acc_norm_stderr": 0.04803752235190193 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.35960591133004927, "acc_stderr": 0.03376458246509567, "acc_norm": 0.35960591133004927, "acc_norm_stderr": 0.03376458246509567 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.44193548387096776, "acc_stderr": 0.02825155790684974, "acc_norm": 0.44193548387096776, "acc_norm_stderr": 0.02825155790684974 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6837606837606838, "acc_stderr": 0.03046365674734025, "acc_norm": 0.6837606837606838, "acc_norm_stderr": 0.03046365674734025 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4188679245283019, "acc_stderr": 0.03036505082911522, "acc_norm": 0.4188679245283019, "acc_norm_stderr": 0.03036505082911522 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.4727272727272727, "acc_stderr": 0.04782001791380063, "acc_norm": 0.4727272727272727, "acc_norm_stderr": 0.04782001791380063 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2851851851851852, "acc_stderr": 0.027528599210340492, "acc_norm": 0.2851851851851852, "acc_norm_stderr": 0.027528599210340492 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.03757949922943343, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943343 }, "harness|ko_mmlu_sociology|5": { "acc": 0.6069651741293532, "acc_stderr": 0.0345368246603156, "acc_norm": 0.6069651741293532, "acc_norm_stderr": 0.0345368246603156 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3468208092485549, "acc_stderr": 0.036291466701596636, "acc_norm": 0.3468208092485549, "acc_norm_stderr": 0.036291466701596636 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.4021164021164021, "acc_stderr": 0.02525303255499768, "acc_norm": 0.4021164021164021, "acc_norm_stderr": 0.02525303255499768 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.375, "acc_stderr": 0.04048439222695598, "acc_norm": 0.375, "acc_norm_stderr": 0.04048439222695598 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.4624277456647399, "acc_stderr": 0.026842985519615375, "acc_norm": 0.4624277456647399, "acc_norm_stderr": 0.026842985519615375 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.48466257668711654, "acc_stderr": 0.03926522378708843, "acc_norm": 0.48466257668711654, "acc_norm_stderr": 0.03926522378708843 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.43209876543209874, "acc_stderr": 0.027563010971606676, "acc_norm": 0.43209876543209874, "acc_norm_stderr": 0.027563010971606676 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5077720207253886, "acc_stderr": 0.03608003225569654, "acc_norm": 0.5077720207253886, "acc_norm_stderr": 0.03608003225569654 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.3157894736842105, "acc_stderr": 0.04372748290278007, "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.04372748290278007 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.5045871559633027, "acc_stderr": 0.02143642095552942, "acc_norm": 0.5045871559633027, "acc_norm_stderr": 0.02143642095552942 }, "harness|ko_mmlu_formal_logic|5": { "acc": 
0.36507936507936506, "acc_stderr": 0.043062412591271526, "acc_norm": 0.36507936507936506, "acc_norm_stderr": 0.043062412591271526 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.43790849673202614, "acc_stderr": 0.02840830202033269, "acc_norm": 0.43790849673202614, "acc_norm_stderr": 0.02840830202033269 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|ko_mmlu_international_law|5": { "acc": 0.5289256198347108, "acc_stderr": 0.04556710331269498, "acc_norm": 0.5289256198347108, "acc_norm_stderr": 0.04556710331269498 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.3684210526315789, "acc_stderr": 0.03925523381052932, "acc_norm": 0.3684210526315789, "acc_norm_stderr": 0.03925523381052932 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.38235294117647056, "acc_stderr": 0.01965992249362334, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.01965992249362334 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.32269503546099293, "acc_stderr": 0.027889139300534795, "acc_norm": 0.32269503546099293, "acc_norm_stderr": 0.027889139300534795 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.39285714285714285, "acc_stderr": 0.04635550135609976, "acc_norm": 0.39285714285714285, "acc_norm_stderr": 0.04635550135609976 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.2916666666666667, "acc_stderr": 0.03099866630456053, "acc_norm": 0.2916666666666667, "acc_norm_stderr": 0.03099866630456053 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.20335195530726258, "acc_stderr": 0.013461351487507506, "acc_norm": 0.20335195530726258, "acc_norm_stderr": 0.013461351487507506 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.3492647058823529, "acc_stderr": 0.02895975519682487, "acc_norm": 0.3492647058823529, "acc_norm_stderr": 0.02895975519682487 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.4897959183673469, "acc_stderr": 0.03200255347893782, "acc_norm": 0.4897959183673469, "acc_norm_stderr": 0.03200255347893782 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5569620253164557, "acc_stderr": 0.03233532777533484, "acc_norm": 0.5569620253164557, "acc_norm_stderr": 0.03233532777533484 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.3109517601043025, "acc_stderr": 0.0118222529177992, "acc_norm": 0.3109517601043025, "acc_norm_stderr": 0.0118222529177992 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.4068627450980392, "acc_stderr": 0.03447891136353382, "acc_norm": 0.4068627450980392, "acc_norm_stderr": 0.03447891136353382 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.4303030303030303, "acc_stderr": 0.03866225962879077, "acc_norm": 0.4303030303030303, "acc_norm_stderr": 0.03866225962879077 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2974296205630355, "mc1_stderr": 0.016002651487361016, "mc2": 0.4499453306291458, "mc2_stderr": 0.016369397422184195 }, "harness|ko_commongen_v2|2": { "acc": 0.3187721369539551, "acc_stderr": 0.016021427055309578, "acc_norm": 0.3270365997638725, "acc_norm_stderr": 0.016129047485457022 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, 
"harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "DopeorNope/mistralopithecus-v1-dpo-7b", "model_sha": "ec7557bb2a4fbbb775d057f98f98ae6b4430c8d5", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }