{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.40784982935153585, "acc_stderr": 0.01436109728844971, "acc_norm": 0.47013651877133106, "acc_norm_stderr": 0.014585305840007105 }, "harness|ko_hellaswag|10": { "acc": 0.40728938458474406, "acc_stderr": 0.00490325426417762, "acc_norm": 0.5451105357498506, "acc_norm_stderr": 0.004969431900874312 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.5263157894736842, "acc_stderr": 0.03829509868994727, "acc_norm": 0.5263157894736842, "acc_norm_stderr": 0.03829509868994727 }, "harness|ko_mmlu_management|5": { "acc": 0.6213592233009708, "acc_stderr": 0.04802694698258975, "acc_norm": 0.6213592233009708, "acc_norm_stderr": 0.04802694698258975 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5223499361430396, "acc_stderr": 0.017862091778507852, "acc_norm": 0.5223499361430396, "acc_norm_stderr": 0.017862091778507852 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.4666666666666667, "acc_stderr": 0.043097329010363554, "acc_norm": 0.4666666666666667, "acc_norm_stderr": 0.043097329010363554 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.3617021276595745, "acc_stderr": 0.0314108219759624, "acc_norm": 0.3617021276595745, "acc_norm_stderr": 0.0314108219759624 }, "harness|ko_mmlu_virology|5": { "acc": 0.39759036144578314, "acc_stderr": 0.038099730845402184, "acc_norm": 0.39759036144578314, "acc_norm_stderr": 0.038099730845402184 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4983922829581994, "acc_stderr": 0.02839794490780661, "acc_norm": 0.4983922829581994, "acc_norm_stderr": 0.02839794490780661 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.4125560538116592, "acc_stderr": 0.03304062175449297, "acc_norm": 0.4125560538116592, "acc_norm_stderr": 0.03304062175449297 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.48854961832061067, "acc_stderr": 0.043841400240780176, "acc_norm": 0.48854961832061067, "acc_norm_stderr": 0.043841400240780176 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5404040404040404, "acc_stderr": 0.035507024651313425, "acc_norm": 0.5404040404040404, "acc_norm_stderr": 0.035507024651313425 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.4482758620689655, "acc_stderr": 0.04144311810878152, "acc_norm": 0.4482758620689655, "acc_norm_stderr": 0.04144311810878152 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.22549019607843138, "acc_stderr": 0.04158307533083286, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.04158307533083286 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.46218487394957986, "acc_stderr": 0.032385469487589795, "acc_norm": 0.46218487394957986, "acc_norm_stderr": 0.032385469487589795 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.44358974358974357, "acc_stderr": 0.02518914989476419, "acc_norm": 0.44358974358974357, "acc_norm_stderr": 0.02518914989476419 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5462962962962963, "acc_stderr": 0.04812917324536823, "acc_norm": 0.5462962962962963, 
"acc_norm_stderr": 0.04812917324536823 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3891625615763547, "acc_stderr": 0.034304624161038716, "acc_norm": 0.3891625615763547, "acc_norm_stderr": 0.034304624161038716 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.4774193548387097, "acc_stderr": 0.028414985019707868, "acc_norm": 0.4774193548387097, "acc_norm_stderr": 0.028414985019707868 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6666666666666666, "acc_stderr": 0.030882736974138653, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.030882736974138653 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.47547169811320755, "acc_stderr": 0.030735822206205615, "acc_norm": 0.47547169811320755, "acc_norm_stderr": 0.030735822206205615 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.4818181818181818, "acc_stderr": 0.04785964010794916, "acc_norm": 0.4818181818181818, "acc_norm_stderr": 0.04785964010794916 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.24814814814814815, "acc_stderr": 0.0263357394040558, "acc_norm": 0.24814814814814815, "acc_norm_stderr": 0.0263357394040558 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.03802039760107903, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.03802039760107903 }, "harness|ko_mmlu_sociology|5": { "acc": 0.6019900497512438, "acc_stderr": 0.034611994290400135, "acc_norm": 0.6019900497512438, "acc_norm_stderr": 0.034611994290400135 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.43352601156069365, "acc_stderr": 0.03778621079092055, "acc_norm": 0.43352601156069365, "acc_norm_stderr": 0.03778621079092055 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.31746031746031744, "acc_stderr": 0.02397386199899208, "acc_norm": 0.31746031746031744, "acc_norm_stderr": 0.02397386199899208 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3402777777777778, "acc_stderr": 0.039621355734862175, "acc_norm": 0.3402777777777778, "acc_norm_stderr": 0.039621355734862175 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.67, "acc_stderr": 0.04725815626252605, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252605 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5086705202312138, "acc_stderr": 0.0269150473553698, "acc_norm": 0.5086705202312138, "acc_norm_stderr": 0.0269150473553698 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.4662576687116564, "acc_stderr": 0.039194155450484096, "acc_norm": 0.4662576687116564, "acc_norm_stderr": 0.039194155450484096 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.47530864197530864, "acc_stderr": 0.02778680093142745, "acc_norm": 0.47530864197530864, "acc_norm_stderr": 0.02778680093142745 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5284974093264249, "acc_stderr": 0.03602573571288441, "acc_norm": 0.5284974093264249, "acc_norm_stderr": 0.03602573571288441 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.21052631578947367, "acc_stderr": 0.038351539543994194, "acc_norm": 0.21052631578947367, "acc_norm_stderr": 0.038351539543994194 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.5321100917431193, "acc_stderr": 0.021393071222680814, "acc_norm": 0.5321100917431193, "acc_norm_stderr": 0.021393071222680814 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.2857142857142857, "acc_stderr": 0.0404061017820884, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.0404061017820884 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.4477124183006536, "acc_stderr": 0.02847293847803353, "acc_norm": 0.4477124183006536, "acc_norm_stderr": 0.02847293847803353 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6859504132231405, "acc_stderr": 0.042369647530410184, "acc_norm": 0.6859504132231405, "acc_norm_stderr": 0.042369647530410184 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.4473684210526316, "acc_stderr": 0.040463368839782514, "acc_norm": 0.4473684210526316, "acc_norm_stderr": 0.040463368839782514 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3366013071895425, "acc_stderr": 0.01911721391149515, "acc_norm": 0.3366013071895425, "acc_norm_stderr": 0.01911721391149515 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.35106382978723405, "acc_stderr": 0.02847350127296377, "acc_norm": 0.35106382978723405, "acc_norm_stderr": 0.02847350127296377 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.22321428571428573, "acc_stderr": 0.039523019677025116, "acc_norm": 0.22321428571428573, "acc_norm_stderr": 0.039523019677025116 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3148148148148148, "acc_stderr": 0.03167468706828979, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.03167468706828979 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.34191176470588236, "acc_stderr": 0.028814722422254174, "acc_norm": 0.34191176470588236, "acc_norm_stderr": 0.028814722422254174 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.42448979591836733, "acc_stderr": 0.031642094879429414, "acc_norm": 0.42448979591836733, "acc_norm_stderr": 0.031642094879429414 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5780590717299579, "acc_stderr": 0.032148146302403695, "acc_norm": 0.5780590717299579, "acc_norm_stderr": 0.032148146302403695 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.31290743155149936, "acc_stderr": 0.011842529823063, "acc_norm": 0.31290743155149936, "acc_norm_stderr": 0.011842529823063 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.47549019607843135, "acc_stderr": 0.035050931943487976, "acc_norm": 0.47549019607843135, "acc_norm_stderr": 0.035050931943487976 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5212121212121212, "acc_stderr": 0.03900828913737302, "acc_norm": 0.5212121212121212, "acc_norm_stderr": 0.03900828913737302 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2717258261933905, "mc1_stderr": 0.015572840452875833, "mc2": 0.4439993647512429, "mc2_stderr": 0.014990045797851265 }, "harness|ko_commongen_v2|2": { "acc": 0.4167650531286895, "acc_stderr": 0.01695048914610883, "acc_norm": 0.4817001180637544, "acc_norm_stderr": 0.017178836639177738 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "amphora/olaf-l.0.1", "model_sha": "1fe9598f2ec7fe35ce77e773ef35b97b893b11d0", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }