{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.46245733788395904, "acc_stderr": 0.014570144495075578, "acc_norm": 0.5204778156996587, "acc_norm_stderr": 0.014599131353035004 }, "harness|ko_hellaswag|10": { "acc": 0.4457279426409082, "acc_stderr": 0.004960299952519394, "acc_norm": 0.6199960167297351, "acc_norm_stderr": 0.004843954338451443 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.5614035087719298, "acc_stderr": 0.038057975055904594, "acc_norm": 0.5614035087719298, "acc_norm_stderr": 0.038057975055904594 }, "harness|ko_mmlu_management|5": { "acc": 0.6504854368932039, "acc_stderr": 0.047211885060971716, "acc_norm": 0.6504854368932039, "acc_norm_stderr": 0.047211885060971716 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.648786717752235, "acc_stderr": 0.017069982051499427, "acc_norm": 0.648786717752235, "acc_norm_stderr": 0.017069982051499427 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.4740740740740741, "acc_stderr": 0.04313531696750574, "acc_norm": 0.4740740740740741, "acc_norm_stderr": 0.04313531696750574 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.17, "acc_stderr": 0.03775251680686371, "acc_norm": 0.17, "acc_norm_stderr": 0.03775251680686371 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.4808510638297872, "acc_stderr": 0.032662042990646775, "acc_norm": 0.4808510638297872, "acc_norm_stderr": 0.032662042990646775 }, "harness|ko_mmlu_virology|5": { "acc": 0.4759036144578313, "acc_stderr": 0.03887971849597264, "acc_norm": 0.4759036144578313, "acc_norm_stderr": 0.03887971849597264 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.6205787781350482, "acc_stderr": 0.02755994980234782, "acc_norm": 0.6205787781350482, "acc_norm_stderr": 0.02755994980234782 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.5650224215246636, "acc_stderr": 0.033272833702713445, "acc_norm": 0.5650224215246636, "acc_norm_stderr": 0.033272833702713445 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.6106870229007634, "acc_stderr": 0.04276486542814591, "acc_norm": 0.6106870229007634, "acc_norm_stderr": 0.04276486542814591 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.7171717171717171, "acc_stderr": 0.03208779558786753, "acc_norm": 0.7171717171717171, "acc_norm_stderr": 0.03208779558786753 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.42758620689655175, "acc_stderr": 0.04122737111370332, "acc_norm": 0.42758620689655175, "acc_norm_stderr": 0.04122737111370332 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.30392156862745096, "acc_stderr": 0.04576665403207763, "acc_norm": 0.30392156862745096, "acc_norm_stderr": 0.04576665403207763 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.6176470588235294, "acc_stderr": 0.031566630992154156, "acc_norm": 0.6176470588235294, "acc_norm_stderr": 0.031566630992154156 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.5769230769230769, "acc_stderr": 0.02504919787604236, "acc_norm": 0.5769230769230769, "acc_norm_stderr": 0.02504919787604236 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.6388888888888888, "acc_stderr": 0.04643454608906275, "acc_norm": 0.6388888888888888, 
"acc_norm_stderr": 0.04643454608906275 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3793103448275862, "acc_stderr": 0.034139638059062345, "acc_norm": 0.3793103448275862, "acc_norm_stderr": 0.034139638059062345 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.5870967741935483, "acc_stderr": 0.028009138125400377, "acc_norm": 0.5870967741935483, "acc_norm_stderr": 0.028009138125400377 }, "harness|ko_mmlu_marketing|5": { "acc": 0.7649572649572649, "acc_stderr": 0.027778835904935427, "acc_norm": 0.7649572649572649, "acc_norm_stderr": 0.027778835904935427 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.5283018867924528, "acc_stderr": 0.0307235352490061, "acc_norm": 0.5283018867924528, "acc_norm_stderr": 0.0307235352490061 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.6, "acc_stderr": 0.0469237132203465, "acc_norm": 0.6, "acc_norm_stderr": 0.0469237132203465 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3074074074074074, "acc_stderr": 0.02813325257881563, "acc_norm": 0.3074074074074074, "acc_norm_stderr": 0.02813325257881563 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696 }, "harness|ko_mmlu_sociology|5": { "acc": 0.7313432835820896, "acc_stderr": 0.031343283582089536, "acc_norm": 0.7313432835820896, "acc_norm_stderr": 0.031343283582089536 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.5144508670520231, "acc_stderr": 0.03810871630454764, "acc_norm": 0.5144508670520231, "acc_norm_stderr": 0.03810871630454764 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.4365079365079365, "acc_stderr": 0.025542846817400513, "acc_norm": 0.4365079365079365, "acc_norm_stderr": 0.025542846817400513 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.4722222222222222, "acc_stderr": 0.04174752578923185, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.04174752578923185 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.66, "acc_stderr": 0.04760952285695238, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695238 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5867052023121387, "acc_stderr": 0.026511261369409244, "acc_norm": 0.5867052023121387, "acc_norm_stderr": 0.026511261369409244 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.50920245398773, "acc_stderr": 0.03927705600787443, "acc_norm": 0.50920245398773, "acc_norm_stderr": 0.03927705600787443 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.595679012345679, "acc_stderr": 0.027306625297327684, "acc_norm": 0.595679012345679, "acc_norm_stderr": 0.027306625297327684 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.7098445595854922, "acc_stderr": 0.03275264467791516, "acc_norm": 0.7098445595854922, "acc_norm_stderr": 0.03275264467791516 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.4473684210526316, "acc_stderr": 0.046774730044912005, "acc_norm": 0.4473684210526316, "acc_norm_stderr": 0.046774730044912005 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.6642201834862386, "acc_stderr": 0.02024808139675293, "acc_norm": 0.6642201834862386, "acc_norm_stderr": 0.02024808139675293 }, "harness|ko_mmlu_formal_logic|5": { "acc": 0.42857142857142855, 
"acc_stderr": 0.0442626668137991, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.0442626668137991 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.5816993464052288, "acc_stderr": 0.028245134024387292, "acc_norm": 0.5816993464052288, "acc_norm_stderr": 0.028245134024387292 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|ko_mmlu_international_law|5": { "acc": 0.7107438016528925, "acc_stderr": 0.041391127276354626, "acc_norm": 0.7107438016528925, "acc_norm_stderr": 0.041391127276354626 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.5197368421052632, "acc_stderr": 0.040657710025626036, "acc_norm": 0.5197368421052632, "acc_norm_stderr": 0.040657710025626036 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.511437908496732, "acc_stderr": 0.02022254151561086, "acc_norm": 0.511437908496732, "acc_norm_stderr": 0.02022254151561086 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.4078014184397163, "acc_stderr": 0.029316011776343555, "acc_norm": 0.4078014184397163, "acc_norm_stderr": 0.029316011776343555 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.4107142857142857, "acc_stderr": 0.04669510663875192, "acc_norm": 0.4107142857142857, "acc_norm_stderr": 0.04669510663875192 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.5462962962962963, "acc_stderr": 0.03395322726375797, "acc_norm": 0.5462962962962963, "acc_norm_stderr": 0.03395322726375797 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.30837988826815643, "acc_stderr": 0.015445716910998879, "acc_norm": 0.30837988826815643, "acc_norm_stderr": 0.015445716910998879 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.63, "acc_stderr": 0.048523658709391, "acc_norm": 0.63, "acc_norm_stderr": 0.048523658709391 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.5073529411764706, "acc_stderr": 0.030369552523902173, "acc_norm": 0.5073529411764706, "acc_norm_stderr": 0.030369552523902173 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.6, "acc_stderr": 0.031362502409358936, "acc_norm": 0.6, "acc_norm_stderr": 0.031362502409358936 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.759493670886076, "acc_stderr": 0.027820781981149678, "acc_norm": 0.759493670886076, "acc_norm_stderr": 0.027820781981149678 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.4152542372881356, "acc_stderr": 0.012585471793400664, "acc_norm": 0.4152542372881356, "acc_norm_stderr": 0.012585471793400664 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.6764705882352942, "acc_stderr": 0.03283472056108561, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.03283472056108561 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.7090909090909091, "acc_stderr": 0.03546563019624336, "acc_norm": 0.7090909090909091, "acc_norm_stderr": 0.03546563019624336 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.44063647490820074, "mc1_stderr": 0.01737969755543745, "mc2": 0.6120817623581549, "mc2_stderr": 0.01578357530044301 }, "harness|ko_commongen_v2|2": { "acc": 0.5985832349468713, "acc_stderr": 0.01685290785872906, "acc_norm": 0.6127508854781583, "acc_norm_stderr": 0.01674757799164278 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, 
"harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "JaeyeonKang/CCK-v1.4.0-DPO", "model_sha": "2ab21e24f4b3533531d46ee202bd176dfd004a78", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }