{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.7073378839590444, "acc_stderr": 0.013295916103619425, "acc_norm": 0.7457337883959044, "acc_norm_stderr": 0.01272499994515774 }, "harness|ko_hellaswag|10": { "acc": 0.5908185620394344, "acc_stderr": 0.004906779523192669, "acc_norm": 0.7459669388568014, "acc_norm_stderr": 0.004344266179634919 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.7309941520467836, "acc_stderr": 0.0340105262010409, "acc_norm": 0.7309941520467836, "acc_norm_stderr": 0.0340105262010409 }, "harness|ko_mmlu_management|5": { "acc": 0.7281553398058253, "acc_stderr": 0.044052680241409216, "acc_norm": 0.7281553398058253, "acc_norm_stderr": 0.044052680241409216 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.698595146871009, "acc_stderr": 0.016409091097268787, "acc_norm": 0.698595146871009, "acc_norm_stderr": 0.016409091097268787 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.48148148148148145, "acc_stderr": 0.043163785995113245, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.043163785995113245 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.4978723404255319, "acc_stderr": 0.03268572658667491, "acc_norm": 0.4978723404255319, "acc_norm_stderr": 0.03268572658667491 }, "harness|ko_mmlu_virology|5": { "acc": 0.4879518072289157, "acc_stderr": 0.0389136449583582, "acc_norm": 0.4879518072289157, "acc_norm_stderr": 0.0389136449583582 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.6495176848874598, "acc_stderr": 0.027098652621301747, "acc_norm": 0.6495176848874598, "acc_norm_stderr": 0.027098652621301747 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.6457399103139013, "acc_stderr": 0.032100621541349864, "acc_norm": 0.6457399103139013, "acc_norm_stderr": 0.032100621541349864 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.6030534351145038, "acc_stderr": 0.04291135671009224, "acc_norm": 0.6030534351145038, "acc_norm_stderr": 0.04291135671009224 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.7878787878787878, "acc_stderr": 0.029126522834586815, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.029126522834586815 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.5724137931034483, "acc_stderr": 0.04122737111370332, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.04122737111370332 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.3137254901960784, "acc_stderr": 0.04617034827006717, "acc_norm": 0.3137254901960784, "acc_norm_stderr": 0.04617034827006717 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.6302521008403361, "acc_stderr": 0.03135709599613591, "acc_norm": 0.6302521008403361, "acc_norm_stderr": 0.03135709599613591 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.6410256410256411, "acc_stderr": 0.024321738484602354, "acc_norm": 0.6410256410256411, "acc_norm_stderr": 0.024321738484602354 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.68, "acc_stderr": 0.046882617226215034, "acc_norm": 0.68, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.32, "acc_stderr": 0.04688261722621505, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621505 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.6574074074074074, "acc_stderr": 0.04587904741301812, "acc_norm": 0.6574074074074074, 
"acc_norm_stderr": 0.04587904741301812 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4729064039408867, "acc_stderr": 0.03512819077876106, "acc_norm": 0.4729064039408867, "acc_norm_stderr": 0.03512819077876106 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.6161290322580645, "acc_stderr": 0.027666182075539638, "acc_norm": 0.6161290322580645, "acc_norm_stderr": 0.027666182075539638 }, "harness|ko_mmlu_marketing|5": { "acc": 0.8418803418803419, "acc_stderr": 0.023902325549560417, "acc_norm": 0.8418803418803419, "acc_norm_stderr": 0.023902325549560417 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.5660377358490566, "acc_stderr": 0.030503292013342596, "acc_norm": 0.5660377358490566, "acc_norm_stderr": 0.030503292013342596 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.6272727272727273, "acc_stderr": 0.04631381319425465, "acc_norm": 0.6272727272727273, "acc_norm_stderr": 0.04631381319425465 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3851851851851852, "acc_stderr": 0.029670906124630882, "acc_norm": 0.3851851851851852, "acc_norm_stderr": 0.029670906124630882 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.37748344370860926, "acc_stderr": 0.03958027231121569, "acc_norm": 0.37748344370860926, "acc_norm_stderr": 0.03958027231121569 }, "harness|ko_mmlu_sociology|5": { "acc": 0.7611940298507462, "acc_stderr": 0.030147775935409217, "acc_norm": 0.7611940298507462, "acc_norm_stderr": 0.030147775935409217 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.5317919075144508, "acc_stderr": 0.03804749744364764, "acc_norm": 0.5317919075144508, "acc_norm_stderr": 0.03804749744364764 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.4576719576719577, "acc_stderr": 0.02565886886205832, "acc_norm": 0.4576719576719577, "acc_norm_stderr": 0.02565886886205832 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.625, "acc_stderr": 0.04048439222695598, "acc_norm": 0.625, "acc_norm_stderr": 0.04048439222695598 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.78, "acc_stderr": 0.04163331998932263, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932263 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5982658959537572, "acc_stderr": 0.026394104177643634, "acc_norm": 0.5982658959537572, "acc_norm_stderr": 0.026394104177643634 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.6196319018404908, "acc_stderr": 0.038142698932618374, "acc_norm": 0.6196319018404908, "acc_norm_stderr": 0.038142698932618374 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.6759259259259259, "acc_stderr": 0.02604176620271716, "acc_norm": 0.6759259259259259, "acc_norm_stderr": 0.02604176620271716 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.7564766839378239, "acc_stderr": 0.030975436386845426, "acc_norm": 0.7564766839378239, "acc_norm_stderr": 0.030975436386845426 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.4649122807017544, "acc_stderr": 0.046920083813689104, "acc_norm": 0.4649122807017544, "acc_norm_stderr": 0.046920083813689104 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.744954128440367, "acc_stderr": 0.018688500856535863, "acc_norm": 0.744954128440367, "acc_norm_stderr": 0.018688500856535863 }, "harness|ko_mmlu_formal_logic|5": { "acc": 
0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.6209150326797386, "acc_stderr": 0.027780141207023344, "acc_norm": 0.6209150326797386, "acc_norm_stderr": 0.027780141207023344 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.67, "acc_stderr": 0.04725815626252607, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252607 }, "harness|ko_mmlu_international_law|5": { "acc": 0.7520661157024794, "acc_stderr": 0.039418975265163025, "acc_norm": 0.7520661157024794, "acc_norm_stderr": 0.039418975265163025 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.6644736842105263, "acc_stderr": 0.038424985593952674, "acc_norm": 0.6644736842105263, "acc_norm_stderr": 0.038424985593952674 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.5931372549019608, "acc_stderr": 0.019873802005061173, "acc_norm": 0.5931372549019608, "acc_norm_stderr": 0.019873802005061173 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.42907801418439717, "acc_stderr": 0.029525914302558562, "acc_norm": 0.42907801418439717, "acc_norm_stderr": 0.029525914302558562 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.42857142857142855, "acc_stderr": 0.04697113923010213, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.04697113923010213 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.5, "acc_stderr": 0.034099716973523674, "acc_norm": 0.5, "acc_norm_stderr": 0.034099716973523674 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.37206703910614525, "acc_stderr": 0.016165847583563292, "acc_norm": 0.37206703910614525, "acc_norm_stderr": 0.016165847583563292 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.66, "acc_stderr": 0.04760952285695238, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695238 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.5367647058823529, "acc_stderr": 0.03029061918048569, "acc_norm": 0.5367647058823529, "acc_norm_stderr": 0.03029061918048569 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.710204081632653, "acc_stderr": 0.02904308868330434, "acc_norm": 0.710204081632653, "acc_norm_stderr": 0.02904308868330434 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.7383966244725738, "acc_stderr": 0.028609516716994934, "acc_norm": 0.7383966244725738, "acc_norm_stderr": 0.028609516716994934 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.45045632333767927, "acc_stderr": 0.012707390438502348, "acc_norm": 0.45045632333767927, "acc_norm_stderr": 0.012707390438502348 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.6813725490196079, "acc_stderr": 0.03270287181482082, "acc_norm": 0.6813725490196079, "acc_norm_stderr": 0.03270287181482082 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.6181818181818182, "acc_stderr": 0.03793713171165634, "acc_norm": 0.6181818181818182, "acc_norm_stderr": 0.03793713171165634 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.7172582619339045, "mc1_stderr": 0.015764770836777298, "mc2": 0.8192245102794692, "mc2_stderr": 0.012628467709646754 }, "harness|ko_commongen_v2|2": { "acc": 0.5147579693034239, "acc_stderr": 0.017182864434998564, "acc_norm": 0.5289256198347108, "acc_norm_stderr": 0.017161563949916348 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, 
"harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "ENERGY-DRINK-LOVE/nox_DPOv3", "model_sha": "b83a769f764d3060a516c3ecbedad554a1922e46", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }