{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3506825938566553, "acc_stderr": 0.013944635930726092, "acc_norm": 0.40273037542662116, "acc_norm_stderr": 0.014332236306790138 }, "harness|ko_hellaswag|10": { "acc": 0.37880900219079866, "acc_stderr": 0.004840990593494688, "acc_norm": 0.49830711013742285, "acc_norm_stderr": 0.004989752811173411 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.47953216374269003, "acc_stderr": 0.038316105328219316, "acc_norm": 0.47953216374269003, "acc_norm_stderr": 0.038316105328219316 }, "harness|ko_mmlu_management|5": { "acc": 0.5631067961165048, "acc_stderr": 0.049111471073657764, "acc_norm": 0.5631067961165048, "acc_norm_stderr": 0.049111471073657764 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.4674329501915709, "acc_stderr": 0.017841995750520857, "acc_norm": 0.4674329501915709, "acc_norm_stderr": 0.017841995750520857 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.43703703703703706, "acc_stderr": 0.04284958639753399, "acc_norm": 0.43703703703703706, "acc_norm_stderr": 0.04284958639753399 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.4297872340425532, "acc_stderr": 0.03236214467715564, "acc_norm": 0.4297872340425532, "acc_norm_stderr": 0.03236214467715564 }, "harness|ko_mmlu_virology|5": { "acc": 0.42168674698795183, "acc_stderr": 0.03844453181770917, "acc_norm": 0.42168674698795183, "acc_norm_stderr": 0.03844453181770917 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.5048231511254019, "acc_stderr": 0.028396770444111298, "acc_norm": 0.5048231511254019, "acc_norm_stderr": 0.028396770444111298 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.4439461883408072, "acc_stderr": 0.03334625674242728, "acc_norm": 0.4439461883408072, "acc_norm_stderr": 0.03334625674242728 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.48091603053435117, "acc_stderr": 0.04382094705550988, "acc_norm": 0.48091603053435117, "acc_norm_stderr": 0.04382094705550988 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5656565656565656, "acc_stderr": 0.03531505879359183, "acc_norm": 0.5656565656565656, "acc_norm_stderr": 0.03531505879359183 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.4413793103448276, "acc_stderr": 0.04137931034482758, "acc_norm": 0.4413793103448276, "acc_norm_stderr": 0.04137931034482758 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.04280105837364396, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.04280105837364396 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.5504201680672269, "acc_stderr": 0.03231293497137707, "acc_norm": 0.5504201680672269, "acc_norm_stderr": 0.03231293497137707 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.49743589743589745, "acc_stderr": 0.025350672979412202, "acc_norm": 0.49743589743589745, "acc_norm_stderr": 0.025350672979412202 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.58, "acc_stderr": 0.04960449637488583, "acc_norm": 0.58, "acc_norm_stderr": 0.04960449637488583 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5277777777777778, "acc_stderr": 0.04826217294139894, "acc_norm": 0.5277777777777778, 
"acc_norm_stderr": 0.04826217294139894 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.42857142857142855, "acc_stderr": 0.03481904844438803, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.03481904844438803 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.4870967741935484, "acc_stderr": 0.028434533152681848, "acc_norm": 0.4870967741935484, "acc_norm_stderr": 0.028434533152681848 }, "harness|ko_mmlu_marketing|5": { "acc": 0.7393162393162394, "acc_stderr": 0.02876034895652341, "acc_norm": 0.7393162393162394, "acc_norm_stderr": 0.02876034895652341 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.43018867924528303, "acc_stderr": 0.030471445867183235, "acc_norm": 0.43018867924528303, "acc_norm_stderr": 0.030471445867183235 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5272727272727272, "acc_stderr": 0.04782001791380061, "acc_norm": 0.5272727272727272, "acc_norm_stderr": 0.04782001791380061 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.34444444444444444, "acc_stderr": 0.02897264888484427, "acc_norm": 0.34444444444444444, "acc_norm_stderr": 0.02897264888484427 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.24503311258278146, "acc_stderr": 0.03511807571804723, "acc_norm": 0.24503311258278146, "acc_norm_stderr": 0.03511807571804723 }, "harness|ko_mmlu_sociology|5": { "acc": 0.6169154228855721, "acc_stderr": 0.0343751933733825, "acc_norm": 0.6169154228855721, "acc_norm_stderr": 0.0343751933733825 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.4046242774566474, "acc_stderr": 0.03742461193887249, "acc_norm": 0.4046242774566474, "acc_norm_stderr": 0.03742461193887249 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.3888888888888889, "acc_stderr": 0.025107425481137282, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.025107425481137282 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3541666666666667, "acc_stderr": 0.039994111357535424, "acc_norm": 0.3541666666666667, "acc_norm_stderr": 0.039994111357535424 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.58, "acc_stderr": 0.04960449637488584, "acc_norm": 0.58, "acc_norm_stderr": 0.04960449637488584 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5317919075144508, "acc_stderr": 0.026864624366756663, "acc_norm": 0.5317919075144508, "acc_norm_stderr": 0.026864624366756663 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.5030674846625767, "acc_stderr": 0.03928297078179663, "acc_norm": 0.5030674846625767, "acc_norm_stderr": 0.03928297078179663 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.4722222222222222, "acc_stderr": 0.027777777777777804, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.027777777777777804 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5440414507772021, "acc_stderr": 0.035944137112724366, "acc_norm": 0.5440414507772021, "acc_norm_stderr": 0.035944137112724366 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.34210526315789475, "acc_stderr": 0.044629175353369376, "acc_norm": 0.34210526315789475, "acc_norm_stderr": 0.044629175353369376 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.47889908256880737, "acc_stderr": 0.021418224754264643, "acc_norm": 0.47889908256880737, "acc_norm_stderr": 0.021418224754264643 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.40476190476190477, "acc_stderr": 0.043902592653775614, "acc_norm": 0.40476190476190477, "acc_norm_stderr": 0.043902592653775614 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.4934640522875817, "acc_stderr": 0.028627470550556054, "acc_norm": 0.4934640522875817, "acc_norm_stderr": 0.028627470550556054 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.44, "acc_stderr": 0.049888765156985884, "acc_norm": 0.44, "acc_norm_stderr": 0.049888765156985884 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6694214876033058, "acc_stderr": 0.04294340845212094, "acc_norm": 0.6694214876033058, "acc_norm_stderr": 0.04294340845212094 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.46710526315789475, "acc_stderr": 0.040601270352363966, "acc_norm": 0.46710526315789475, "acc_norm_stderr": 0.040601270352363966 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.41013071895424835, "acc_stderr": 0.019898412717635913, "acc_norm": 0.41013071895424835, "acc_norm_stderr": 0.019898412717635913 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3546099290780142, "acc_stderr": 0.02853865002887865, "acc_norm": 0.3546099290780142, "acc_norm_stderr": 0.02853865002887865 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.375, "acc_stderr": 0.04595091388086298, "acc_norm": 0.375, "acc_norm_stderr": 0.04595091388086298 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.4444444444444444, "acc_stderr": 0.03388857118502326, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.03388857118502326 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.3486033519553073, "acc_stderr": 0.015937484656687022, "acc_norm": 0.3486033519553073, "acc_norm_stderr": 0.015937484656687022 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.45955882352941174, "acc_stderr": 0.030273325077345755, "acc_norm": 0.45955882352941174, "acc_norm_stderr": 0.030273325077345755 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.563265306122449, "acc_stderr": 0.03175195237583324, "acc_norm": 0.563265306122449, "acc_norm_stderr": 0.03175195237583324 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.6118143459915611, "acc_stderr": 0.03172295004332331, "acc_norm": 0.6118143459915611, "acc_norm_stderr": 0.03172295004332331 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.32985658409387225, "acc_stderr": 0.012008129938540479, "acc_norm": 0.32985658409387225, "acc_norm_stderr": 0.012008129938540479 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.45588235294117646, "acc_stderr": 0.03495624522015473, "acc_norm": 0.45588235294117646, "acc_norm_stderr": 0.03495624522015473 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.4727272727272727, "acc_stderr": 0.03898531605579419, "acc_norm": 0.4727272727272727, "acc_norm_stderr": 0.03898531605579419 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.28886168910648713, "mc1_stderr": 0.0158663464013843, "mc2": 0.48509410722375507, "mc2_stderr": 0.015448476334612172 }, "harness|ko_commongen_v2|2": { "acc": 0.5182998819362455, "acc_stderr": 0.017178836639177755, "acc_norm": 0.5631641086186541, "acc_norm_stderr": 0.017052633559856065 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 
0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "MNC-Jihun/Mistral-7B-OP-u1k-ver0.7", "model_sha": "d6e5e9f3245ff8beba92c77a0cedcfbb5eb8798f", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }