{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3310580204778157, "acc_stderr": 0.013752062419817836, "acc_norm": 0.3848122866894198, "acc_norm_stderr": 0.014218371065251117 }, "harness|ko_hellaswag|10": { "acc": 0.3650667197769369, "acc_stderr": 0.0048046491971637005, "acc_norm": 0.4547898824935272, "acc_norm_stderr": 0.004969341773423513 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.4444444444444444, "acc_stderr": 0.03811079669833531, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.03811079669833531 }, "harness|ko_mmlu_management|5": { "acc": 0.49514563106796117, "acc_stderr": 0.049505043821289195, "acc_norm": 0.49514563106796117, "acc_norm_stderr": 0.049505043821289195 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.4342273307790549, "acc_stderr": 0.017724589389677785, "acc_norm": 0.4342273307790549, "acc_norm_stderr": 0.017724589389677785 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.3111111111111111, "acc_stderr": 0.039992628766177214, "acc_norm": 0.3111111111111111, "acc_norm_stderr": 0.039992628766177214 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.35319148936170214, "acc_stderr": 0.03124532520276193, "acc_norm": 0.35319148936170214, "acc_norm_stderr": 0.03124532520276193 }, "harness|ko_mmlu_virology|5": { "acc": 0.3614457831325301, "acc_stderr": 0.0374005938202932, "acc_norm": 0.3614457831325301, "acc_norm_stderr": 0.0374005938202932 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.38263665594855306, "acc_stderr": 0.027604689028581993, "acc_norm": 0.38263665594855306, "acc_norm_stderr": 0.027604689028581993 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.42152466367713004, "acc_stderr": 0.033141902221106564, "acc_norm": 0.42152466367713004, "acc_norm_stderr": 0.033141902221106564 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.37404580152671757, "acc_stderr": 0.042438692422305246, "acc_norm": 0.37404580152671757, "acc_norm_stderr": 0.042438692422305246 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.41919191919191917, "acc_stderr": 0.035155207286704175, "acc_norm": 0.41919191919191917, "acc_norm_stderr": 0.035155207286704175 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.35172413793103446, "acc_stderr": 0.0397923663749741, "acc_norm": 0.35172413793103446, "acc_norm_stderr": 0.0397923663749741 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.03950581861179961, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.03950581861179961 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.39915966386554624, "acc_stderr": 0.03181110032413926, "acc_norm": 0.39915966386554624, "acc_norm_stderr": 0.03181110032413926 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.40512820512820513, "acc_stderr": 0.02489047176993815, "acc_norm": 0.40512820512820513, "acc_norm_stderr": 0.02489047176993815 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.4722222222222222, "acc_stderr": 0.04826217294139894, "acc_norm": 
0.4722222222222222, "acc_norm_stderr": 0.04826217294139894 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.35960591133004927, "acc_stderr": 0.03376458246509568, "acc_norm": 0.35960591133004927, "acc_norm_stderr": 0.03376458246509568 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.42258064516129035, "acc_stderr": 0.02810096472427264, "acc_norm": 0.42258064516129035, "acc_norm_stderr": 0.02810096472427264 }, "harness|ko_mmlu_marketing|5": { "acc": 0.5897435897435898, "acc_stderr": 0.03222414045241107, "acc_norm": 0.5897435897435898, "acc_norm_stderr": 0.03222414045241107 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4679245283018868, "acc_stderr": 0.030709486992556545, "acc_norm": 0.4679245283018868, "acc_norm_stderr": 0.030709486992556545 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.39090909090909093, "acc_stderr": 0.04673752333670237, "acc_norm": 0.39090909090909093, "acc_norm_stderr": 0.04673752333670237 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.026719240783712166, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.026719240783712166 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.23841059602649006, "acc_stderr": 0.03479185572599661, "acc_norm": 0.23841059602649006, "acc_norm_stderr": 0.03479185572599661 }, "harness|ko_mmlu_sociology|5": { "acc": 0.4626865671641791, "acc_stderr": 0.03525675167467974, "acc_norm": 0.4626865671641791, "acc_norm_stderr": 0.03525675167467974 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.36416184971098264, "acc_stderr": 0.03669072477416906, "acc_norm": 0.36416184971098264, "acc_norm_stderr": 0.03669072477416906 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.30158730158730157, "acc_stderr": 0.023636975996101806, "acc_norm": 0.30158730158730157, "acc_norm_stderr": 0.023636975996101806 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2847222222222222, "acc_stderr": 0.037738099906869334, "acc_norm": 0.2847222222222222, "acc_norm_stderr": 0.037738099906869334 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.53, "acc_norm_stderr": 0.050161355804659205 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.4161849710982659, "acc_stderr": 0.026538189104705477, "acc_norm": 0.4161849710982659, "acc_norm_stderr": 0.026538189104705477 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.4110429447852761, "acc_stderr": 0.038656978537853624, "acc_norm": 0.4110429447852761, "acc_norm_stderr": 0.038656978537853624 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.3549382716049383, "acc_stderr": 0.02662415247884585, "acc_norm": 0.3549382716049383, "acc_norm_stderr": 0.02662415247884585 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.44559585492227977, "acc_stderr": 0.03587014986075659, "acc_norm": 0.44559585492227977, "acc_norm_stderr": 0.03587014986075659 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2543859649122807, "acc_stderr": 0.04096985139843671, "acc_norm": 0.2543859649122807, "acc_norm_stderr": 0.04096985139843671 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.45688073394495415, "acc_stderr": 0.021357458785226203, "acc_norm": 0.45688073394495415, "acc_norm_stderr": 
0.021357458785226203 }, "harness|ko_mmlu_formal_logic|5": { "acc": 0.2857142857142857, "acc_stderr": 0.0404061017820884, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.0404061017820884 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.3660130718954248, "acc_stderr": 0.0275828114151596, "acc_norm": 0.3660130718954248, "acc_norm_stderr": 0.0275828114151596 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|ko_mmlu_international_law|5": { "acc": 0.5371900826446281, "acc_stderr": 0.04551711196104218, "acc_norm": 0.5371900826446281, "acc_norm_stderr": 0.04551711196104218 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.3157894736842105, "acc_stderr": 0.0378272898086547, "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.0378272898086547 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.31862745098039214, "acc_stderr": 0.018850084696468705, "acc_norm": 0.31862745098039214, "acc_norm_stderr": 0.018850084696468705 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2907801418439716, "acc_stderr": 0.027090664368353178, "acc_norm": 0.2907801418439716, "acc_norm_stderr": 0.027090664368353178 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.38392857142857145, "acc_stderr": 0.04616143075028547, "acc_norm": 0.38392857142857145, "acc_norm_stderr": 0.04616143075028547 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.35648148148148145, "acc_stderr": 0.032664783315272714, "acc_norm": 0.35648148148148145, "acc_norm_stderr": 0.032664783315272714 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.24134078212290502, "acc_stderr": 0.014310999547961459, "acc_norm": 0.24134078212290502, "acc_norm_stderr": 0.014310999547961459 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.3860294117647059, "acc_stderr": 0.029573269134411124, "acc_norm": 0.3860294117647059, "acc_norm_stderr": 0.029573269134411124 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.37142857142857144, "acc_stderr": 0.03093285879278985, "acc_norm": 0.37142857142857144, "acc_norm_stderr": 0.03093285879278985 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5316455696202531, "acc_stderr": 0.032481974005110756, "acc_norm": 0.5316455696202531, "acc_norm_stderr": 0.032481974005110756 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.3116036505867014, "acc_stderr": 0.011829039182849648, "acc_norm": 0.3116036505867014, "acc_norm_stderr": 0.011829039182849648 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.39705882352941174, "acc_stderr": 0.03434131164719128, "acc_norm": 0.39705882352941174, "acc_norm_stderr": 0.03434131164719128 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.3575757575757576, "acc_stderr": 0.037425970438065864, "acc_norm": 0.3575757575757576, "acc_norm_stderr": 0.037425970438065864 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2998776009791922, "mc1_stderr": 0.016040352966713606, "mc2": 0.4749868563072917, "mc2_stderr": 0.015742730178250185 }, "harness|ko_commongen_v2|2": { "acc": 0.3789846517119244, "acc_stderr": 0.016679260684229286, "acc_norm": 0.48406139315230223, "acc_norm_stderr": 0.017181617837190195 } }, "versions": { "all": 0, 
"harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "LI-ST/Mistral-7B-ko-v0.004", "model_sha": "7f80f3cf2ad264fe73a1934824845e9aa7aa2451", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }