{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3250853242320819, "acc_stderr": 0.013688147309729124, "acc_norm": 0.36177474402730375, "acc_norm_stderr": 0.01404195794503808 }, "harness|ko_hellaswag|10": { "acc": 0.37263493328022307, "acc_stderr": 0.004825179407757562, "acc_norm": 0.47610037841067515, "acc_norm_stderr": 0.004984077906216099 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.5087719298245614, "acc_stderr": 0.038342347441649924, "acc_norm": 0.5087719298245614, "acc_norm_stderr": 0.038342347441649924 }, "harness|ko_mmlu_management|5": { "acc": 0.5436893203883495, "acc_stderr": 0.049318019942204146, "acc_norm": 0.5436893203883495, "acc_norm_stderr": 0.049318019942204146 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.4725415070242657, "acc_stderr": 0.017852981266633955, "acc_norm": 0.4725415070242657, "acc_norm_stderr": 0.017852981266633955 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.3851851851851852, "acc_stderr": 0.042039210401562783, "acc_norm": 0.3851851851851852, "acc_norm_stderr": 0.042039210401562783 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.43829787234042555, "acc_stderr": 0.03243618636108101, "acc_norm": 0.43829787234042555, "acc_norm_stderr": 0.03243618636108101 }, "harness|ko_mmlu_virology|5": { "acc": 0.41566265060240964, "acc_stderr": 0.03836722176598052, "acc_norm": 0.41566265060240964, "acc_norm_stderr": 0.03836722176598052 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4790996784565916, "acc_stderr": 0.028373270961069414, "acc_norm": 0.4790996784565916, "acc_norm_stderr": 0.028373270961069414 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.39461883408071746, "acc_stderr": 0.03280400504755292, "acc_norm": 0.39461883408071746, "acc_norm_stderr": 0.03280400504755292 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.44274809160305345, "acc_stderr": 0.043564472026650695, "acc_norm": 0.44274809160305345, "acc_norm_stderr": 0.043564472026650695 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5555555555555556, "acc_stderr": 0.035402943770953675, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.035402943770953675 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.47586206896551725, "acc_stderr": 0.04161808503501528, "acc_norm": 0.47586206896551725, "acc_norm_stderr": 0.04161808503501528 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.2549019607843137, "acc_stderr": 0.043364327079931785, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.043364327079931785 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.5084033613445378, "acc_stderr": 0.03247390276569669, "acc_norm": 0.5084033613445378, "acc_norm_stderr": 0.03247390276569669 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.46153846153846156, "acc_stderr": 0.025275892070240627, "acc_norm": 0.46153846153846156, "acc_norm_stderr": 0.025275892070240627 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.49074074074074076, "acc_stderr": 0.04832853553437055, "acc_norm": 
0.49074074074074076, "acc_norm_stderr": 0.04832853553437055 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4482758620689655, "acc_stderr": 0.034991131376767445, "acc_norm": 0.4482758620689655, "acc_norm_stderr": 0.034991131376767445 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.47419354838709676, "acc_stderr": 0.028406095057653315, "acc_norm": 0.47419354838709676, "acc_norm_stderr": 0.028406095057653315 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6581196581196581, "acc_stderr": 0.031075028526507755, "acc_norm": 0.6581196581196581, "acc_norm_stderr": 0.031075028526507755 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4528301886792453, "acc_stderr": 0.030635627957961823, "acc_norm": 0.4528301886792453, "acc_norm_stderr": 0.030635627957961823 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5454545454545454, "acc_stderr": 0.04769300568972744, "acc_norm": 0.5454545454545454, "acc_norm_stderr": 0.04769300568972744 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.28888888888888886, "acc_stderr": 0.027634907264178544, "acc_norm": 0.28888888888888886, "acc_norm_stderr": 0.027634907264178544 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.31125827814569534, "acc_stderr": 0.03780445850526733, "acc_norm": 0.31125827814569534, "acc_norm_stderr": 0.03780445850526733 }, "harness|ko_mmlu_sociology|5": { "acc": 0.6069651741293532, "acc_stderr": 0.0345368246603156, "acc_norm": 0.6069651741293532, "acc_norm_stderr": 0.0345368246603156 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3815028901734104, "acc_stderr": 0.03703851193099521, "acc_norm": 0.3815028901734104, "acc_norm_stderr": 0.03703851193099521 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.35978835978835977, "acc_stderr": 0.024718075944129277, "acc_norm": 0.35978835978835977, "acc_norm_stderr": 0.024718075944129277 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3263888888888889, "acc_stderr": 0.03921067198982266, "acc_norm": 0.3263888888888889, "acc_norm_stderr": 0.03921067198982266 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.43641618497109824, "acc_stderr": 0.026700545424943677, "acc_norm": 0.43641618497109824, "acc_norm_stderr": 0.026700545424943677 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.4171779141104294, "acc_stderr": 0.038741028598180814, "acc_norm": 0.4171779141104294, "acc_norm_stderr": 0.038741028598180814 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.4567901234567901, "acc_stderr": 0.02771666165019404, "acc_norm": 0.4567901234567901, "acc_norm_stderr": 0.02771666165019404 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.44041450777202074, "acc_stderr": 0.03582724530036094, "acc_norm": 0.44041450777202074, "acc_norm_stderr": 0.03582724530036094 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.3157894736842105, "acc_stderr": 0.043727482902780085, "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.043727482902780085 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.47706422018348627, "acc_stderr": 0.021414757058175506, "acc_norm": 0.47706422018348627, "acc_norm_stderr": 
0.021414757058175506 }, "harness|ko_mmlu_formal_logic|5": { "acc": 0.3253968253968254, "acc_stderr": 0.04190596438871136, "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.04190596438871136 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.46405228758169936, "acc_stderr": 0.02855582751652879, "acc_norm": 0.46405228758169936, "acc_norm_stderr": 0.02855582751652879 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6033057851239669, "acc_stderr": 0.044658697805310094, "acc_norm": 0.6033057851239669, "acc_norm_stderr": 0.044658697805310094 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.4605263157894737, "acc_stderr": 0.04056242252249033, "acc_norm": 0.4605263157894737, "acc_norm_stderr": 0.04056242252249033 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.38235294117647056, "acc_stderr": 0.01965992249362335, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.01965992249362335 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.35106382978723405, "acc_stderr": 0.028473501272963768, "acc_norm": 0.35106382978723405, "acc_norm_stderr": 0.028473501272963768 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.42592592592592593, "acc_stderr": 0.03372343271653063, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.03372343271653063 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.26927374301675977, "acc_stderr": 0.014835616582882611, "acc_norm": 0.26927374301675977, "acc_norm_stderr": 0.014835616582882611 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.61, "acc_stderr": 0.04902071300001974, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001974 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4264705882352941, "acc_stderr": 0.030042615832714864, "acc_norm": 0.4264705882352941, "acc_norm_stderr": 0.030042615832714864 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.5142857142857142, "acc_stderr": 0.03199615232806286, "acc_norm": 0.5142857142857142, "acc_norm_stderr": 0.03199615232806286 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.459915611814346, "acc_stderr": 0.03244246810187913, "acc_norm": 0.459915611814346, "acc_norm_stderr": 0.03244246810187913 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.31421121251629724, "acc_stderr": 0.011855911587048228, "acc_norm": 0.31421121251629724, "acc_norm_stderr": 0.011855911587048228 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.3627450980392157, "acc_stderr": 0.03374499356319354, "acc_norm": 0.3627450980392157, "acc_norm_stderr": 0.03374499356319354 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.3939393939393939, "acc_stderr": 0.03815494308688929, "acc_norm": 0.3939393939393939, "acc_norm_stderr": 0.03815494308688929 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.31701346389228885, "mc1_stderr": 0.016289203374403396, "mc2": 0.48915259522870574, "mc2_stderr": 0.015572578945104198 }, "harness|ko_commongen_v2|2": { "acc": 0.2664319248826291, "acc_stderr": 0.01515474253336583, "acc_norm": 0.3532863849765258, "acc_norm_stderr": 0.016385310378526204 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "mncai/mistral-ko-f-1871-ep1", "model_sha": "1ab1ccefadb9c3e832b4d2018cf0220974f998b3", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }