{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3387372013651877, "acc_stderr": 0.013830568927974332, "acc_norm": 0.4035836177474403, "acc_norm_stderr": 0.01433715891426845 }, "harness|ko_hellaswag|10": { "acc": 0.3811989643497311, "acc_stderr": 0.00484688692976347, "acc_norm": 0.4954192391953794, "acc_norm_stderr": 0.004989572002196691 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.49707602339181284, "acc_stderr": 0.03834759370936839, "acc_norm": 0.49707602339181284, "acc_norm_stderr": 0.03834759370936839 }, "harness|ko_mmlu_management|5": { "acc": 0.5825242718446602, "acc_stderr": 0.048828405482122375, "acc_norm": 0.5825242718446602, "acc_norm_stderr": 0.048828405482122375 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.508301404853129, "acc_stderr": 0.017877498991072, "acc_norm": 0.508301404853129, "acc_norm_stderr": 0.017877498991072 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.31851851851851853, "acc_stderr": 0.040247784019771124, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.040247784019771124 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.37872340425531914, "acc_stderr": 0.03170995606040655, "acc_norm": 0.37872340425531914, "acc_norm_stderr": 0.03170995606040655 }, "harness|ko_mmlu_virology|5": { "acc": 0.43373493975903615, "acc_stderr": 0.03858158940685516, "acc_norm": 0.43373493975903615, "acc_norm_stderr": 0.03858158940685516 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.45980707395498394, "acc_stderr": 0.028306190403305696, "acc_norm": 0.45980707395498394, "acc_norm_stderr": 0.028306190403305696 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.48878923766816146, "acc_stderr": 0.033549366530984746, "acc_norm": 0.48878923766816146, "acc_norm_stderr": 0.033549366530984746 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.4351145038167939, "acc_stderr": 0.04348208051644858, "acc_norm": 0.4351145038167939, "acc_norm_stderr": 0.04348208051644858 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5151515151515151, "acc_stderr": 0.03560716516531061, "acc_norm": 0.5151515151515151, "acc_norm_stderr": 0.03560716516531061 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.4689655172413793, "acc_stderr": 0.04158632762097828, "acc_norm": 0.4689655172413793, "acc_norm_stderr": 0.04158632762097828 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.2549019607843137, "acc_stderr": 0.043364327079931785, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.043364327079931785 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.4327731092436975, "acc_stderr": 0.03218358107742613, "acc_norm": 0.4327731092436975, "acc_norm_stderr": 0.03218358107742613 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.4230769230769231, "acc_stderr": 0.025049197876042335, "acc_norm": 0.4230769230769231, "acc_norm_stderr": 0.025049197876042335 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.55, "acc_stderr": 0.049999999999999996, "acc_norm": 0.55, "acc_norm_stderr": 0.049999999999999996 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5185185185185185, "acc_stderr": 0.04830366024635331, "acc_norm": 0.5185185185185185, 
"acc_norm_stderr": 0.04830366024635331 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4187192118226601, "acc_stderr": 0.03471192860518468, "acc_norm": 0.4187192118226601, "acc_norm_stderr": 0.03471192860518468 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.43870967741935485, "acc_stderr": 0.028229497320317216, "acc_norm": 0.43870967741935485, "acc_norm_stderr": 0.028229497320317216 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6923076923076923, "acc_stderr": 0.030236389942173078, "acc_norm": 0.6923076923076923, "acc_norm_stderr": 0.030236389942173078 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.44528301886792454, "acc_stderr": 0.030588052974270655, "acc_norm": 0.44528301886792454, "acc_norm_stderr": 0.030588052974270655 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5, "acc_stderr": 0.04789131426105757, "acc_norm": 0.5, "acc_norm_stderr": 0.04789131426105757 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3111111111111111, "acc_stderr": 0.028226446749683522, "acc_norm": 0.3111111111111111, "acc_norm_stderr": 0.028226446749683522 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2847682119205298, "acc_stderr": 0.03684881521389023, "acc_norm": 0.2847682119205298, "acc_norm_stderr": 0.03684881521389023 }, "harness|ko_mmlu_sociology|5": { "acc": 0.6069651741293532, "acc_stderr": 0.0345368246603156, "acc_norm": 0.6069651741293532, "acc_norm_stderr": 0.0345368246603156 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3872832369942196, "acc_stderr": 0.03714325906302064, "acc_norm": 0.3872832369942196, "acc_norm_stderr": 0.03714325906302064 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.373015873015873, "acc_stderr": 0.02490699045899257, "acc_norm": 0.373015873015873, "acc_norm_stderr": 0.02490699045899257 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3888888888888889, "acc_stderr": 0.04076663253918567, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.04076663253918567 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.63, "acc_stderr": 0.048523658709391, "acc_norm": 0.63, "acc_norm_stderr": 0.048523658709391 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5115606936416185, "acc_stderr": 0.026911898686377913, "acc_norm": 0.5115606936416185, "acc_norm_stderr": 0.026911898686377913 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.43558282208588955, "acc_stderr": 0.03895632464138937, "acc_norm": 0.43558282208588955, "acc_norm_stderr": 0.03895632464138937 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.4660493827160494, "acc_stderr": 0.027756535257347663, "acc_norm": 0.4660493827160494, "acc_norm_stderr": 0.027756535257347663 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5544041450777202, "acc_stderr": 0.03587014986075659, "acc_norm": 0.5544041450777202, "acc_norm_stderr": 0.03587014986075659 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.32456140350877194, "acc_stderr": 0.04404556157374768, "acc_norm": 0.32456140350877194, "acc_norm_stderr": 0.04404556157374768 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.5045871559633027, "acc_stderr": 0.02143642095552942, "acc_norm": 0.5045871559633027, "acc_norm_stderr": 0.02143642095552942 }, "harness|ko_mmlu_formal_logic|5": { "acc": 
0.30158730158730157, "acc_stderr": 0.04104947269903394, "acc_norm": 0.30158730158730157, "acc_norm_stderr": 0.04104947269903394 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.45751633986928103, "acc_stderr": 0.02852638345214264, "acc_norm": 0.45751633986928103, "acc_norm_stderr": 0.02852638345214264 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6694214876033058, "acc_stderr": 0.04294340845212094, "acc_norm": 0.6694214876033058, "acc_norm_stderr": 0.04294340845212094 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.40789473684210525, "acc_stderr": 0.03999309712777473, "acc_norm": 0.40789473684210525, "acc_norm_stderr": 0.03999309712777473 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3790849673202614, "acc_stderr": 0.019627444748412236, "acc_norm": 0.3790849673202614, "acc_norm_stderr": 0.019627444748412236 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.35106382978723405, "acc_stderr": 0.02847350127296378, "acc_norm": 0.35106382978723405, "acc_norm_stderr": 0.02847350127296378 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.38392857142857145, "acc_stderr": 0.04616143075028547, "acc_norm": 0.38392857142857145, "acc_norm_stderr": 0.04616143075028547 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.35648148148148145, "acc_stderr": 0.032664783315272714, "acc_norm": 0.35648148148148145, "acc_norm_stderr": 0.032664783315272714 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.21899441340782122, "acc_stderr": 0.013831676687303205, "acc_norm": 0.21899441340782122, "acc_norm_stderr": 0.013831676687303205 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.3492647058823529, "acc_stderr": 0.028959755196824873, "acc_norm": 0.3492647058823529, "acc_norm_stderr": 0.028959755196824873 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.39591836734693875, "acc_stderr": 0.03130802899065686, "acc_norm": 0.39591836734693875, "acc_norm_stderr": 0.03130802899065686 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.6075949367088608, "acc_stderr": 0.0317847187456473, "acc_norm": 0.6075949367088608, "acc_norm_stderr": 0.0317847187456473 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.33833116036505867, "acc_stderr": 0.012084265626344204, "acc_norm": 0.33833116036505867, "acc_norm_stderr": 0.012084265626344204 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.47058823529411764, "acc_stderr": 0.03503235296367992, "acc_norm": 0.47058823529411764, "acc_norm_stderr": 0.03503235296367992 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5151515151515151, "acc_stderr": 0.03902551007374448, "acc_norm": 0.5151515151515151, "acc_norm_stderr": 0.03902551007374448 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2876376988984088, "mc1_stderr": 0.015846315101394812, "mc2": 0.4533712341088757, "mc2_stderr": 0.015449105919584536 }, "harness|ko_commongen_v2|2": { "acc": 0.3825265643447462, "acc_stderr": 0.016709165387228806, "acc_norm": 0.49586776859504134, "acc_norm_stderr": 0.017189767032130824 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, 
"harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "BM-K/mistral-7b-it-v1.0.2", "model_sha": "06668a57b990007d15d178c94aabd162d6af9531", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }