{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3643344709897611, "acc_stderr": 0.014063260279882417, "acc_norm": 0.4112627986348123, "acc_norm_stderr": 0.014379441068522084 }, "harness|ko_hellaswag|10": { "acc": 0.3732324238199562, "acc_stderr": 0.004826746160830189, "acc_norm": 0.4751045608444533, "acc_norm_stderr": 0.004983592410934169 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.4853801169590643, "acc_stderr": 0.038331852752130205, "acc_norm": 0.4853801169590643, "acc_norm_stderr": 0.038331852752130205 }, "harness|ko_mmlu_management|5": { "acc": 0.5631067961165048, "acc_stderr": 0.04911147107365777, "acc_norm": 0.5631067961165048, "acc_norm_stderr": 0.04911147107365777 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.4878671775223499, "acc_stderr": 0.017874698667491355, "acc_norm": 0.4878671775223499, "acc_norm_stderr": 0.017874698667491355 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.4148148148148148, "acc_stderr": 0.042561937679014075, "acc_norm": 0.4148148148148148, "acc_norm_stderr": 0.042561937679014075 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.4, "acc_stderr": 0.03202563076101735, "acc_norm": 0.4, "acc_norm_stderr": 0.03202563076101735 }, "harness|ko_mmlu_virology|5": { "acc": 0.3855421686746988, "acc_stderr": 0.03789134424611548, "acc_norm": 0.3855421686746988, "acc_norm_stderr": 0.03789134424611548 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4533762057877814, "acc_stderr": 0.028274359854894245, "acc_norm": 0.4533762057877814, "acc_norm_stderr": 0.028274359854894245 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.4663677130044843, "acc_stderr": 0.033481800170603065, "acc_norm": 0.4663677130044843, "acc_norm_stderr": 0.033481800170603065 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.48091603053435117, "acc_stderr": 0.04382094705550988, "acc_norm": 0.48091603053435117, "acc_norm_stderr": 0.04382094705550988 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.42, "acc_stderr": 0.04960449637488583, "acc_norm": 0.42, "acc_norm_stderr": 0.04960449637488583 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5909090909090909, "acc_stderr": 0.03502975799413008, "acc_norm": 0.5909090909090909, "acc_norm_stderr": 0.03502975799413008 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.4413793103448276, "acc_stderr": 0.04137931034482758, "acc_norm": 0.4413793103448276, "acc_norm_stderr": 0.04137931034482758 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.16666666666666666, "acc_stderr": 0.03708284662416544, "acc_norm": 0.16666666666666666, "acc_norm_stderr": 0.03708284662416544 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.4495798319327731, "acc_stderr": 0.03231293497137707, "acc_norm": 0.4495798319327731, "acc_norm_stderr": 0.03231293497137707 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.4358974358974359, "acc_stderr": 0.025141801511177498, "acc_norm": 0.4358974358974359, "acc_norm_stderr": 0.025141801511177498 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5277777777777778, "acc_stderr": 0.048262172941398944, "acc_norm": 0.5277777777777778, "acc_norm_stderr": 
0.048262172941398944 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3645320197044335, "acc_stderr": 0.0338640574606209, "acc_norm": 0.3645320197044335, "acc_norm_stderr": 0.0338640574606209 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.47096774193548385, "acc_stderr": 0.028396016402761005, "acc_norm": 0.47096774193548385, "acc_norm_stderr": 0.028396016402761005 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6282051282051282, "acc_stderr": 0.03166098891888078, "acc_norm": 0.6282051282051282, "acc_norm_stderr": 0.03166098891888078 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4528301886792453, "acc_stderr": 0.030635627957961823, "acc_norm": 0.4528301886792453, "acc_norm_stderr": 0.030635627957961823 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.509090909090909, "acc_stderr": 0.04788339768702861, "acc_norm": 0.509090909090909, "acc_norm_stderr": 0.04788339768702861 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3111111111111111, "acc_stderr": 0.028226446749683526, "acc_norm": 0.3111111111111111, "acc_norm_stderr": 0.028226446749683526 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.038020397601079024, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.038020397601079024 }, "harness|ko_mmlu_sociology|5": { "acc": 0.5771144278606966, "acc_stderr": 0.034932317774212816, "acc_norm": 0.5771144278606966, "acc_norm_stderr": 0.034932317774212816 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3815028901734104, "acc_stderr": 0.03703851193099521, "acc_norm": 0.3815028901734104, "acc_norm_stderr": 0.03703851193099521 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.3253968253968254, "acc_stderr": 0.024130158299762613, "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.024130158299762613 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3819444444444444, "acc_stderr": 0.040629907841466674, "acc_norm": 0.3819444444444444, "acc_norm_stderr": 0.040629907841466674 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.26, "acc_stderr": 0.04408440022768079, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768079 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.6, "acc_stderr": 0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5144508670520231, "acc_stderr": 0.02690784985628254, "acc_norm": 0.5144508670520231, "acc_norm_stderr": 0.02690784985628254 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.44171779141104295, "acc_stderr": 0.03901591825836184, "acc_norm": 0.44171779141104295, "acc_norm_stderr": 0.03901591825836184 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.4444444444444444, "acc_stderr": 0.02764847787741332, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.02764847787741332 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5077720207253886, "acc_stderr": 0.03608003225569654, "acc_norm": 0.5077720207253886, "acc_norm_stderr": 0.03608003225569654 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.19298245614035087, "acc_stderr": 0.037124548537213684, "acc_norm": 0.19298245614035087, "acc_norm_stderr": 0.037124548537213684 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.47889908256880737, "acc_stderr": 0.021418224754264643, "acc_norm": 0.47889908256880737, "acc_norm_stderr": 0.021418224754264643 }, "harness|ko_mmlu_formal_logic|5": { "acc": 
0.373015873015873, "acc_stderr": 0.04325506042017086, "acc_norm": 0.373015873015873, "acc_norm_stderr": 0.04325506042017086 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.43790849673202614, "acc_stderr": 0.028408302020332687, "acc_norm": 0.43790849673202614, "acc_norm_stderr": 0.028408302020332687 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.42, "acc_stderr": 0.04960449637488584, "acc_norm": 0.42, "acc_norm_stderr": 0.04960449637488584 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6859504132231405, "acc_stderr": 0.042369647530410184, "acc_norm": 0.6859504132231405, "acc_norm_stderr": 0.042369647530410184 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.34868421052631576, "acc_stderr": 0.03878139888797609, "acc_norm": 0.34868421052631576, "acc_norm_stderr": 0.03878139888797609 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3480392156862745, "acc_stderr": 0.01927099870822398, "acc_norm": 0.3480392156862745, "acc_norm_stderr": 0.01927099870822398 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3475177304964539, "acc_stderr": 0.028406627809590947, "acc_norm": 0.3475177304964539, "acc_norm_stderr": 0.028406627809590947 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.23214285714285715, "acc_stderr": 0.040073418097558065, "acc_norm": 0.23214285714285715, "acc_norm_stderr": 0.040073418097558065 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3101851851851852, "acc_stderr": 0.031546962856566295, "acc_norm": 0.3101851851851852, "acc_norm_stderr": 0.031546962856566295 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.3106145251396648, "acc_stderr": 0.015476515438005566, "acc_norm": 0.3106145251396648, "acc_norm_stderr": 0.015476515438005566 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.3235294117647059, "acc_stderr": 0.02841820861940679, "acc_norm": 0.3235294117647059, "acc_norm_stderr": 0.02841820861940679 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.5428571428571428, "acc_stderr": 0.03189141832421396, "acc_norm": 0.5428571428571428, "acc_norm_stderr": 0.03189141832421396 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5780590717299579, "acc_stderr": 0.032148146302403695, "acc_norm": 0.5780590717299579, "acc_norm_stderr": 0.032148146302403695 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.32333767926988266, "acc_stderr": 0.011946565758447202, "acc_norm": 0.32333767926988266, "acc_norm_stderr": 0.011946565758447202 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.47549019607843135, "acc_stderr": 0.035050931943487976, "acc_norm": 0.47549019607843135, "acc_norm_stderr": 0.035050931943487976 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5393939393939394, "acc_stderr": 0.03892207016552012, "acc_norm": 0.5393939393939394, "acc_norm_stderr": 0.03892207016552012 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.35006119951040393, "mc1_stderr": 0.01669794942015103, "mc2": 0.5148844380994511, "mc2_stderr": 0.015947695748354234 }, "harness|ko_commongen_v2|2": { "acc": 0.42857142857142855, "acc_stderr": 0.017014038119297473, "acc_norm": 0.44155844155844154, "acc_norm_stderr": 0.0170725258755631 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, 
"harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "42MARU/sitebunny-13b", "model_sha": "15c8578d2be688d6b03ed2076658865bb8752673", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }