{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3967576791808874, "acc_stderr": 0.014296513020180647, "acc_norm": 0.4590443686006826, "acc_norm_stderr": 0.014562291073601226 }, "harness|ko_hellaswag|10": { "acc": 0.4246166102370046, "acc_stderr": 0.004932745013072719, "acc_norm": 0.571400119498108, "acc_norm_stderr": 0.004938643787869549 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.45614035087719296, "acc_stderr": 0.03820042586602966, "acc_norm": 0.45614035087719296, "acc_norm_stderr": 0.03820042586602966 }, "harness|ko_mmlu_management|5": { "acc": 0.5145631067961165, "acc_stderr": 0.04948637324026637, "acc_norm": 0.5145631067961165, "acc_norm_stderr": 0.04948637324026637 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5478927203065134, "acc_stderr": 0.017797751493865636, "acc_norm": 0.5478927203065134, "acc_norm_stderr": 0.017797751493865636 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.43703703703703706, "acc_stderr": 0.04284958639753399, "acc_norm": 0.43703703703703706, "acc_norm_stderr": 0.04284958639753399 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.4127659574468085, "acc_stderr": 0.03218471141400351, "acc_norm": 0.4127659574468085, "acc_norm_stderr": 0.03218471141400351 }, "harness|ko_mmlu_virology|5": { "acc": 0.42168674698795183, "acc_stderr": 0.038444531817709175, "acc_norm": 0.42168674698795183, "acc_norm_stderr": 0.038444531817709175 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4919614147909968, "acc_stderr": 0.028394421370984545, "acc_norm": 0.4919614147909968, "acc_norm_stderr": 0.028394421370984545 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.5560538116591929, "acc_stderr": 0.03334625674242728, "acc_norm": 0.5560538116591929, "acc_norm_stderr": 0.03334625674242728 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.48854961832061067, "acc_stderr": 0.04384140024078016, "acc_norm": 0.48854961832061067, "acc_norm_stderr": 0.04384140024078016 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.44, "acc_stderr": 0.0498887651569859, "acc_norm": 0.44, "acc_norm_stderr": 0.0498887651569859 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5757575757575758, "acc_stderr": 0.035212249088415866, "acc_norm": 0.5757575757575758, "acc_norm_stderr": 0.035212249088415866 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.43448275862068964, "acc_stderr": 0.04130740879555497, "acc_norm": 0.43448275862068964, "acc_norm_stderr": 0.04130740879555497 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.22549019607843138, "acc_stderr": 0.041583075330832865, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.041583075330832865 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.46218487394957986, "acc_stderr": 0.032385469487589795, "acc_norm": 0.46218487394957986, "acc_norm_stderr": 0.032385469487589795 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.4128205128205128, "acc_stderr": 0.024962683564331827, "acc_norm": 0.4128205128205128, "acc_norm_stderr": 0.024962683564331827 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5, "acc_stderr": 0.04833682445228318, "acc_norm": 0.5, "acc_norm_stderr": 
0.04833682445228318 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4039408866995074, "acc_stderr": 0.0345245390382204, "acc_norm": 0.4039408866995074, "acc_norm_stderr": 0.0345245390382204 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.44193548387096776, "acc_stderr": 0.02825155790684973, "acc_norm": 0.44193548387096776, "acc_norm_stderr": 0.02825155790684973 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6324786324786325, "acc_stderr": 0.031585391577456365, "acc_norm": 0.6324786324786325, "acc_norm_stderr": 0.031585391577456365 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.43018867924528303, "acc_stderr": 0.030471445867183238, "acc_norm": 0.43018867924528303, "acc_norm_stderr": 0.030471445867183238 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5272727272727272, "acc_stderr": 0.0478200179138006, "acc_norm": 0.5272727272727272, "acc_norm_stderr": 0.0478200179138006 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.026719240783712177, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.026719240783712177 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2980132450331126, "acc_stderr": 0.03734535676787198, "acc_norm": 0.2980132450331126, "acc_norm_stderr": 0.03734535676787198 }, "harness|ko_mmlu_sociology|5": { "acc": 0.6019900497512438, "acc_stderr": 0.034611994290400135, "acc_norm": 0.6019900497512438, "acc_norm_stderr": 0.034611994290400135 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3815028901734104, "acc_stderr": 0.03703851193099521, "acc_norm": 0.3815028901734104, "acc_norm_stderr": 0.03703851193099521 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2566137566137566, "acc_stderr": 0.022494510767503154, "acc_norm": 0.2566137566137566, "acc_norm_stderr": 0.022494510767503154 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.4097222222222222, "acc_stderr": 0.04112490974670787, "acc_norm": 0.4097222222222222, "acc_norm_stderr": 0.04112490974670787 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.35, "acc_stderr": 0.04793724854411021, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411021 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.63, "acc_stderr": 0.048523658709391, "acc_norm": 0.63, "acc_norm_stderr": 0.048523658709391 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.4797687861271676, "acc_stderr": 0.026897049996382875, "acc_norm": 0.4797687861271676, "acc_norm_stderr": 0.026897049996382875 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.49693251533742333, "acc_stderr": 0.03928297078179662, "acc_norm": 0.49693251533742333, "acc_norm_stderr": 0.03928297078179662 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.49382716049382713, "acc_stderr": 0.027818623962583295, "acc_norm": 0.49382716049382713, "acc_norm_stderr": 0.027818623962583295 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5233160621761658, "acc_stderr": 0.03604513672442202, "acc_norm": 0.5233160621761658, "acc_norm_stderr": 0.03604513672442202 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2894736842105263, "acc_stderr": 0.04266339443159395, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.04266339443159395 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.5853211009174312, "acc_stderr": 0.021122903208602585, "acc_norm": 0.5853211009174312, "acc_norm_stderr": 0.021122903208602585 }, "harness|ko_mmlu_formal_logic|5": { "acc": 
0.3412698412698413, "acc_stderr": 0.04240799327574924, "acc_norm": 0.3412698412698413, "acc_norm_stderr": 0.04240799327574924 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.39215686274509803, "acc_stderr": 0.027956046165424516, "acc_norm": 0.39215686274509803, "acc_norm_stderr": 0.027956046165424516 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6033057851239669, "acc_stderr": 0.044658697805310094, "acc_norm": 0.6033057851239669, "acc_norm_stderr": 0.044658697805310094 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.39473684210526316, "acc_stderr": 0.039777499346220734, "acc_norm": 0.39473684210526316, "acc_norm_stderr": 0.039777499346220734 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.380718954248366, "acc_stderr": 0.01964380155792481, "acc_norm": 0.380718954248366, "acc_norm_stderr": 0.01964380155792481 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3333333333333333, "acc_stderr": 0.02812163604063989, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.02812163604063989 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.24107142857142858, "acc_stderr": 0.040598672469526864, "acc_norm": 0.24107142857142858, "acc_norm_stderr": 0.040598672469526864 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3148148148148148, "acc_stderr": 0.0316746870682898, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.0316746870682898 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4264705882352941, "acc_stderr": 0.030042615832714857, "acc_norm": 0.4264705882352941, "acc_norm_stderr": 0.030042615832714857 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.49795918367346936, "acc_stderr": 0.0320089533497105, "acc_norm": 0.49795918367346936, "acc_norm_stderr": 0.0320089533497105 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.6497890295358649, "acc_stderr": 0.031052391937584346, "acc_norm": 0.6497890295358649, "acc_norm_stderr": 0.031052391937584346 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.33376792698826596, "acc_stderr": 0.012043812655846147, "acc_norm": 0.33376792698826596, "acc_norm_stderr": 0.012043812655846147 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.5049019607843137, "acc_stderr": 0.03509143375606786, "acc_norm": 0.5049019607843137, "acc_norm_stderr": 0.03509143375606786 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5212121212121212, "acc_stderr": 0.03900828913737301, "acc_norm": 0.5212121212121212, "acc_norm_stderr": 0.03900828913737301 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.28151774785801714, "mc1_stderr": 0.015744027248256055, "mc2": 0.4544067221641174, "mc2_stderr": 0.014927896908949237 }, "harness|ko_commongen_v2|2": { "acc": 0.4805194805194805, "acc_stderr": 0.01717730199234255, "acc_norm": 0.5914994096812278, "acc_norm_stderr": 0.016900062879427122 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, 
"harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.18", "model_sha": "be58129e9338fbdc42bfc803860d4308f835cd6e", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }