{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.2764505119453925, "acc_stderr": 0.013069662474252428, "acc_norm": 0.33532423208191126, "acc_norm_stderr": 0.013796182947785562 }, "harness|ko_hellaswag|10": { "acc": 0.33031268671579367, "acc_stderr": 0.004693644357202052, "acc_norm": 0.41147181836287594, "acc_norm_stderr": 0.004910946424771612 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.4152046783625731, "acc_stderr": 0.03779275945503201, "acc_norm": 0.4152046783625731, "acc_norm_stderr": 0.03779275945503201 }, "harness|ko_mmlu_management|5": { "acc": 0.27184466019417475, "acc_stderr": 0.044052680241409216, "acc_norm": 0.27184466019417475, "acc_norm_stderr": 0.044052680241409216 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.36015325670498083, "acc_stderr": 0.017166362471369306, "acc_norm": 0.36015325670498083, "acc_norm_stderr": 0.017166362471369306 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04072314811876837, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04072314811876837 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.045604802157206824, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206824 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.33191489361702126, "acc_stderr": 0.030783736757745667, "acc_norm": 0.33191489361702126, "acc_norm_stderr": 0.030783736757745667 }, "harness|ko_mmlu_virology|5": { "acc": 0.2710843373493976, "acc_stderr": 0.03460579907553026, "acc_norm": 0.2710843373493976, "acc_norm_stderr": 0.03460579907553026 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.2990353697749196, "acc_stderr": 0.02600330111788513, "acc_norm": 0.2990353697749196, "acc_norm_stderr": 0.02600330111788513 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.30493273542600896, "acc_stderr": 0.030898610882477515, "acc_norm": 0.30493273542600896, "acc_norm_stderr": 0.030898610882477515 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.32061068702290074, "acc_stderr": 0.040933292298342784, "acc_norm": 0.32061068702290074, "acc_norm_stderr": 0.040933292298342784 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.23, "acc_stderr": 0.042295258468165085, "acc_norm": 0.23, "acc_norm_stderr": 0.042295258468165085 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.3333333333333333, "acc_stderr": 0.03358618145732524, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.03358618145732524 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.2827586206896552, "acc_stderr": 0.03752833958003337, "acc_norm": 0.2827586206896552, "acc_norm_stderr": 0.03752833958003337 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.03950581861179963, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.03950581861179963 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.29831932773109243, "acc_stderr": 0.029719142876342853, "acc_norm": 0.29831932773109243, "acc_norm_stderr": 0.029719142876342853 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.23333333333333334, "acc_stderr": 0.021444547301560465, "acc_norm": 0.23333333333333334, "acc_norm_stderr": 0.021444547301560465 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.35185185185185186, "acc_stderr": 0.04616631111801713, "acc_norm": 
0.35185185185185186, "acc_norm_stderr": 0.04616631111801713 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3054187192118227, "acc_stderr": 0.03240661565868408, "acc_norm": 0.3054187192118227, "acc_norm_stderr": 0.03240661565868408 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.2806451612903226, "acc_stderr": 0.025560604721022895, "acc_norm": 0.2806451612903226, "acc_norm_stderr": 0.025560604721022895 }, "harness|ko_mmlu_marketing|5": { "acc": 0.4017094017094017, "acc_stderr": 0.03211693751051621, "acc_norm": 0.4017094017094017, "acc_norm_stderr": 0.03211693751051621 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.2830188679245283, "acc_stderr": 0.027724236492700897, "acc_norm": 0.2830188679245283, "acc_norm_stderr": 0.027724236492700897 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.36363636363636365, "acc_stderr": 0.04607582090719976, "acc_norm": 0.36363636363636365, "acc_norm_stderr": 0.04607582090719976 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2740740740740741, "acc_stderr": 0.027195934804085626, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.027195934804085626 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.25165562913907286, "acc_stderr": 0.03543304234389985, "acc_norm": 0.25165562913907286, "acc_norm_stderr": 0.03543304234389985 }, "harness|ko_mmlu_sociology|5": { "acc": 0.39800995024875624, "acc_stderr": 0.034611994290400135, "acc_norm": 0.39800995024875624, "acc_norm_stderr": 0.034611994290400135 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.31213872832369943, "acc_stderr": 0.03533133389323657, "acc_norm": 0.31213872832369943, "acc_norm_stderr": 0.03533133389323657 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.24338624338624337, "acc_stderr": 0.022101128787415426, "acc_norm": 0.24338624338624337, "acc_norm_stderr": 0.022101128787415426 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2916666666666667, "acc_stderr": 0.03800968060554857, "acc_norm": 0.2916666666666667, "acc_norm_stderr": 0.03800968060554857 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.31213872832369943, "acc_stderr": 0.02494679222527231, "acc_norm": 0.31213872832369943, "acc_norm_stderr": 0.02494679222527231 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.294478527607362, "acc_stderr": 0.03581165790474082, "acc_norm": 0.294478527607362, "acc_norm_stderr": 0.03581165790474082 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.3055555555555556, "acc_stderr": 0.02563082497562135, "acc_norm": 0.3055555555555556, "acc_norm_stderr": 0.02563082497562135 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.22797927461139897, "acc_stderr": 0.03027690994517826, "acc_norm": 0.22797927461139897, "acc_norm_stderr": 0.03027690994517826 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.21929824561403508, "acc_stderr": 0.03892431106518752, "acc_norm": 0.21929824561403508, "acc_norm_stderr": 0.03892431106518752 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.26422018348623855, "acc_stderr": 0.018904164171510213, "acc_norm": 0.26422018348623855, "acc_norm_stderr": 0.018904164171510213 
}, "harness|ko_mmlu_formal_logic|5": { "acc": 0.23809523809523808, "acc_stderr": 0.03809523809523811, "acc_norm": 0.23809523809523808, "acc_norm_stderr": 0.03809523809523811 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.30718954248366015, "acc_stderr": 0.026415601914389, "acc_norm": 0.30718954248366015, "acc_norm_stderr": 0.026415601914389 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|ko_mmlu_international_law|5": { "acc": 0.4380165289256198, "acc_stderr": 0.045291468044357915, "acc_norm": 0.4380165289256198, "acc_norm_stderr": 0.045291468044357915 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.34868421052631576, "acc_stderr": 0.0387813988879761, "acc_norm": 0.34868421052631576, "acc_norm_stderr": 0.0387813988879761 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.28921568627450983, "acc_stderr": 0.018342529845275915, "acc_norm": 0.28921568627450983, "acc_norm_stderr": 0.018342529845275915 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.25886524822695034, "acc_stderr": 0.026129572527180848, "acc_norm": 0.25886524822695034, "acc_norm_stderr": 0.026129572527180848 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.19642857142857142, "acc_stderr": 0.037709700493470166, "acc_norm": 0.19642857142857142, "acc_norm_stderr": 0.037709700493470166 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.25, "acc_stderr": 0.029531221160930918, "acc_norm": 0.25, "acc_norm_stderr": 0.029531221160930918 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.1875, "acc_stderr": 0.023709788253811766, "acc_norm": 0.1875, "acc_norm_stderr": 0.023709788253811766 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.363265306122449, "acc_stderr": 0.03078905113903081, "acc_norm": 0.363265306122449, "acc_norm_stderr": 0.03078905113903081 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.2790091264667536, "acc_stderr": 0.011455208832803548, "acc_norm": 0.2790091264667536, "acc_norm_stderr": 0.011455208832803548 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.23529411764705882, "acc_stderr": 0.029771775228145628, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.029771775228145628 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.32727272727272727, "acc_stderr": 0.03663974994391244, "acc_norm": 0.32727272727272727, "acc_norm_stderr": 0.03663974994391244 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2631578947368421, "mc1_stderr": 0.015415241740237035, "mc2": 0.4373029262876568, "mc2_stderr": 0.015588306319483176 }, "harness|ko_commongen_v2|2": { "acc": 0.2632821723730815, "acc_stderr": 0.015141752199573201, "acc_norm": 0.33884297520661155, "acc_norm_stderr": 0.016272952997019124 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, 
"harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "cepiloth/ko-llama2-finetune-ex3", "model_sha": "013b64f9d7f8155d95fedc7a859df06ae0c4fce9", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }