{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.378839590443686, "acc_stderr": 0.014175915490000322, "acc_norm": 0.4513651877133106, "acc_norm_stderr": 0.014542104569955264 }, "harness|ko_hellaswag|10": { "acc": 0.433877713602868, "acc_stderr": 0.004945956744943811, "acc_norm": 0.5880302728540131, "acc_norm_stderr": 0.004911837730582209 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.6608187134502924, "acc_stderr": 0.03631053496488904, "acc_norm": 0.6608187134502924, "acc_norm_stderr": 0.03631053496488904 }, "harness|ko_mmlu_management|5": { "acc": 0.6310679611650486, "acc_stderr": 0.0477761518115674, "acc_norm": 0.6310679611650486, "acc_norm_stderr": 0.0477761518115674 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.6526181353767561, "acc_stderr": 0.01702667174865574, "acc_norm": 0.6526181353767561, "acc_norm_stderr": 0.01702667174865574 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.4148148148148148, "acc_stderr": 0.04256193767901407, "acc_norm": 0.4148148148148148, "acc_norm_stderr": 0.04256193767901407 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.27, "acc_stderr": 0.04461960433384741, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384741 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.41702127659574467, "acc_stderr": 0.03223276266711712, "acc_norm": 0.41702127659574467, "acc_norm_stderr": 0.03223276266711712 }, "harness|ko_mmlu_virology|5": { "acc": 0.40963855421686746, "acc_stderr": 0.03828401115079022, "acc_norm": 0.40963855421686746, "acc_norm_stderr": 0.03828401115079022 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4790996784565916, "acc_stderr": 0.028373270961069414, "acc_norm": 0.4790996784565916, "acc_norm_stderr": 0.028373270961069414 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.5426008968609866, "acc_stderr": 0.033435777055830646, "acc_norm": 0.5426008968609866, "acc_norm_stderr": 0.033435777055830646 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.5343511450381679, "acc_stderr": 0.04374928560599738, "acc_norm": 0.5343511450381679, "acc_norm_stderr": 0.04374928560599738 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5656565656565656, "acc_stderr": 0.03531505879359182, "acc_norm": 0.5656565656565656, "acc_norm_stderr": 0.03531505879359182 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.47586206896551725, "acc_stderr": 0.041618085035015295, "acc_norm": 0.47586206896551725, "acc_norm_stderr": 0.041618085035015295 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237655, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237655 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.46218487394957986, "acc_stderr": 0.032385469487589795, "acc_norm": 0.46218487394957986, "acc_norm_stderr": 0.032385469487589795 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.4641025641025641, "acc_stderr": 0.025285585990017834, "acc_norm": 0.4641025641025641, "acc_norm_stderr": 0.025285585990017834 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5370370370370371, "acc_stderr": 0.04820403072760627, "acc_norm": 0.5370370370370371, "acc_norm_stderr": 0.04820403072760627 
}, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.33497536945812806, "acc_stderr": 0.0332085274234831, "acc_norm": 0.33497536945812806, "acc_norm_stderr": 0.0332085274234831 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.5387096774193548, "acc_stderr": 0.028358634859836935, "acc_norm": 0.5387096774193548, "acc_norm_stderr": 0.028358634859836935 }, "harness|ko_mmlu_marketing|5": { "acc": 0.7222222222222222, "acc_stderr": 0.029343114798094462, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.029343114798094462 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4867924528301887, "acc_stderr": 0.030762134874500476, "acc_norm": 0.4867924528301887, "acc_norm_stderr": 0.030762134874500476 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5272727272727272, "acc_stderr": 0.04782001791380061, "acc_norm": 0.5272727272727272, "acc_norm_stderr": 0.04782001791380061 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2851851851851852, "acc_stderr": 0.027528599210340492, "acc_norm": 0.2851851851851852, "acc_norm_stderr": 0.027528599210340492 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.26490066225165565, "acc_stderr": 0.03603038545360384, "acc_norm": 0.26490066225165565, "acc_norm_stderr": 0.03603038545360384 }, "harness|ko_mmlu_sociology|5": { "acc": 0.6318407960199005, "acc_stderr": 0.03410410565495302, "acc_norm": 0.6318407960199005, "acc_norm_stderr": 0.03410410565495302 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.47398843930635837, "acc_stderr": 0.03807301726504513, "acc_norm": 0.47398843930635837, "acc_norm_stderr": 0.03807301726504513 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.328042328042328, "acc_stderr": 0.02418049716437691, "acc_norm": 0.328042328042328, "acc_norm_stderr": 0.02418049716437691 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.4097222222222222, "acc_stderr": 0.04112490974670788, "acc_norm": 0.4097222222222222, "acc_norm_stderr": 0.04112490974670788 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.4797687861271676, "acc_stderr": 0.026897049996382868, "acc_norm": 0.4797687861271676, "acc_norm_stderr": 0.026897049996382868 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.5276073619631901, "acc_stderr": 0.0392237829061099, "acc_norm": 0.5276073619631901, "acc_norm_stderr": 0.0392237829061099 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.5, "acc_stderr": 0.02782074420373286, "acc_norm": 0.5, "acc_norm_stderr": 0.02782074420373286 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.6217616580310881, "acc_stderr": 0.03499807276193339, "acc_norm": 0.6217616580310881, "acc_norm_stderr": 0.03499807276193339 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2719298245614035, "acc_stderr": 0.04185774424022057, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.04185774424022057 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.6238532110091743, "acc_stderr": 0.02076923196820508, "acc_norm": 0.6238532110091743, "acc_norm_stderr": 0.02076923196820508 }, "harness|ko_mmlu_formal_logic|5": { "acc": 0.3492063492063492, "acc_stderr": 0.04263906892795133, 
"acc_norm": 0.3492063492063492, "acc_norm_stderr": 0.04263906892795133 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.4869281045751634, "acc_stderr": 0.028620130800700246, "acc_norm": 0.4869281045751634, "acc_norm_stderr": 0.028620130800700246 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6528925619834711, "acc_stderr": 0.043457245702925335, "acc_norm": 0.6528925619834711, "acc_norm_stderr": 0.043457245702925335 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.5, "acc_stderr": 0.04068942293855797, "acc_norm": 0.5, "acc_norm_stderr": 0.04068942293855797 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.4166666666666667, "acc_stderr": 0.019944914136873583, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.019944914136873583 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3333333333333333, "acc_stderr": 0.02812163604063988, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.02812163604063988 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.3482142857142857, "acc_stderr": 0.04521829902833586, "acc_norm": 0.3482142857142857, "acc_norm_stderr": 0.04521829902833586 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.2962962962962963, "acc_stderr": 0.03114144782353604, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.03114144782353604 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.33519553072625696, "acc_stderr": 0.01578800719018589, "acc_norm": 0.33519553072625696, "acc_norm_stderr": 0.01578800719018589 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4411764705882353, "acc_stderr": 0.030161911930767105, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.030161911930767105 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.46122448979591835, "acc_stderr": 0.03191282052669277, "acc_norm": 0.46122448979591835, "acc_norm_stderr": 0.03191282052669277 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.6624472573839663, "acc_stderr": 0.030781549102026223, "acc_norm": 0.6624472573839663, "acc_norm_stderr": 0.030781549102026223 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.31681877444589307, "acc_stderr": 0.011882349954722997, "acc_norm": 0.31681877444589307, "acc_norm_stderr": 0.011882349954722997 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.24019607843137256, "acc_stderr": 0.02998373305591361, "acc_norm": 0.24019607843137256, "acc_norm_stderr": 0.02998373305591361 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.2545454545454545, "acc_stderr": 0.03401506715249039, "acc_norm": 0.2545454545454545, "acc_norm_stderr": 0.03401506715249039 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2839657282741738, "mc1_stderr": 0.01578537085839672, "mc2": 0.4359830362647055, "mc2_stderr": 0.015231569782976262 }, "harness|ko_commongen_v2|2": { "acc": 0.5785123966942148, "acc_stderr": 0.016977101932601518, "acc_norm": 0.5855962219598583, "acc_norm_stderr": 0.016936583383943615 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, 
"harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "KT-AI/midm-bitext-S-7B-inst-v2", "model_sha": "a1d976b7852265bdac94f05a9ce34a48add84236", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }