{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.4257679180887372, "acc_stderr": 0.01444946427886881, "acc_norm": 0.4812286689419795, "acc_norm_stderr": 0.014601090150633964 }, "harness|ko_hellaswag|10": { "acc": 0.4166500697072296, "acc_stderr": 0.004919962822208316, "acc_norm": 0.5664210316669986, "acc_norm_stderr": 0.004945558069852528 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.4269005847953216, "acc_stderr": 0.03793620616529917, "acc_norm": 0.4269005847953216, "acc_norm_stderr": 0.03793620616529917 }, "harness|ko_mmlu_management|5": { "acc": 0.42718446601941745, "acc_stderr": 0.04897957737781168, "acc_norm": 0.42718446601941745, "acc_norm_stderr": 0.04897957737781168 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.42656449553001274, "acc_stderr": 0.01768606697567564, "acc_norm": 0.42656449553001274, "acc_norm_stderr": 0.01768606697567564 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.3111111111111111, "acc_stderr": 0.03999262876617723, "acc_norm": 0.3111111111111111, "acc_norm_stderr": 0.03999262876617723 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.37872340425531914, "acc_stderr": 0.03170995606040655, "acc_norm": 0.37872340425531914, "acc_norm_stderr": 0.03170995606040655 }, "harness|ko_mmlu_virology|5": { "acc": 0.3493975903614458, "acc_stderr": 0.037117251907407486, "acc_norm": 0.3493975903614458, "acc_norm_stderr": 0.037117251907407486 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.34726688102893893, "acc_stderr": 0.027040745502307336, "acc_norm": 0.34726688102893893, "acc_norm_stderr": 0.027040745502307336 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.4977578475336323, "acc_stderr": 0.033557465352232634, "acc_norm": 0.4977578475336323, "acc_norm_stderr": 0.033557465352232634 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.3282442748091603, "acc_stderr": 0.041184385658062976, "acc_norm": 0.3282442748091603, "acc_norm_stderr": 0.041184385658062976 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.4292929292929293, "acc_stderr": 0.03526552724601199, "acc_norm": 0.4292929292929293, "acc_norm_stderr": 0.03526552724601199 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.27586206896551724, "acc_stderr": 0.03724563619774634, "acc_norm": 0.27586206896551724, "acc_norm_stderr": 0.03724563619774634 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.20588235294117646, "acc_stderr": 0.040233822736177455, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.040233822736177455 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.3739495798319328, "acc_stderr": 0.031429466378837076, "acc_norm": 0.3739495798319328, "acc_norm_stderr": 0.031429466378837076 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.33076923076923076, "acc_stderr": 0.023854795680971142, "acc_norm": 0.33076923076923076, "acc_norm_stderr": 0.023854795680971142 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.44, "acc_stderr": 0.049888765156985884, "acc_norm": 0.44, "acc_norm_stderr": 0.049888765156985884 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.4166666666666667, "acc_stderr": 0.04766075165356461, "acc_norm": 
0.4166666666666667, "acc_norm_stderr": 0.04766075165356461 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.2857142857142857, "acc_stderr": 0.03178529710642751, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.03178529710642751 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.3935483870967742, "acc_stderr": 0.027791878753132274, "acc_norm": 0.3935483870967742, "acc_norm_stderr": 0.027791878753132274 }, "harness|ko_mmlu_marketing|5": { "acc": 0.5769230769230769, "acc_stderr": 0.032366121762202014, "acc_norm": 0.5769230769230769, "acc_norm_stderr": 0.032366121762202014 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4339622641509434, "acc_stderr": 0.030503292013342596, "acc_norm": 0.4339622641509434, "acc_norm_stderr": 0.030503292013342596 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.44545454545454544, "acc_stderr": 0.04760548821460325, "acc_norm": 0.44545454545454544, "acc_norm_stderr": 0.04760548821460325 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.29259259259259257, "acc_stderr": 0.02773896963217609, "acc_norm": 0.29259259259259257, "acc_norm_stderr": 0.02773896963217609 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.26490066225165565, "acc_stderr": 0.03603038545360384, "acc_norm": 0.26490066225165565, "acc_norm_stderr": 0.03603038545360384 }, "harness|ko_mmlu_sociology|5": { "acc": 0.44776119402985076, "acc_stderr": 0.03516184772952167, "acc_norm": 0.44776119402985076, "acc_norm_stderr": 0.03516184772952167 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3063583815028902, "acc_stderr": 0.03514942551267437, "acc_norm": 0.3063583815028902, "acc_norm_stderr": 0.03514942551267437 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.291005291005291, "acc_stderr": 0.023393826500484875, "acc_norm": 0.291005291005291, "acc_norm_stderr": 0.023393826500484875 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3125, "acc_stderr": 0.038760854559127644, "acc_norm": 0.3125, "acc_norm_stderr": 0.038760854559127644 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.37, "acc_stderr": 0.048523658709390974, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709390974 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.36416184971098264, "acc_stderr": 0.025906632631016124, "acc_norm": 0.36416184971098264, "acc_norm_stderr": 0.025906632631016124 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3067484662576687, "acc_stderr": 0.036230899157241474, "acc_norm": 0.3067484662576687, "acc_norm_stderr": 0.036230899157241474 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.3395061728395062, "acc_stderr": 0.026348564412011628, "acc_norm": 0.3395061728395062, "acc_norm_stderr": 0.026348564412011628 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.24, "acc_stderr": 0.04292346959909284, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909284 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.3471502590673575, "acc_stderr": 0.03435696168361356, "acc_norm": 0.3471502590673575, "acc_norm_stderr": 0.03435696168361356 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.30701754385964913, "acc_stderr": 0.043391383225798594, "acc_norm": 0.30701754385964913, "acc_norm_stderr": 0.043391383225798594 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.3908256880733945, "acc_stderr": 0.020920058346111055, "acc_norm": 0.3908256880733945, "acc_norm_stderr": 0.020920058346111055 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.23015873015873015, "acc_stderr": 0.03764950879790606, "acc_norm": 0.23015873015873015, "acc_norm_stderr": 0.03764950879790606 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.32679738562091504, "acc_stderr": 0.026857294663281413, "acc_norm": 0.32679738562091504, "acc_norm_stderr": 0.026857294663281413 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|ko_mmlu_international_law|5": { "acc": 0.3884297520661157, "acc_stderr": 0.04449270350068382, "acc_norm": 0.3884297520661157, "acc_norm_stderr": 0.04449270350068382 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.2631578947368421, "acc_stderr": 0.03583496176361064, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.03583496176361064 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.35294117647058826, "acc_stderr": 0.019333142020797063, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.019333142020797063 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2978723404255319, "acc_stderr": 0.027281608344469414, "acc_norm": 0.2978723404255319, "acc_norm_stderr": 0.027281608344469414 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.4017857142857143, "acc_stderr": 0.04653333146973646, "acc_norm": 0.4017857142857143, "acc_norm_stderr": 0.04653333146973646 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.25462962962962965, "acc_stderr": 0.02971127586000533, "acc_norm": 0.25462962962962965, "acc_norm_stderr": 0.02971127586000533 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2324022346368715, "acc_stderr": 0.014125968754673392, "acc_norm": 0.2324022346368715, "acc_norm_stderr": 0.014125968754673392 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.2426470588235294, "acc_stderr": 0.026040662474201257, "acc_norm": 0.2426470588235294, "acc_norm_stderr": 0.026040662474201257 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.27346938775510204, "acc_stderr": 0.028535560337128438, "acc_norm": 0.27346938775510204, "acc_norm_stderr": 0.028535560337128438 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.45569620253164556, "acc_stderr": 0.03241920684693334, "acc_norm": 0.45569620253164556, "acc_norm_stderr": 0.03241920684693334 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.2835723598435463, "acc_stderr": 0.011511900775968325, "acc_norm": 0.2835723598435463, "acc_norm_stderr": 0.011511900775968325 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.3627450980392157, "acc_stderr": 0.03374499356319355, "acc_norm": 0.3627450980392157, "acc_norm_stderr": 0.03374499356319355 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.3696969696969697, "acc_stderr": 0.037694303145125674, "acc_norm": 0.3696969696969697, "acc_norm_stderr": 0.037694303145125674 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.3219094247246022, "mc1_stderr": 0.016355567611960393, "mc2": 0.49596301282377164, "mc2_stderr": 0.01583649347283567 }, "harness|ko_commongen_v2|2": { "acc": 0.45218417945690675, "acc_stderr": 0.017111567130916785, "acc_norm": 0.45808736717827625, "acc_norm_stderr": 0.017129852117911147 } }, "versions": { "all": 0, 
"harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "mssma/ko-solar-10.7b-v0.7", "model_sha": "d5f7cc575a22efc6bee5e6694dc04a5104b0a5c4", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }