{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.6407849829351536, "acc_stderr": 0.014020224155839154, "acc_norm": 0.7005119453924915, "acc_norm_stderr": 0.013385021637313565 }, "harness|ko_hellaswag|10": { "acc": 0.3986257717586138, "acc_stderr": 0.004886147907627405, "acc_norm": 0.5222067317267477, "acc_norm_stderr": 0.004984857671187099 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.5906432748538012, "acc_stderr": 0.037712831076265434, "acc_norm": 0.5906432748538012, "acc_norm_stderr": 0.037712831076265434 }, "harness|ko_mmlu_management|5": { "acc": 0.5825242718446602, "acc_stderr": 0.048828405482122375, "acc_norm": 0.5825242718446602, "acc_norm_stderr": 0.048828405482122375 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.6296296296296297, "acc_stderr": 0.017268607560005794, "acc_norm": 0.6296296296296297, "acc_norm_stderr": 0.017268607560005794 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.3925925925925926, "acc_stderr": 0.04218506215368879, "acc_norm": 0.3925925925925926, "acc_norm_stderr": 0.04218506215368879 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.43829787234042555, "acc_stderr": 0.03243618636108102, "acc_norm": 0.43829787234042555, "acc_norm_stderr": 0.03243618636108102 }, "harness|ko_mmlu_virology|5": { "acc": 0.5180722891566265, "acc_stderr": 0.038899512528272166, "acc_norm": 0.5180722891566265, "acc_norm_stderr": 0.038899512528272166 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.5755627009646302, "acc_stderr": 0.028071928247946205, "acc_norm": 0.5755627009646302, "acc_norm_stderr": 0.028071928247946205 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.600896860986547, "acc_stderr": 0.03286745312567961, "acc_norm": 0.600896860986547, "acc_norm_stderr": 0.03286745312567961 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.5572519083969466, "acc_stderr": 0.04356447202665069, "acc_norm": 0.5572519083969466, "acc_norm_stderr": 0.04356447202665069 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.6363636363636364, "acc_stderr": 0.034273086529999344, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.034273086529999344 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.496551724137931, "acc_stderr": 0.041665675771015785, "acc_norm": 0.496551724137931, "acc_norm_stderr": 0.041665675771015785 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.29411764705882354, "acc_stderr": 0.04533838195929775, "acc_norm": 0.29411764705882354, "acc_norm_stderr": 0.04533838195929775 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.5126050420168067, "acc_stderr": 0.032468167657521745, "acc_norm": 0.5126050420168067, "acc_norm_stderr": 0.032468167657521745 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.5564102564102564, "acc_stderr": 0.025189149894764215, "acc_norm": 0.5564102564102564, "acc_norm_stderr": 0.025189149894764215 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5277777777777778, "acc_stderr": 0.048262172941398944, "acc_norm": 0.5277777777777778, 
"acc_norm_stderr": 0.048262172941398944 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4482758620689655, "acc_stderr": 0.03499113137676744, "acc_norm": 0.4482758620689655, "acc_norm_stderr": 0.03499113137676744 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.5806451612903226, "acc_stderr": 0.02807158890109185, "acc_norm": 0.5806451612903226, "acc_norm_stderr": 0.02807158890109185 }, "harness|ko_mmlu_marketing|5": { "acc": 0.7905982905982906, "acc_stderr": 0.02665569965392273, "acc_norm": 0.7905982905982906, "acc_norm_stderr": 0.02665569965392273 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.5320754716981132, "acc_stderr": 0.030709486992556545, "acc_norm": 0.5320754716981132, "acc_norm_stderr": 0.030709486992556545 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5636363636363636, "acc_stderr": 0.04750185058907296, "acc_norm": 0.5636363636363636, "acc_norm_stderr": 0.04750185058907296 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.32222222222222224, "acc_stderr": 0.028493465091028604, "acc_norm": 0.32222222222222224, "acc_norm_stderr": 0.028493465091028604 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.31125827814569534, "acc_stderr": 0.03780445850526733, "acc_norm": 0.31125827814569534, "acc_norm_stderr": 0.03780445850526733 }, "harness|ko_mmlu_sociology|5": { "acc": 0.6616915422885572, "acc_stderr": 0.03345563070339191, "acc_norm": 0.6616915422885572, "acc_norm_stderr": 0.03345563070339191 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.48554913294797686, "acc_stderr": 0.03810871630454764, "acc_norm": 0.48554913294797686, "acc_norm_stderr": 0.03810871630454764 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.35978835978835977, "acc_stderr": 0.024718075944129277, "acc_norm": 0.35978835978835977, "acc_norm_stderr": 0.024718075944129277 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.4305555555555556, "acc_stderr": 0.041406856391115014, "acc_norm": 0.4305555555555556, "acc_norm_stderr": 0.041406856391115014 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.78, "acc_stderr": 0.04163331998932264, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932264 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5, "acc_stderr": 0.026919095102908273, "acc_norm": 0.5, "acc_norm_stderr": 0.026919095102908273 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.5398773006134969, "acc_stderr": 0.03915857291436972, "acc_norm": 0.5398773006134969, "acc_norm_stderr": 0.03915857291436972 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.5802469135802469, "acc_stderr": 0.027460099557005128, "acc_norm": 0.5802469135802469, "acc_norm_stderr": 0.027460099557005128 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.6373056994818653, "acc_stderr": 0.034697137917043715, "acc_norm": 0.6373056994818653, "acc_norm_stderr": 0.034697137917043715 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.30701754385964913, "acc_stderr": 0.0433913832257986, "acc_norm": 0.30701754385964913, "acc_norm_stderr": 0.0433913832257986 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.6990825688073394, "acc_stderr": 0.01966475136680211, "acc_norm": 0.6990825688073394, "acc_norm_stderr": 0.01966475136680211 }, "harness|ko_mmlu_formal_logic|5": { "acc": 
0.3253968253968254, "acc_stderr": 0.041905964388711366, "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.041905964388711366 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.5196078431372549, "acc_stderr": 0.028607893699576073, "acc_norm": 0.5196078431372549, "acc_norm_stderr": 0.028607893699576073 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.49, "acc_stderr": 0.05024183937956911, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956911 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6611570247933884, "acc_stderr": 0.04320767807536671, "acc_norm": 0.6611570247933884, "acc_norm_stderr": 0.04320767807536671 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.5657894736842105, "acc_stderr": 0.040335656678483205, "acc_norm": 0.5657894736842105, "acc_norm_stderr": 0.040335656678483205 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.46568627450980393, "acc_stderr": 0.020180144843307293, "acc_norm": 0.46568627450980393, "acc_norm_stderr": 0.020180144843307293 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.40425531914893614, "acc_stderr": 0.029275532159704725, "acc_norm": 0.40425531914893614, "acc_norm_stderr": 0.029275532159704725 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.35714285714285715, "acc_stderr": 0.04547960999764376, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.04547960999764376 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.39814814814814814, "acc_stderr": 0.033384734032074016, "acc_norm": 0.39814814814814814, "acc_norm_stderr": 0.033384734032074016 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.24581005586592178, "acc_stderr": 0.014400296429225608, "acc_norm": 0.24581005586592178, "acc_norm_stderr": 0.014400296429225608 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.58, "acc_stderr": 0.04960449637488583, "acc_norm": 0.58, "acc_norm_stderr": 0.04960449637488583 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4485294117647059, "acc_stderr": 0.0302114796091216, "acc_norm": 0.4485294117647059, "acc_norm_stderr": 0.0302114796091216 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.5795918367346938, "acc_stderr": 0.031601069934496004, "acc_norm": 0.5795918367346938, "acc_norm_stderr": 0.031601069934496004 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.6075949367088608, "acc_stderr": 0.03178471874564729, "acc_norm": 0.6075949367088608, "acc_norm_stderr": 0.03178471874564729 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.3891786179921773, "acc_stderr": 0.012452613934287022, "acc_norm": 0.3891786179921773, "acc_norm_stderr": 0.012452613934287022 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.5049019607843137, "acc_stderr": 0.035091433756067866, "acc_norm": 0.5049019607843137, "acc_norm_stderr": 0.035091433756067866 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5151515151515151, "acc_stderr": 0.03902551007374449, "acc_norm": 0.5151515151515151, "acc_norm_stderr": 0.03902551007374449 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.5397796817625459, "mc1_stderr": 0.017448017223960877, "mc2": 0.646964625973436, "mc2_stderr": 0.014905252003582121 }, "harness|ko_commongen_v2|2": { "acc": 0.45218417945690675, "acc_stderr": 0.017111567130916785, "acc_norm": 0.5112160566706021, "acc_norm_stderr": 0.017186028469489287 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, 
"harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "Deepnoid/deep-solar-v2.0.2", "model_sha": "3a3d413e5fdbf6add6c9d79a994d0328665e46d3", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }