{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.44880546075085326, "acc_stderr": 0.014534599585097667, "acc_norm": 0.5059726962457338, "acc_norm_stderr": 0.014610348300255793 }, "harness|ko_hellaswag|10": { "acc": 0.4371639115714001, "acc_stderr": 0.004950221546187574, "acc_norm": 0.6050587532364071, "acc_norm_stderr": 0.0048783902265917105 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.5730994152046783, "acc_stderr": 0.03793620616529916, "acc_norm": 0.5730994152046783, "acc_norm_stderr": 0.03793620616529916 }, "harness|ko_mmlu_management|5": { "acc": 0.6504854368932039, "acc_stderr": 0.047211885060971716, "acc_norm": 0.6504854368932039, "acc_norm_stderr": 0.047211885060971716 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.6372924648786717, "acc_stderr": 0.017192708674602302, "acc_norm": 0.6372924648786717, "acc_norm_stderr": 0.017192708674602302 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.4740740740740741, "acc_stderr": 0.04313531696750573, "acc_norm": 0.4740740740740741, "acc_norm_stderr": 0.04313531696750573 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.18, "acc_stderr": 0.038612291966536955, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536955 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.4595744680851064, "acc_stderr": 0.03257901482099836, "acc_norm": 0.4595744680851064, "acc_norm_stderr": 0.03257901482099836 }, "harness|ko_mmlu_virology|5": { "acc": 0.4879518072289157, "acc_stderr": 0.0389136449583582, "acc_norm": 0.4879518072289157, "acc_norm_stderr": 0.0389136449583582 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.6077170418006431, "acc_stderr": 0.027731258647011994, "acc_norm": 0.6077170418006431, "acc_norm_stderr": 0.027731258647011994 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.5739910313901345, "acc_stderr": 0.0331883328621728, "acc_norm": 0.5739910313901345, "acc_norm_stderr": 0.0331883328621728 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.6030534351145038, "acc_stderr": 0.04291135671009224, "acc_norm": 0.6030534351145038, "acc_norm_stderr": 0.04291135671009224 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.48, "acc_stderr": 0.05021167315686779, "acc_norm": 0.48, "acc_norm_stderr": 0.05021167315686779 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.7222222222222222, "acc_stderr": 0.03191178226713545, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.03191178226713545 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.46206896551724136, "acc_stderr": 0.04154659671707548, "acc_norm": 0.46206896551724136, "acc_norm_stderr": 0.04154659671707548 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.3137254901960784, "acc_stderr": 0.04617034827006718, "acc_norm": 0.3137254901960784, "acc_norm_stderr": 0.04617034827006718 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.6134453781512605, "acc_stderr": 0.03163145807552378, "acc_norm": 0.6134453781512605, "acc_norm_stderr": 0.03163145807552378 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.558974358974359, "acc_stderr": 0.025174048384000718, "acc_norm": 0.558974358974359, "acc_norm_stderr": 0.025174048384000718 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.6388888888888888, "acc_stderr": 0.04643454608906275, "acc_norm": 0.6388888888888888, 
"acc_norm_stderr": 0.04643454608906275 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3793103448275862, "acc_stderr": 0.034139638059062345, "acc_norm": 0.3793103448275862, "acc_norm_stderr": 0.034139638059062345 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.5870967741935483, "acc_stderr": 0.028009138125400374, "acc_norm": 0.5870967741935483, "acc_norm_stderr": 0.028009138125400374 }, "harness|ko_mmlu_marketing|5": { "acc": 0.7735042735042735, "acc_stderr": 0.027421007295392926, "acc_norm": 0.7735042735042735, "acc_norm_stderr": 0.027421007295392926 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.5245283018867924, "acc_stderr": 0.030735822206205615, "acc_norm": 0.5245283018867924, "acc_norm_stderr": 0.030735822206205615 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5909090909090909, "acc_stderr": 0.04709306978661895, "acc_norm": 0.5909090909090909, "acc_norm_stderr": 0.04709306978661895 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3, "acc_stderr": 0.027940457136228402, "acc_norm": 0.3, "acc_norm_stderr": 0.027940457136228402 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.03802039760107903, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.03802039760107903 }, "harness|ko_mmlu_sociology|5": { "acc": 0.7263681592039801, "acc_stderr": 0.031524391865554044, "acc_norm": 0.7263681592039801, "acc_norm_stderr": 0.031524391865554044 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.48554913294797686, "acc_stderr": 0.03810871630454764, "acc_norm": 0.48554913294797686, "acc_norm_stderr": 0.03810871630454764 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.4417989417989418, "acc_stderr": 0.02557625706125383, "acc_norm": 0.4417989417989418, "acc_norm_stderr": 0.02557625706125383 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.4791666666666667, "acc_stderr": 0.04177578950739993, "acc_norm": 0.4791666666666667, "acc_norm_stderr": 0.04177578950739993 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.65, "acc_stderr": 0.047937248544110196, "acc_norm": 0.65, "acc_norm_stderr": 0.047937248544110196 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.5809248554913294, "acc_stderr": 0.026564178111422622, "acc_norm": 0.5809248554913294, "acc_norm_stderr": 0.026564178111422622 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.5153374233128835, "acc_stderr": 0.039265223787088445, "acc_norm": 0.5153374233128835, "acc_norm_stderr": 0.039265223787088445 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.5864197530864198, "acc_stderr": 0.02740204204026996, "acc_norm": 0.5864197530864198, "acc_norm_stderr": 0.02740204204026996 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.7046632124352331, "acc_stderr": 0.032922966391551414, "acc_norm": 0.7046632124352331, "acc_norm_stderr": 0.032922966391551414 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.4473684210526316, "acc_stderr": 0.046774730044912, "acc_norm": 0.4473684210526316, "acc_norm_stderr": 0.046774730044912 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.6623853211009174, "acc_stderr": 0.020275265986638917, "acc_norm": 0.6623853211009174, "acc_norm_stderr": 0.020275265986638917 }, "harness|ko_mmlu_formal_logic|5": { "acc": 
0.4126984126984127, "acc_stderr": 0.04403438954768177, "acc_norm": 0.4126984126984127, "acc_norm_stderr": 0.04403438954768177 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.5882352941176471, "acc_stderr": 0.02818059632825929, "acc_norm": 0.5882352941176471, "acc_norm_stderr": 0.02818059632825929 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.61, "acc_stderr": 0.049020713000019756, "acc_norm": 0.61, "acc_norm_stderr": 0.049020713000019756 }, "harness|ko_mmlu_international_law|5": { "acc": 0.71900826446281, "acc_stderr": 0.04103203830514511, "acc_norm": 0.71900826446281, "acc_norm_stderr": 0.04103203830514511 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.5263157894736842, "acc_stderr": 0.04063302731486671, "acc_norm": 0.5263157894736842, "acc_norm_stderr": 0.04063302731486671 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.5, "acc_stderr": 0.020227834851568375, "acc_norm": 0.5, "acc_norm_stderr": 0.020227834851568375 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.40425531914893614, "acc_stderr": 0.02927553215970473, "acc_norm": 0.40425531914893614, "acc_norm_stderr": 0.02927553215970473 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.4107142857142857, "acc_stderr": 0.04669510663875192, "acc_norm": 0.4107142857142857, "acc_norm_stderr": 0.04669510663875192 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.5324074074074074, "acc_stderr": 0.034028015813589656, "acc_norm": 0.5324074074074074, "acc_norm_stderr": 0.034028015813589656 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2737430167597765, "acc_stderr": 0.014912413096372432, "acc_norm": 0.2737430167597765, "acc_norm_stderr": 0.014912413096372432 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.65, "acc_stderr": 0.047937248544110196, "acc_norm": 0.65, "acc_norm_stderr": 0.047937248544110196 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.49264705882352944, "acc_stderr": 0.030369552523902173, "acc_norm": 0.49264705882352944, "acc_norm_stderr": 0.030369552523902173 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.6040816326530613, "acc_stderr": 0.03130802899065686, "acc_norm": 0.6040816326530613, "acc_norm_stderr": 0.03130802899065686 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.759493670886076, "acc_stderr": 0.027820781981149678, "acc_norm": 0.759493670886076, "acc_norm_stderr": 0.027820781981149678 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.41264667535853977, "acc_stderr": 0.012573836633799022, "acc_norm": 0.41264667535853977, "acc_norm_stderr": 0.012573836633799022 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.6617647058823529, "acc_stderr": 0.0332057461294543, "acc_norm": 0.6617647058823529, "acc_norm_stderr": 0.0332057461294543 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.7090909090909091, "acc_stderr": 0.03546563019624335, "acc_norm": 0.7090909090909091, "acc_norm_stderr": 0.03546563019624335 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.423500611995104, "mc1_stderr": 0.01729742144853472, "mc2": 0.5984445406996183, "mc2_stderr": 0.015659729754718226 }, "harness|ko_commongen_v2|2": { "acc": 0.6174734356552538, "acc_stderr": 0.01670916538722883, "acc_norm": 0.6340023612750886, "acc_norm_stderr": 0.016561489664895714 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, 
"harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "LDCC/LDCC-SOLAR-10.7B", "model_sha": "43af13fd87ce9041d0a60489f7b0d357febf14de", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }