{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.44283276450511944, "acc_stderr": 0.0145155738733489, "acc_norm": 0.4906143344709898, "acc_norm_stderr": 0.014608816322065003 }, "harness|ko_hellaswag|10": { "acc": 0.2562238597888867, "acc_stderr": 0.0043565471858470406, "acc_norm": 0.2566221868153754, "acc_norm_stderr": 0.00435876459640104 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.6491228070175439, "acc_stderr": 0.03660298834049163, "acc_norm": 0.6491228070175439, "acc_norm_stderr": 0.03660298834049163 }, "harness|ko_mmlu_management|5": { "acc": 0.6407766990291263, "acc_stderr": 0.04750458399041696, "acc_norm": 0.6407766990291263, "acc_norm_stderr": 0.04750458399041696 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.6602809706257982, "acc_stderr": 0.01693639411430163, "acc_norm": 0.6602809706257982, "acc_norm_stderr": 0.01693639411430163 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.45925925925925926, "acc_stderr": 0.04304979692464244, "acc_norm": 0.45925925925925926, "acc_norm_stderr": 0.04304979692464244 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.5106382978723404, "acc_stderr": 0.03267862331014063, "acc_norm": 0.5106382978723404, "acc_norm_stderr": 0.03267862331014063 }, "harness|ko_mmlu_virology|5": { "acc": 0.4939759036144578, "acc_stderr": 0.03892212195333047, "acc_norm": 0.4939759036144578, "acc_norm_stderr": 0.03892212195333047 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.5884244372990354, "acc_stderr": 0.027950481494401273, "acc_norm": 0.5884244372990354, "acc_norm_stderr": 0.027950481494401273 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.6098654708520179, "acc_stderr": 0.03273766725459156, "acc_norm": 0.6098654708520179, "acc_norm_stderr": 0.03273766725459156 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.5877862595419847, "acc_stderr": 0.043171711948702556, "acc_norm": 0.5877862595419847, "acc_norm_stderr": 0.043171711948702556 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.44, "acc_stderr": 0.049888765156985905, "acc_norm": 0.44, "acc_norm_stderr": 0.049888765156985905 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.7626262626262627, "acc_stderr": 0.030313710538198906, "acc_norm": 0.7626262626262627, "acc_norm_stderr": 0.030313710538198906 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.5379310344827586, "acc_stderr": 0.04154659671707548, "acc_norm": 0.5379310344827586, "acc_norm_stderr": 0.04154659671707548 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.30392156862745096, "acc_stderr": 0.045766654032077615, "acc_norm": 0.30392156862745096, "acc_norm_stderr": 0.045766654032077615 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.6302521008403361, "acc_stderr": 0.03135709599613591, "acc_norm": 0.6302521008403361, "acc_norm_stderr": 0.03135709599613591 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.5435897435897435, "acc_stderr": 0.025254485424799595, "acc_norm": 0.5435897435897435, "acc_norm_stderr": 0.025254485424799595 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.32, "acc_stderr": 0.04688261722621505, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621505 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.6388888888888888, "acc_stderr": 0.04643454608906275, "acc_norm": 0.6388888888888888, 
"acc_norm_stderr": 0.04643454608906275 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4236453201970443, "acc_stderr": 0.03476725747649037, "acc_norm": 0.4236453201970443, "acc_norm_stderr": 0.03476725747649037 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.6161290322580645, "acc_stderr": 0.02766618207553965, "acc_norm": 0.6161290322580645, "acc_norm_stderr": 0.02766618207553965 }, "harness|ko_mmlu_marketing|5": { "acc": 0.7948717948717948, "acc_stderr": 0.026453508054040346, "acc_norm": 0.7948717948717948, "acc_norm_stderr": 0.026453508054040346 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.5471698113207547, "acc_stderr": 0.030635627957961816, "acc_norm": 0.5471698113207547, "acc_norm_stderr": 0.030635627957961816 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5909090909090909, "acc_stderr": 0.047093069786618945, "acc_norm": 0.5909090909090909, "acc_norm_stderr": 0.047093069786618945 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3592592592592593, "acc_stderr": 0.029252905927251976, "acc_norm": 0.3592592592592593, "acc_norm_stderr": 0.029252905927251976 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.3841059602649007, "acc_stderr": 0.03971301814719198, "acc_norm": 0.3841059602649007, "acc_norm_stderr": 0.03971301814719198 }, "harness|ko_mmlu_sociology|5": { "acc": 0.7064676616915423, "acc_stderr": 0.032200241045342054, "acc_norm": 0.7064676616915423, "acc_norm_stderr": 0.032200241045342054 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.48554913294797686, "acc_stderr": 0.03810871630454764, "acc_norm": 0.48554913294797686, "acc_norm_stderr": 0.03810871630454764 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.3941798941798942, "acc_stderr": 0.025167982333894143, "acc_norm": 0.3941798941798942, "acc_norm_stderr": 0.025167982333894143 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.5069444444444444, "acc_stderr": 0.04180806750294938, "acc_norm": 0.5069444444444444, "acc_norm_stderr": 0.04180806750294938 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.76, "acc_stderr": 0.04292346959909283, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909283 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.653179190751445, "acc_stderr": 0.025624723994030457, "acc_norm": 0.653179190751445, "acc_norm_stderr": 0.025624723994030457 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.5276073619631901, "acc_stderr": 0.03922378290610991, "acc_norm": 0.5276073619631901, "acc_norm_stderr": 0.03922378290610991 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.6111111111111112, "acc_stderr": 0.02712511551316686, "acc_norm": 0.6111111111111112, "acc_norm_stderr": 0.02712511551316686 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.7357512953367875, "acc_stderr": 0.03182155050916647, "acc_norm": 0.7357512953367875, "acc_norm_stderr": 0.03182155050916647 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.41228070175438597, "acc_stderr": 0.046306532033665956, "acc_norm": 0.41228070175438597, "acc_norm_stderr": 0.046306532033665956 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.6880733944954128, "acc_stderr": 0.019862967976707245, "acc_norm": 0.6880733944954128, "acc_norm_stderr": 0.019862967976707245 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.40476190476190477, "acc_stderr": 0.04390259265377561, "acc_norm": 0.40476190476190477, "acc_norm_stderr": 0.04390259265377561 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.5686274509803921, "acc_stderr": 0.028358956313423552, "acc_norm": 0.5686274509803921, "acc_norm_stderr": 0.028358956313423552 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|ko_mmlu_international_law|5": { "acc": 0.743801652892562, "acc_stderr": 0.03984979653302871, "acc_norm": 0.743801652892562, "acc_norm_stderr": 0.03984979653302871 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.5657894736842105, "acc_stderr": 0.040335656678483205, "acc_norm": 0.5657894736842105, "acc_norm_stderr": 0.040335656678483205 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.5065359477124183, "acc_stderr": 0.020226106567657807, "acc_norm": 0.5065359477124183, "acc_norm_stderr": 0.020226106567657807 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3829787234042553, "acc_stderr": 0.02899908090480618, "acc_norm": 0.3829787234042553, "acc_norm_stderr": 0.02899908090480618 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.3482142857142857, "acc_stderr": 0.045218299028335865, "acc_norm": 0.3482142857142857, "acc_norm_stderr": 0.045218299028335865 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.5787037037037037, "acc_stderr": 0.03367462138896078, "acc_norm": 0.5787037037037037, "acc_norm_stderr": 0.03367462138896078 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2100558659217877, "acc_stderr": 0.013623755371333528, "acc_norm": 0.2100558659217877, "acc_norm_stderr": 0.013623755371333528 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.63, "acc_stderr": 0.048523658709391, "acc_norm": 0.63, "acc_norm_stderr": 0.048523658709391 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.5220588235294118, "acc_stderr": 0.03034326422421352, "acc_norm": 0.5220588235294118, "acc_norm_stderr": 0.03034326422421352 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.6081632653061224, "acc_stderr": 0.031251275910891656, "acc_norm": 0.6081632653061224, "acc_norm_stderr": 0.031251275910891656 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.7552742616033755, "acc_stderr": 0.027985699387036423, "acc_norm": 0.7552742616033755, "acc_norm_stderr": 0.027985699387036423 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.3878748370273794, "acc_stderr": 0.012444998309675631, "acc_norm": 0.3878748370273794, "acc_norm_stderr": 0.012444998309675631 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.5392156862745098, "acc_stderr": 0.03498501649369527, "acc_norm": 0.5392156862745098, "acc_norm_stderr": 0.03498501649369527 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5818181818181818, "acc_stderr": 0.03851716319398394, "acc_norm": 0.5818181818181818, "acc_norm_stderr": 0.03851716319398394 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2998776009791922, "mc1_stderr": 0.016040352966713613, "mc2": 0.4576126744740946, "mc2_stderr": 0.015112274979113303 }, "harness|ko_commongen_v2|2": { "acc": 0.08382526564344746, "acc_stderr": 0.009527773913592174, "acc_norm": 0.29634002361275086, "acc_norm_stderr": 0.015699701628594232 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, 
"harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "Edentns/DataVortexS-10.7B-v1.0", "model_sha": "888a73a4281e4cb1b64696e5d4c8a1a7b59b3024", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }