{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.34897610921501704, "acc_stderr": 0.0139289334613825, "acc_norm": 0.4129692832764505, "acc_norm_stderr": 0.014388344935398324 }, "harness|ko_hellaswag|10": { "acc": 0.2504481179047998, "acc_stderr": 0.004323856300539177, "acc_norm": 0.2504481179047998, "acc_norm_stderr": 0.004323856300539177 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.6198830409356725, "acc_stderr": 0.03722965741385539, "acc_norm": 0.6198830409356725, "acc_norm_stderr": 0.03722965741385539 }, "harness|ko_mmlu_management|5": { "acc": 0.6699029126213593, "acc_stderr": 0.0465614711001235, "acc_norm": 0.6699029126213593, "acc_norm_stderr": 0.0465614711001235 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5530012771392082, "acc_stderr": 0.017779225233394216, "acc_norm": 0.5530012771392082, "acc_norm_stderr": 0.017779225233394216 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.4222222222222222, "acc_stderr": 0.042667634040995814, "acc_norm": 0.4222222222222222, "acc_norm_stderr": 0.042667634040995814 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.44680851063829785, "acc_stderr": 0.032500536843658404, "acc_norm": 0.44680851063829785, "acc_norm_stderr": 0.032500536843658404 }, "harness|ko_mmlu_virology|5": { "acc": 0.3855421686746988, "acc_stderr": 0.03789134424611549, "acc_norm": 0.3855421686746988, "acc_norm_stderr": 0.03789134424611549 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.5369774919614148, "acc_stderr": 0.028320325830105915, "acc_norm": 0.5369774919614148, "acc_norm_stderr": 0.028320325830105915 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.5112107623318386, "acc_stderr": 0.033549366530984746, "acc_norm": 0.5112107623318386, "acc_norm_stderr": 0.033549366530984746 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.5267175572519084, "acc_stderr": 0.04379024936553893, "acc_norm": 0.5267175572519084, "acc_norm_stderr": 0.04379024936553893 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.6616161616161617, "acc_stderr": 0.033711241426263014, "acc_norm": 0.6616161616161617, "acc_norm_stderr": 0.033711241426263014 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.5172413793103449, "acc_stderr": 0.04164188720169375, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.04164188720169375 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.3137254901960784, "acc_stderr": 0.04617034827006718, "acc_norm": 0.3137254901960784, "acc_norm_stderr": 0.04617034827006718 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.5882352941176471, "acc_stderr": 0.031968769891957786, "acc_norm": 0.5882352941176471, "acc_norm_stderr": 0.031968769891957786 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.517948717948718, "acc_stderr": 0.02533466708095489, "acc_norm": 0.517948717948718, "acc_norm_stderr": 0.02533466708095489 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.58, "acc_stderr": 0.04960449637488583, "acc_norm": 0.58, "acc_norm_stderr": 0.04960449637488583 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.6388888888888888, "acc_stderr": 0.04643454608906275, "acc_norm": 0.6388888888888888, 
"acc_norm_stderr": 0.04643454608906275 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4039408866995074, "acc_stderr": 0.0345245390382204, "acc_norm": 0.4039408866995074, "acc_norm_stderr": 0.0345245390382204 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.5161290322580645, "acc_stderr": 0.02842920317672455, "acc_norm": 0.5161290322580645, "acc_norm_stderr": 0.02842920317672455 }, "harness|ko_mmlu_marketing|5": { "acc": 0.7649572649572649, "acc_stderr": 0.027778835904935434, "acc_norm": 0.7649572649572649, "acc_norm_stderr": 0.027778835904935434 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4867924528301887, "acc_stderr": 0.030762134874500482, "acc_norm": 0.4867924528301887, "acc_norm_stderr": 0.030762134874500482 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.509090909090909, "acc_stderr": 0.04788339768702861, "acc_norm": 0.509090909090909, "acc_norm_stderr": 0.04788339768702861 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.3962962962962963, "acc_stderr": 0.029822619458533997, "acc_norm": 0.3962962962962963, "acc_norm_stderr": 0.029822619458533997 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 0.038796870240733264, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.038796870240733264 }, "harness|ko_mmlu_sociology|5": { "acc": 0.6318407960199005, "acc_stderr": 0.03410410565495302, "acc_norm": 0.6318407960199005, "acc_norm_stderr": 0.03410410565495302 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.43352601156069365, "acc_stderr": 0.03778621079092055, "acc_norm": 0.43352601156069365, "acc_norm_stderr": 0.03778621079092055 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.4365079365079365, "acc_stderr": 0.02554284681740051, "acc_norm": 0.4365079365079365, "acc_norm_stderr": 0.02554284681740051 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.4652777777777778, "acc_stderr": 0.04171115858181618, "acc_norm": 0.4652777777777778, "acc_norm_stderr": 0.04171115858181618 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.56, "acc_stderr": 0.049888765156985884, "acc_norm": 0.56, "acc_norm_stderr": 0.049888765156985884 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.49710982658959535, "acc_stderr": 0.02691864538323901, "acc_norm": 0.49710982658959535, "acc_norm_stderr": 0.02691864538323901 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.5214723926380368, "acc_stderr": 0.03924746876751129, "acc_norm": 0.5214723926380368, "acc_norm_stderr": 0.03924746876751129 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.49691358024691357, "acc_stderr": 0.027820214158594377, "acc_norm": 0.49691358024691357, "acc_norm_stderr": 0.027820214158594377 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5647668393782384, "acc_stderr": 0.03578038165008586, "acc_norm": 0.5647668393782384, "acc_norm_stderr": 0.03578038165008586 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.38596491228070173, "acc_stderr": 0.04579639422070434, "acc_norm": 0.38596491228070173, "acc_norm_stderr": 0.04579639422070434 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.6018348623853211, "acc_stderr": 0.020987989422654264, "acc_norm": 0.6018348623853211, "acc_norm_stderr": 0.020987989422654264 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04216370213557835, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04216370213557835 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.5098039215686274, "acc_stderr": 0.02862441255016795, "acc_norm": 0.5098039215686274, "acc_norm_stderr": 0.02862441255016795 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.65, "acc_stderr": 0.047937248544110196, "acc_norm": 0.65, "acc_norm_stderr": 0.047937248544110196 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6446280991735537, "acc_stderr": 0.0436923632657398, "acc_norm": 0.6446280991735537, "acc_norm_stderr": 0.0436923632657398 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.5526315789473685, "acc_stderr": 0.040463368839782486, "acc_norm": 0.5526315789473685, "acc_norm_stderr": 0.040463368839782486 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.4411764705882353, "acc_stderr": 0.02008736207670285, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.02008736207670285 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3723404255319149, "acc_stderr": 0.02883892147125146, "acc_norm": 0.3723404255319149, "acc_norm_stderr": 0.02883892147125146 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.35714285714285715, "acc_stderr": 0.04547960999764376, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.04547960999764376 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.38425925925925924, "acc_stderr": 0.03317354514310742, "acc_norm": 0.38425925925925924, "acc_norm_stderr": 0.03317354514310742 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2569832402234637, "acc_stderr": 0.014614465821966337, "acc_norm": 0.2569832402234637, "acc_norm_stderr": 0.014614465821966337 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.61, "acc_stderr": 0.04902071300001974, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001974 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4117647058823529, "acc_stderr": 0.02989616303312547, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.02989616303312547 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.46938775510204084, "acc_stderr": 0.031949171367580624, "acc_norm": 0.46938775510204084, "acc_norm_stderr": 0.031949171367580624 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5864978902953587, "acc_stderr": 0.03205649904851858, "acc_norm": 0.5864978902953587, "acc_norm_stderr": 0.03205649904851858 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.3644067796610169, "acc_stderr": 0.012291694983056477, "acc_norm": 0.3644067796610169, "acc_norm_stderr": 0.012291694983056477 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.5441176470588235, "acc_stderr": 0.03495624522015477, "acc_norm": 0.5441176470588235, "acc_norm_stderr": 0.03495624522015477 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5272727272727272, "acc_stderr": 0.03898531605579418, "acc_norm": 0.5272727272727272, "acc_norm_stderr": 0.03898531605579418 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2962056303549572, "mc1_stderr": 0.015983595101811392, "mc2": 0.4689813000124781, "mc2_stderr": 0.015471857359723505 }, "harness|ko_commongen_v2|2": { "acc": 0.4002361275088548, "acc_stderr": 0.016844693510505045, "acc_norm": 0.4817001180637544, "acc_norm_stderr": 0.01717883663917776 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "Edentns/Worktro-Small-v0.1", "model_sha": "881ea618f197432245c6be4f7cf7758031f1648c", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }