{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.28498293515358364, "acc_stderr": 0.013191348179838792, "acc_norm": 0.3302047781569966, "acc_norm_stderr": 0.013743085603760427 }, "harness|ko_hellaswag|10": { "acc": 0.3630750846444931, "acc_stderr": 0.004799034356969394, "acc_norm": 0.46285600477992433, "acc_norm_stderr": 0.004975993795562018 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.30994152046783624, "acc_stderr": 0.03546976959393163, "acc_norm": 0.30994152046783624, "acc_norm_stderr": 0.03546976959393163 }, "harness|ko_mmlu_management|5": { "acc": 0.1553398058252427, "acc_stderr": 0.03586594738573975, "acc_norm": 0.1553398058252427, "acc_norm_stderr": 0.03586594738573975 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.2796934865900383, "acc_stderr": 0.01605079214803654, "acc_norm": 0.2796934865900383, "acc_norm_stderr": 0.01605079214803654 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.28888888888888886, "acc_stderr": 0.0391545063041425, "acc_norm": 0.28888888888888886, "acc_norm_stderr": 0.0391545063041425 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.28085106382978725, "acc_stderr": 0.029379170464124818, "acc_norm": 0.28085106382978725, "acc_norm_stderr": 0.029379170464124818 }, "harness|ko_mmlu_virology|5": { "acc": 0.3313253012048193, "acc_stderr": 0.03664314777288085, "acc_norm": 0.3313253012048193, "acc_norm_stderr": 0.03664314777288085 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.33762057877813506, "acc_stderr": 0.026858825879488554, "acc_norm": 0.33762057877813506, "acc_norm_stderr": 0.026858825879488554 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.24663677130044842, "acc_stderr": 0.028930413120910867, "acc_norm": 0.24663677130044842, "acc_norm_stderr": 0.028930413120910867 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.32061068702290074, "acc_stderr": 0.040933292298342784, "acc_norm": 0.32061068702290074, "acc_norm_stderr": 0.040933292298342784 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.23, "acc_stderr": 0.042295258468165044, "acc_norm": 0.23, "acc_norm_stderr": 0.042295258468165044 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.21717171717171718, "acc_stderr": 0.029376616484945616, "acc_norm": 0.21717171717171718, "acc_norm_stderr": 0.029376616484945616 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.296551724137931, "acc_stderr": 0.03806142687309993, "acc_norm": 0.296551724137931, "acc_norm_stderr": 0.03806142687309993 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.040925639582376536, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.040925639582376536 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.24369747899159663, "acc_stderr": 0.02788682807838055, "acc_norm": 0.24369747899159663, "acc_norm_stderr": 0.02788682807838055 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.2230769230769231, "acc_stderr": 0.02110773012724399, "acc_norm": 0.2230769230769231, "acc_norm_stderr": 0.02110773012724399 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.3055555555555556, "acc_stderr": 0.044531975073749834, "acc_norm": 
0.3055555555555556, "acc_norm_stderr": 0.044531975073749834 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.22167487684729065, "acc_stderr": 0.029225575892489614, "acc_norm": 0.22167487684729065, "acc_norm_stderr": 0.029225575892489614 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.3096774193548387, "acc_stderr": 0.026302774983517414, "acc_norm": 0.3096774193548387, "acc_norm_stderr": 0.026302774983517414 }, "harness|ko_mmlu_marketing|5": { "acc": 0.2863247863247863, "acc_stderr": 0.02961432369045665, "acc_norm": 0.2863247863247863, "acc_norm_stderr": 0.02961432369045665 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.23773584905660378, "acc_stderr": 0.026199808807561936, "acc_norm": 0.23773584905660378, "acc_norm_stderr": 0.026199808807561936 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.32727272727272727, "acc_stderr": 0.04494290866252088, "acc_norm": 0.32727272727272727, "acc_norm_stderr": 0.04494290866252088 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2740740740740741, "acc_stderr": 0.027195934804085622, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.027195934804085622 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.271523178807947, "acc_stderr": 0.03631329803969653, "acc_norm": 0.271523178807947, "acc_norm_stderr": 0.03631329803969653 }, "harness|ko_mmlu_sociology|5": { "acc": 0.2935323383084577, "acc_stderr": 0.03220024104534205, "acc_norm": 0.2935323383084577, "acc_norm_stderr": 0.03220024104534205 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.2254335260115607, "acc_stderr": 0.03186209851641144, "acc_norm": 0.2254335260115607, "acc_norm_stderr": 0.03186209851641144 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.23809523809523808, "acc_stderr": 0.021935878081184756, "acc_norm": 0.23809523809523808, "acc_norm_stderr": 0.021935878081184756 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.19444444444444445, "acc_stderr": 0.03309615177059005, "acc_norm": 0.19444444444444445, "acc_norm_stderr": 0.03309615177059005 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.04020151261036845, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.24277456647398843, "acc_stderr": 0.023083658586984204, "acc_norm": 0.24277456647398843, "acc_norm_stderr": 0.023083658586984204 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3312883435582822, "acc_stderr": 0.03697983910025588, "acc_norm": 0.3312883435582822, "acc_norm_stderr": 0.03697983910025588 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.2654320987654321, "acc_stderr": 0.02456922360046085, "acc_norm": 0.2654320987654321, "acc_norm_stderr": 0.02456922360046085 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.26424870466321243, "acc_stderr": 0.03182155050916647, "acc_norm": 0.26424870466321243, "acc_norm_stderr": 0.03182155050916647 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.03999423879281336, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.03999423879281336 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.30091743119266057, "acc_stderr": 0.019664751366802114, "acc_norm": 0.30091743119266057, "acc_norm_stderr": 
0.019664751366802114 }, "harness|ko_mmlu_formal_logic|5": { "acc": 0.30952380952380953, "acc_stderr": 0.041349130183033156, "acc_norm": 0.30952380952380953, "acc_norm_stderr": 0.041349130183033156 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.2549019607843137, "acc_stderr": 0.02495418432487991, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.02495418432487991 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|ko_mmlu_international_law|5": { "acc": 0.3884297520661157, "acc_stderr": 0.04449270350068382, "acc_norm": 0.3884297520661157, "acc_norm_stderr": 0.04449270350068382 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.19736842105263158, "acc_stderr": 0.03238981601699397, "acc_norm": 0.19736842105263158, "acc_norm_stderr": 0.03238981601699397 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.2581699346405229, "acc_stderr": 0.01770453165325007, "acc_norm": 0.2581699346405229, "acc_norm_stderr": 0.01770453165325007 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.2801418439716312, "acc_stderr": 0.026789172351140235, "acc_norm": 0.2801418439716312, "acc_norm_stderr": 0.026789172351140235 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.22321428571428573, "acc_stderr": 0.039523019677025116, "acc_norm": 0.22321428571428573, "acc_norm_stderr": 0.039523019677025116 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.20833333333333334, "acc_stderr": 0.02769691071309395, "acc_norm": 0.20833333333333334, "acc_norm_stderr": 0.02769691071309395 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.27262569832402234, "acc_stderr": 0.014893391735249608, "acc_norm": 0.27262569832402234, "acc_norm_stderr": 0.014893391735249608 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.40441176470588236, "acc_stderr": 0.02981263070156974, "acc_norm": 0.40441176470588236, "acc_norm_stderr": 0.02981263070156974 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.20816326530612245, "acc_stderr": 0.025991117672813296, "acc_norm": 0.20816326530612245, "acc_norm_stderr": 0.025991117672813296 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.3291139240506329, "acc_stderr": 0.030587326294702358, "acc_norm": 0.3291139240506329, "acc_norm_stderr": 0.030587326294702358 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.25097783572359844, "acc_stderr": 0.011073730299187226, "acc_norm": 0.25097783572359844, "acc_norm_stderr": 0.011073730299187226 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.27941176470588236, "acc_stderr": 0.031493281045079556, "acc_norm": 0.27941176470588236, "acc_norm_stderr": 0.031493281045079556 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.3151515151515151, "acc_stderr": 0.0362773057502241, "acc_norm": 0.3151515151515151, "acc_norm_stderr": 0.0362773057502241 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2521419828641371, "mc1_stderr": 0.015201522246299953, "mc2": 0.4099653189995733, "mc2_stderr": 0.014725570270046994 }, "harness|ko_commongen_v2|2": { "acc": 0.2857142857142857, "acc_stderr": 0.015531620786986732, "acc_norm": 0.34946871310507677, "acc_norm_stderr": 0.01639279708576984 } }, "versions": { "all": 0, 
"harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "ITT-AF/ITT-42dot_LLM-PLM-1.3B-v4.0", "model_sha": "498a506ef58ee8018caa360731cbfeb61ddd2ef5", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }