{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.2790102389078498, "acc_stderr": 0.013106784883601352, "acc_norm": 0.3199658703071672, "acc_norm_stderr": 0.013631345807016198 }, "harness|ko_hellaswag|10": { "acc": 0.3842859988050189, "acc_stderr": 0.004854318994447741, "acc_norm": 0.4954192391953794, "acc_norm_stderr": 0.0049895720021966876 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.3157894736842105, "acc_stderr": 0.03565079670708313, "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.03565079670708313 }, "harness|ko_mmlu_management|5": { "acc": 0.1941747572815534, "acc_stderr": 0.039166677628225836, "acc_norm": 0.1941747572815534, "acc_norm_stderr": 0.039166677628225836 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.2848020434227331, "acc_stderr": 0.016139174096522563, "acc_norm": 0.2848020434227331, "acc_norm_stderr": 0.016139174096522563 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.2962962962962963, "acc_stderr": 0.03944624162501116, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.03944624162501116 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.20425531914893616, "acc_stderr": 0.02635515841334941, "acc_norm": 0.20425531914893616, "acc_norm_stderr": 0.02635515841334941 }, "harness|ko_mmlu_virology|5": { "acc": 0.19879518072289157, "acc_stderr": 0.03106939026078942, "acc_norm": 0.19879518072289157, "acc_norm_stderr": 0.03106939026078942 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.26688102893890675, "acc_stderr": 0.025122637608816653, "acc_norm": 0.26688102893890675, "acc_norm_stderr": 0.025122637608816653 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.2600896860986547, "acc_stderr": 0.029442495585857473, "acc_norm": 0.2600896860986547, "acc_norm_stderr": 0.029442495585857473 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.1984732824427481, "acc_stderr": 0.034981493854624686, "acc_norm": 0.1984732824427481, "acc_norm_stderr": 0.034981493854624686 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.25252525252525254, "acc_stderr": 0.030954055470365907, "acc_norm": 0.25252525252525254, "acc_norm_stderr": 0.030954055470365907 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.2896551724137931, "acc_stderr": 0.03780019230438014, "acc_norm": 0.2896551724137931, "acc_norm_stderr": 0.03780019230438014 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.18627450980392157, "acc_stderr": 0.03873958714149353, "acc_norm": 0.18627450980392157, "acc_norm_stderr": 0.03873958714149353 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.21008403361344538, "acc_stderr": 0.026461398717471874, "acc_norm": 0.21008403361344538, "acc_norm_stderr": 0.026461398717471874 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.2128205128205128, "acc_stderr": 0.020752423722128002, "acc_norm": 0.2128205128205128, "acc_norm_stderr": 0.020752423722128002 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.2777777777777778, "acc_stderr": 0.04330043749650742, "acc_norm": 
0.2777777777777778, "acc_norm_stderr": 0.04330043749650742 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.2561576354679803, "acc_stderr": 0.0307127300709826, "acc_norm": 0.2561576354679803, "acc_norm_stderr": 0.0307127300709826 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.24516129032258063, "acc_stderr": 0.024472243840895518, "acc_norm": 0.24516129032258063, "acc_norm_stderr": 0.024472243840895518 }, "harness|ko_mmlu_marketing|5": { "acc": 0.28205128205128205, "acc_stderr": 0.029480360549541198, "acc_norm": 0.28205128205128205, "acc_norm_stderr": 0.029480360549541198 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.24528301886792453, "acc_stderr": 0.026480357179895688, "acc_norm": 0.24528301886792453, "acc_norm_stderr": 0.026480357179895688 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.20909090909090908, "acc_stderr": 0.03895091015724135, "acc_norm": 0.20909090909090908, "acc_norm_stderr": 0.03895091015724135 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.26666666666666666, "acc_stderr": 0.026962424325073838, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.026962424325073838 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2185430463576159, "acc_stderr": 0.033742355504256936, "acc_norm": 0.2185430463576159, "acc_norm_stderr": 0.033742355504256936 }, "harness|ko_mmlu_sociology|5": { "acc": 0.24875621890547264, "acc_stderr": 0.030567675938916714, "acc_norm": 0.24875621890547264, "acc_norm_stderr": 0.030567675938916714 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.23699421965317918, "acc_stderr": 0.03242414757483099, "acc_norm": 0.23699421965317918, "acc_norm_stderr": 0.03242414757483099 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.29894179894179895, "acc_stderr": 0.023577604791655805, "acc_norm": 0.29894179894179895, "acc_norm_stderr": 0.023577604791655805 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2638888888888889, "acc_stderr": 0.03685651095897532, "acc_norm": 0.2638888888888889, "acc_norm_stderr": 0.03685651095897532 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.15, "acc_stderr": 0.03588702812826371, "acc_norm": 0.15, "acc_norm_stderr": 0.03588702812826371 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.2745664739884393, "acc_stderr": 0.02402774515526501, "acc_norm": 0.2745664739884393, "acc_norm_stderr": 0.02402774515526501 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.3128834355828221, "acc_stderr": 0.036429145782924034, "acc_norm": 0.3128834355828221, "acc_norm_stderr": 0.036429145782924034 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.30864197530864196, "acc_stderr": 0.025702640260603767, "acc_norm": 0.30864197530864196, "acc_norm_stderr": 0.025702640260603767 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.22797927461139897, "acc_stderr": 0.030276909945178256, "acc_norm": 0.22797927461139897, "acc_norm_stderr": 0.030276909945178256 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.03999423879281336, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.03999423879281336 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.23853211009174313, "acc_stderr": 0.018272575810231857, "acc_norm": 0.23853211009174313, "acc_norm_stderr": 
0.018272575810231857 }, "harness|ko_mmlu_formal_logic|5": { "acc": 0.1746031746031746, "acc_stderr": 0.033954900208561116, "acc_norm": 0.1746031746031746, "acc_norm_stderr": 0.033954900208561116 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.26143790849673204, "acc_stderr": 0.025160998214292456, "acc_norm": 0.26143790849673204, "acc_norm_stderr": 0.025160998214292456 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|ko_mmlu_international_law|5": { "acc": 0.3884297520661157, "acc_stderr": 0.04449270350068382, "acc_norm": 0.3884297520661157, "acc_norm_stderr": 0.04449270350068382 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.23026315789473684, "acc_stderr": 0.03426059424403165, "acc_norm": 0.23026315789473684, "acc_norm_stderr": 0.03426059424403165 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.2908496732026144, "acc_stderr": 0.018373116915903966, "acc_norm": 0.2908496732026144, "acc_norm_stderr": 0.018373116915903966 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.26595744680851063, "acc_stderr": 0.026358065698880582, "acc_norm": 0.26595744680851063, "acc_norm_stderr": 0.026358065698880582 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.25, "acc_stderr": 0.04109974682633932, "acc_norm": 0.25, "acc_norm_stderr": 0.04109974682633932 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.23148148148148148, "acc_stderr": 0.028765111718046937, "acc_norm": 0.23148148148148148, "acc_norm_stderr": 0.028765111718046937 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.24692737430167597, "acc_stderr": 0.014422292204808852, "acc_norm": 0.24692737430167597, "acc_norm_stderr": 0.014422292204808852 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.16911764705882354, "acc_stderr": 0.022770868010113014, "acc_norm": 0.16911764705882354, "acc_norm_stderr": 0.022770868010113014 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.2530612244897959, "acc_stderr": 0.02783302387139968, "acc_norm": 0.2530612244897959, "acc_norm_stderr": 0.02783302387139968 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.2869198312236287, "acc_stderr": 0.029443773022594693, "acc_norm": 0.2869198312236287, "acc_norm_stderr": 0.029443773022594693 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.24967405475880053, "acc_stderr": 0.011054538377832318, "acc_norm": 0.24967405475880053, "acc_norm_stderr": 0.011054538377832318 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.27941176470588236, "acc_stderr": 0.03149328104507957, "acc_norm": 0.27941176470588236, "acc_norm_stderr": 0.03149328104507957 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.296969696969697, "acc_stderr": 0.03567969772268047, "acc_norm": 0.296969696969697, "acc_norm_stderr": 0.03567969772268047 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.24724602203182375, "mc1_stderr": 0.015102404797359649, "mc2": 0.38771109052404834, "mc2_stderr": 0.014784638195990142 }, "harness|ko_commongen_v2|2": { "acc": 0.5140845070422535, "acc_stderr": 0.017132977754804355, "acc_norm": 0.6068075117370892, "acc_norm_stderr": 0.016744157492949253 } }, "versions": { "all": 0, 
"harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "Trofish/KULLM-RLHF", "model_sha": "857362e5fbb814aa76ed6aceb9314fc8ddbcdd63", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }