{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3924914675767918, "acc_stderr": 0.01426963463567073, "acc_norm": 0.46331058020477817, "acc_norm_stderr": 0.014572000527756994 }, "harness|ko_hellaswag|10": { "acc": 0.42202748456482775, "acc_stderr": 0.004928735103635848, "acc_norm": 0.572495518820952, "acc_norm_stderr": 0.004937054233711569 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.52046783625731, "acc_stderr": 0.038316105328219316, "acc_norm": 0.52046783625731, "acc_norm_stderr": 0.038316105328219316 }, "harness|ko_mmlu_management|5": { "acc": 0.46601941747572817, "acc_stderr": 0.04939291447273481, "acc_norm": 0.46601941747572817, "acc_norm_stderr": 0.04939291447273481 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5338441890166028, "acc_stderr": 0.017838956009136802, "acc_norm": 0.5338441890166028, "acc_norm_stderr": 0.017838956009136802 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.4962962962962963, "acc_stderr": 0.043192236258113303, "acc_norm": 0.4962962962962963, "acc_norm_stderr": 0.043192236258113303 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.045126085985421255, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421255 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.4, "acc_stderr": 0.03202563076101737, "acc_norm": 0.4, "acc_norm_stderr": 0.03202563076101737 }, "harness|ko_mmlu_virology|5": { "acc": 0.42771084337349397, "acc_stderr": 0.03851597683718533, "acc_norm": 0.42771084337349397, "acc_norm_stderr": 0.03851597683718533 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.5080385852090032, "acc_stderr": 0.028394421370984538, "acc_norm": 0.5080385852090032, "acc_norm_stderr": 0.028394421370984538 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.5291479820627802, "acc_stderr": 0.03350073248773404, "acc_norm": 0.5291479820627802, "acc_norm_stderr": 0.03350073248773404 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.48091603053435117, "acc_stderr": 0.04382094705550989, "acc_norm": 0.48091603053435117, "acc_norm_stderr": 0.04382094705550989 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5454545454545454, "acc_stderr": 0.035476014940069384, "acc_norm": 0.5454545454545454, "acc_norm_stderr": 0.035476014940069384 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3931034482758621, "acc_stderr": 0.040703290137070705, "acc_norm": 0.3931034482758621, "acc_norm_stderr": 0.040703290137070705 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.04220773659171453, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.04220773659171453 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.4495798319327731, "acc_stderr": 0.03231293497137707, "acc_norm": 0.4495798319327731, "acc_norm_stderr": 0.03231293497137707 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.4307692307692308, "acc_stderr": 0.02510682066053975, "acc_norm": 0.4307692307692308, "acc_norm_stderr": 0.02510682066053975 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5185185185185185, "acc_stderr": 0.04830366024635331, "acc_norm": 0.5185185185185185, "acc_norm_stderr": 
0.04830366024635331 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.4236453201970443, "acc_stderr": 0.034767257476490364, "acc_norm": 0.4236453201970443, "acc_norm_stderr": 0.034767257476490364 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.46774193548387094, "acc_stderr": 0.028384747788813326, "acc_norm": 0.46774193548387094, "acc_norm_stderr": 0.028384747788813326 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6538461538461539, "acc_stderr": 0.031166957367235903, "acc_norm": 0.6538461538461539, "acc_norm_stderr": 0.031166957367235903 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.43018867924528303, "acc_stderr": 0.030471445867183238, "acc_norm": 0.43018867924528303, "acc_norm_stderr": 0.030471445867183238 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5272727272727272, "acc_stderr": 0.0478200179138006, "acc_norm": 0.5272727272727272, "acc_norm_stderr": 0.0478200179138006 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2777777777777778, "acc_stderr": 0.02730914058823018, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.02730914058823018 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2913907284768212, "acc_stderr": 0.037101857261199946, "acc_norm": 0.2913907284768212, "acc_norm_stderr": 0.037101857261199946 }, "harness|ko_mmlu_sociology|5": { "acc": 0.5870646766169154, "acc_stderr": 0.03481520803367348, "acc_norm": 0.5870646766169154, "acc_norm_stderr": 0.03481520803367348 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.37572254335260113, "acc_stderr": 0.03692820767264867, "acc_norm": 0.37572254335260113, "acc_norm_stderr": 0.03692820767264867 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.29365079365079366, "acc_stderr": 0.02345603738398203, "acc_norm": 0.29365079365079366, "acc_norm_stderr": 0.02345603738398203 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3611111111111111, "acc_stderr": 0.040166600304512336, "acc_norm": 0.3611111111111111, "acc_norm_stderr": 0.040166600304512336 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.63, "acc_stderr": 0.048523658709391, "acc_norm": 0.63, "acc_norm_stderr": 0.048523658709391 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.48265895953757226, "acc_stderr": 0.026902900458666647, "acc_norm": 0.48265895953757226, "acc_norm_stderr": 0.026902900458666647 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.4294478527607362, "acc_stderr": 0.03889066619112722, "acc_norm": 0.4294478527607362, "acc_norm_stderr": 0.03889066619112722 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.4722222222222222, "acc_stderr": 0.027777777777777797, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.027777777777777797 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5077720207253886, "acc_stderr": 0.03608003225569654, "acc_norm": 0.5077720207253886, "acc_norm_stderr": 0.03608003225569654 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2719298245614035, "acc_stderr": 0.04185774424022058, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.04185774424022058 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.5412844036697247, "acc_stderr": 0.021364122533881695, "acc_norm": 0.5412844036697247, "acc_norm_stderr": 0.021364122533881695 }, "harness|ko_mmlu_formal_logic|5": { 
"acc": 0.29365079365079366, "acc_stderr": 0.040735243221471255, "acc_norm": 0.29365079365079366, "acc_norm_stderr": 0.040735243221471255 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.42810457516339867, "acc_stderr": 0.028332397483664274, "acc_norm": 0.42810457516339867, "acc_norm_stderr": 0.028332397483664274 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|ko_mmlu_international_law|5": { "acc": 0.628099173553719, "acc_stderr": 0.04412015806624504, "acc_norm": 0.628099173553719, "acc_norm_stderr": 0.04412015806624504 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.40789473684210525, "acc_stderr": 0.03999309712777472, "acc_norm": 0.40789473684210525, "acc_norm_stderr": 0.03999309712777472 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3790849673202614, "acc_stderr": 0.01962744474841224, "acc_norm": 0.3790849673202614, "acc_norm_stderr": 0.01962744474841224 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.32269503546099293, "acc_stderr": 0.027889139300534778, "acc_norm": 0.32269503546099293, "acc_norm_stderr": 0.027889139300534778 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.23214285714285715, "acc_stderr": 0.04007341809755806, "acc_norm": 0.23214285714285715, "acc_norm_stderr": 0.04007341809755806 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3472222222222222, "acc_stderr": 0.032468872436376486, "acc_norm": 0.3472222222222222, "acc_norm_stderr": 0.032468872436376486 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.3786764705882353, "acc_stderr": 0.02946513363977613, "acc_norm": 0.3786764705882353, "acc_norm_stderr": 0.02946513363977613 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.44081632653061226, "acc_stderr": 0.03178419114175363, "acc_norm": 0.44081632653061226, "acc_norm_stderr": 0.03178419114175363 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.5864978902953587, "acc_stderr": 0.03205649904851858, "acc_norm": 0.5864978902953587, "acc_norm_stderr": 0.03205649904851858 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.31421121251629724, "acc_stderr": 0.01185591158704823, "acc_norm": 0.31421121251629724, "acc_norm_stderr": 0.01185591158704823 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.4264705882352941, "acc_stderr": 0.034711579079534254, "acc_norm": 0.4264705882352941, "acc_norm_stderr": 0.034711579079534254 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5393939393939394, "acc_stderr": 0.03892207016552012, "acc_norm": 0.5393939393939394, "acc_norm_stderr": 0.03892207016552012 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.26438188494492043, "mc1_stderr": 0.015438211119522505, "mc2": 0.4107667883351212, "mc2_stderr": 0.014847145006763885 }, "harness|ko_commongen_v2|2": { "acc": 0.4793388429752066, "acc_stderr": 0.017175671279836446, "acc_norm": 0.5572609208972845, "acc_norm_stderr": 0.017077254131556228 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, 
"harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "Cartinoe5930/KoRAE-13b", "model_sha": "ea6b5bc5c26f06cbb2a0cb973b691f4080bbee72", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }