{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.38310580204778155, "acc_stderr": 0.01420647266167288, "acc_norm": 0.4453924914675768, "acc_norm_stderr": 0.014523987638344074 }, "harness|ko_hellaswag|10": { "acc": 0.42113124875522806, "acc_stderr": 0.004927314729433555, "acc_norm": 0.578370842461661, "acc_norm_stderr": 0.004928105880776078 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.4619883040935672, "acc_stderr": 0.038237270928823064, "acc_norm": 0.4619883040935672, "acc_norm_stderr": 0.038237270928823064 }, "harness|ko_mmlu_management|5": { "acc": 0.49514563106796117, "acc_stderr": 0.04950504382128921, "acc_norm": 0.49514563106796117, "acc_norm_stderr": 0.04950504382128921 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5644955300127714, "acc_stderr": 0.017730589927926588, "acc_norm": 0.5644955300127714, "acc_norm_stderr": 0.017730589927926588 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.4666666666666667, "acc_stderr": 0.043097329010363554, "acc_norm": 0.4666666666666667, "acc_norm_stderr": 0.043097329010363554 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.4127659574468085, "acc_stderr": 0.03218471141400352, "acc_norm": 0.4127659574468085, "acc_norm_stderr": 0.03218471141400352 }, "harness|ko_mmlu_virology|5": { "acc": 0.41566265060240964, "acc_stderr": 0.03836722176598052, "acc_norm": 0.41566265060240964, "acc_norm_stderr": 0.03836722176598052 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.5144694533762058, "acc_stderr": 0.028386198084177687, "acc_norm": 0.5144694533762058, "acc_norm_stderr": 0.028386198084177687 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.5112107623318386, "acc_stderr": 0.033549366530984746, "acc_norm": 0.5112107623318386, "acc_norm_stderr": 0.033549366530984746 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.48854961832061067, "acc_stderr": 0.04384140024078016, "acc_norm": 0.48854961832061067, "acc_norm_stderr": 0.04384140024078016 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5858585858585859, "acc_stderr": 0.03509438348879629, "acc_norm": 0.5858585858585859, "acc_norm_stderr": 0.03509438348879629 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.4206896551724138, "acc_stderr": 0.0411391498118926, "acc_norm": 0.4206896551724138, "acc_norm_stderr": 0.0411391498118926 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237655, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237655 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.46638655462184875, "acc_stderr": 0.03240501447690071, "acc_norm": 0.46638655462184875, "acc_norm_stderr": 0.03240501447690071 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.3974358974358974, "acc_stderr": 0.024811920017903836, "acc_norm": 0.3974358974358974, "acc_norm_stderr": 0.024811920017903836 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.62, "acc_stderr": 0.04878317312145632, "acc_norm": 0.62, "acc_norm_stderr": 0.04878317312145632 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.46296296296296297, "acc_stderr": 0.04820403072760627, "acc_norm": 0.46296296296296297, 
"acc_norm_stderr": 0.04820403072760627 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3645320197044335, "acc_stderr": 0.0338640574606209, "acc_norm": 0.3645320197044335, "acc_norm_stderr": 0.0338640574606209 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.4935483870967742, "acc_stderr": 0.02844163823354051, "acc_norm": 0.4935483870967742, "acc_norm_stderr": 0.02844163823354051 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6538461538461539, "acc_stderr": 0.0311669573672359, "acc_norm": 0.6538461538461539, "acc_norm_stderr": 0.0311669573672359 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4226415094339623, "acc_stderr": 0.03040233144576954, "acc_norm": 0.4226415094339623, "acc_norm_stderr": 0.03040233144576954 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5545454545454546, "acc_stderr": 0.047605488214603246, "acc_norm": 0.5545454545454546, "acc_norm_stderr": 0.047605488214603246 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2074074074074074, "acc_stderr": 0.024720713193952148, "acc_norm": 0.2074074074074074, "acc_norm_stderr": 0.024720713193952148 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 0.03861557546255169, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.03861557546255169 }, "harness|ko_mmlu_sociology|5": { "acc": 0.5124378109452736, "acc_stderr": 0.0353443984853958, "acc_norm": 0.5124378109452736, "acc_norm_stderr": 0.0353443984853958 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3872832369942196, "acc_stderr": 0.03714325906302065, "acc_norm": 0.3872832369942196, "acc_norm_stderr": 0.03714325906302065 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2962962962962963, "acc_stderr": 0.023517294335963286, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.023517294335963286 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.4166666666666667, "acc_stderr": 0.041227287076512825, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.041227287076512825 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.49710982658959535, "acc_stderr": 0.02691864538323901, "acc_norm": 0.49710982658959535, "acc_norm_stderr": 0.02691864538323901 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.4233128834355828, "acc_stderr": 0.038818912133343826, "acc_norm": 0.4233128834355828, "acc_norm_stderr": 0.038818912133343826 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.49691358024691357, "acc_stderr": 0.027820214158594377, "acc_norm": 0.49691358024691357, "acc_norm_stderr": 0.027820214158594377 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5181347150259067, "acc_stderr": 0.036060650018329185, "acc_norm": 0.5181347150259067, "acc_norm_stderr": 0.036060650018329185 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.0414243971948936, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.0414243971948936 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.5688073394495413, "acc_stderr": 0.02123336503031956, "acc_norm": 0.5688073394495413, "acc_norm_stderr": 0.02123336503031956 }, "harness|ko_mmlu_formal_logic|5": { 
"acc": 0.2857142857142857, "acc_stderr": 0.0404061017820884, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.0404061017820884 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.434640522875817, "acc_stderr": 0.028384256704883037, "acc_norm": 0.434640522875817, "acc_norm_stderr": 0.028384256704883037 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6528925619834711, "acc_stderr": 0.04345724570292535, "acc_norm": 0.6528925619834711, "acc_norm_stderr": 0.04345724570292535 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.39473684210526316, "acc_stderr": 0.039777499346220734, "acc_norm": 0.39473684210526316, "acc_norm_stderr": 0.039777499346220734 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.380718954248366, "acc_stderr": 0.019643801557924806, "acc_norm": 0.380718954248366, "acc_norm_stderr": 0.019643801557924806 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.30141843971631205, "acc_stderr": 0.02737412888263115, "acc_norm": 0.30141843971631205, "acc_norm_stderr": 0.02737412888263115 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.26785714285714285, "acc_stderr": 0.04203277291467762, "acc_norm": 0.26785714285714285, "acc_norm_stderr": 0.04203277291467762 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3425925925925926, "acc_stderr": 0.032365852526021574, "acc_norm": 0.3425925925925926, "acc_norm_stderr": 0.032365852526021574 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2681564245810056, "acc_stderr": 0.014816119635317, "acc_norm": 0.2681564245810056, "acc_norm_stderr": 0.014816119635317 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.3272058823529412, "acc_stderr": 0.028501452860396587, "acc_norm": 0.3272058823529412, "acc_norm_stderr": 0.028501452860396587 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.4326530612244898, "acc_stderr": 0.03171752824062664, "acc_norm": 0.4326530612244898, "acc_norm_stderr": 0.03171752824062664 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.6286919831223629, "acc_stderr": 0.03145068600744859, "acc_norm": 0.6286919831223629, "acc_norm_stderr": 0.03145068600744859 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.35658409387222945, "acc_stderr": 0.012233642989273886, "acc_norm": 0.35658409387222945, "acc_norm_stderr": 0.012233642989273886 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.553921568627451, "acc_stderr": 0.034888454513049734, "acc_norm": 0.553921568627451, "acc_norm_stderr": 0.034888454513049734 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5757575757575758, "acc_stderr": 0.038592681420702636, "acc_norm": 0.5757575757575758, "acc_norm_stderr": 0.038592681420702636 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2521419828641371, "mc1_stderr": 0.015201522246299946, "mc2": 0.4052899642454083, "mc2_stderr": 0.014924042516908636 }, "harness|ko_commongen_v2|2": { "acc": 0.38488783943329397, "acc_stderr": 0.016728579701498672, "acc_norm": 0.4510035419126328, "acc_norm_stderr": 0.017107618859549357 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, 
"harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "AIFT/aift-llama2-koen-instruct-v1.2", "model_sha": "95f3e7cce5bebe90ac4ff8f07597be444e7e1a9e", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }