{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.3890784982935154, "acc_stderr": 0.014247309976045607, "acc_norm": 0.45819112627986347, "acc_norm_stderr": 0.014560220308714702 }, "harness|ko_hellaswag|10": { "acc": 0.4206333399721171, "acc_stderr": 0.004926518439372259, "acc_norm": 0.5676160127464649, "acc_norm_stderr": 0.004943945069611462 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.49122807017543857, "acc_stderr": 0.038342347441649924, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.038342347441649924 }, "harness|ko_mmlu_management|5": { "acc": 0.4563106796116505, "acc_stderr": 0.049318019942204146, "acc_norm": 0.4563106796116505, "acc_norm_stderr": 0.049318019942204146 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5287356321839081, "acc_stderr": 0.017850410794380173, "acc_norm": 0.5287356321839081, "acc_norm_stderr": 0.017850410794380173 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.5111111111111111, "acc_stderr": 0.043182754919779756, "acc_norm": 0.5111111111111111, "acc_norm_stderr": 0.043182754919779756 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.045126085985421255, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421255 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.41702127659574467, "acc_stderr": 0.03223276266711712, "acc_norm": 0.41702127659574467, "acc_norm_stderr": 0.03223276266711712 }, "harness|ko_mmlu_virology|5": { "acc": 0.40963855421686746, "acc_stderr": 0.038284011150790206, "acc_norm": 0.40963855421686746, "acc_norm_stderr": 0.038284011150790206 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4790996784565916, "acc_stderr": 0.028373270961069414, "acc_norm": 0.4790996784565916, "acc_norm_stderr": 0.028373270961069414 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.5022421524663677, "acc_stderr": 0.03355746535223263, "acc_norm": 0.5022421524663677, "acc_norm_stderr": 0.03355746535223263 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.4580152671755725, "acc_stderr": 0.04369802690578757, "acc_norm": 0.4580152671755725, "acc_norm_stderr": 0.04369802690578757 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.494949494949495, "acc_stderr": 0.035621707606254015, "acc_norm": 0.494949494949495, "acc_norm_stderr": 0.035621707606254015 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3931034482758621, "acc_stderr": 0.040703290137070705, "acc_norm": 0.3931034482758621, "acc_norm_stderr": 0.040703290137070705 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.18627450980392157, "acc_stderr": 0.03873958714149352, "acc_norm": 0.18627450980392157, "acc_norm_stderr": 0.03873958714149352 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.42857142857142855, "acc_stderr": 0.032145368597886394, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.032145368597886394 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.4076923076923077, "acc_stderr": 0.02491524398598784, "acc_norm": 0.4076923076923077, "acc_norm_stderr": 0.02491524398598784 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.4722222222222222, "acc_stderr": 0.04826217294139894, "acc_norm": 
0.4722222222222222, "acc_norm_stderr": 0.04826217294139894 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.3448275862068966, "acc_stderr": 0.03344283744280458, "acc_norm": 0.3448275862068966, "acc_norm_stderr": 0.03344283744280458 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.47096774193548385, "acc_stderr": 0.028396016402761005, "acc_norm": 0.47096774193548385, "acc_norm_stderr": 0.028396016402761005 }, "harness|ko_mmlu_marketing|5": { "acc": 0.5811965811965812, "acc_stderr": 0.03232128912157792, "acc_norm": 0.5811965811965812, "acc_norm_stderr": 0.03232128912157792 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4490566037735849, "acc_stderr": 0.030612730713641095, "acc_norm": 0.4490566037735849, "acc_norm_stderr": 0.030612730713641095 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.43636363636363634, "acc_stderr": 0.04750185058907297, "acc_norm": 0.43636363636363634, "acc_norm_stderr": 0.04750185058907297 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.2740740740740741, "acc_stderr": 0.027195934804085626, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.027195934804085626 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2847682119205298, "acc_stderr": 0.03684881521389023, "acc_norm": 0.2847682119205298, "acc_norm_stderr": 0.03684881521389023 }, "harness|ko_mmlu_sociology|5": { "acc": 0.5771144278606966, "acc_stderr": 0.034932317774212816, "acc_norm": 0.5771144278606966, "acc_norm_stderr": 0.034932317774212816 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.3872832369942196, "acc_stderr": 0.03714325906302064, "acc_norm": 0.3872832369942196, "acc_norm_stderr": 0.03714325906302064 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.2962962962962963, "acc_stderr": 0.023517294335963286, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.023517294335963286 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.3472222222222222, "acc_stderr": 0.039812405437178615, "acc_norm": 0.3472222222222222, "acc_norm_stderr": 0.039812405437178615 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.47109826589595377, "acc_stderr": 0.026874085883518348, "acc_norm": 0.47109826589595377, "acc_norm_stderr": 0.026874085883518348 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.43558282208588955, "acc_stderr": 0.03895632464138938, "acc_norm": 0.43558282208588955, "acc_norm_stderr": 0.03895632464138938 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.4537037037037037, "acc_stderr": 0.0277012284685426, "acc_norm": 0.4537037037037037, "acc_norm_stderr": 0.0277012284685426 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.45595854922279794, "acc_stderr": 0.035944137112724366, "acc_norm": 0.45595854922279794, "acc_norm_stderr": 0.035944137112724366 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.24561403508771928, "acc_stderr": 0.040493392977481404, "acc_norm": 0.24561403508771928, "acc_norm_stderr": 0.040493392977481404 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.5045871559633027, "acc_stderr": 0.021436420955529424, "acc_norm": 0.5045871559633027, "acc_norm_stderr": 0.021436420955529424 
}, "harness|ko_mmlu_formal_logic|5": { "acc": 0.2698412698412698, "acc_stderr": 0.03970158273235173, "acc_norm": 0.2698412698412698, "acc_norm_stderr": 0.03970158273235173 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.4150326797385621, "acc_stderr": 0.028213504177824093, "acc_norm": 0.4150326797385621, "acc_norm_stderr": 0.028213504177824093 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6198347107438017, "acc_stderr": 0.04431324501968432, "acc_norm": 0.6198347107438017, "acc_norm_stderr": 0.04431324501968432 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.40131578947368424, "acc_stderr": 0.039889037033362836, "acc_norm": 0.40131578947368424, "acc_norm_stderr": 0.039889037033362836 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3611111111111111, "acc_stderr": 0.01943177567703731, "acc_norm": 0.3611111111111111, "acc_norm_stderr": 0.01943177567703731 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.28368794326241137, "acc_stderr": 0.026891709428343957, "acc_norm": 0.28368794326241137, "acc_norm_stderr": 0.026891709428343957 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.25892857142857145, "acc_stderr": 0.041577515398656284, "acc_norm": 0.25892857142857145, "acc_norm_stderr": 0.041577515398656284 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.2175925925925926, "acc_stderr": 0.028139689444859672, "acc_norm": 0.2175925925925926, "acc_norm_stderr": 0.028139689444859672 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.3860294117647059, "acc_stderr": 0.029573269134411124, "acc_norm": 0.3860294117647059, "acc_norm_stderr": 0.029573269134411124 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.40408163265306124, "acc_stderr": 0.0314147080258659, "acc_norm": 0.40408163265306124, "acc_norm_stderr": 0.0314147080258659 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.6075949367088608, "acc_stderr": 0.0317847187456473, "acc_norm": 0.6075949367088608, "acc_norm_stderr": 0.0317847187456473 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.3050847457627119, "acc_stderr": 0.011759939618085455, "acc_norm": 0.3050847457627119, "acc_norm_stderr": 0.011759939618085455 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.43137254901960786, "acc_stderr": 0.03476099060501637, "acc_norm": 0.43137254901960786, "acc_norm_stderr": 0.03476099060501637 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5575757575757576, "acc_stderr": 0.03878372113711275, "acc_norm": 0.5575757575757576, "acc_norm_stderr": 0.03878372113711275 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.2607099143206854, "mc1_stderr": 0.015368841620766368, "mc2": 0.4100851120970672, "mc2_stderr": 0.014797143070922393 }, "harness|ko_commongen_v2|2": { "acc": 0.5029515938606848, "acc_stderr": 0.017190054580194694, "acc_norm": 0.5914994096812278, "acc_norm_stderr": 0.016900062879427125 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "Kaeri-Jenti/llama-2-koen-13b-v1.2", "model_sha": "cb9e8ff37d427ab588d666b5c6994498a10084de", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }