{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.4061433447098976, "acc_stderr": 0.014351656690097858, "acc_norm": 0.46757679180887374, "acc_norm_stderr": 0.014580637569995421 }, "harness|ko_hellaswag|10": { "acc": 0.4268074088826927, "acc_stderr": 0.004936029827672038, "acc_norm": 0.5713005377414858, "acc_norm_stderr": 0.004938787067611805 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.52046783625731, "acc_stderr": 0.038316105328219316, "acc_norm": 0.52046783625731, "acc_norm_stderr": 0.038316105328219316 }, "harness|ko_mmlu_management|5": { "acc": 0.5728155339805825, "acc_stderr": 0.04897957737781168, "acc_norm": 0.5728155339805825, "acc_norm_stderr": 0.04897957737781168 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.5542784163473818, "acc_stderr": 0.017774297282479506, "acc_norm": 0.5542784163473818, "acc_norm_stderr": 0.017774297282479506 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.4888888888888889, "acc_stderr": 0.04318275491977976, "acc_norm": 0.4888888888888889, "acc_norm_stderr": 0.04318275491977976 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.425531914893617, "acc_stderr": 0.03232146916224469, "acc_norm": 0.425531914893617, "acc_norm_stderr": 0.03232146916224469 }, "harness|ko_mmlu_virology|5": { "acc": 0.42771084337349397, "acc_stderr": 0.038515976837185335, "acc_norm": 0.42771084337349397, "acc_norm_stderr": 0.038515976837185335 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4919614147909968, "acc_stderr": 0.028394421370984545, "acc_norm": 0.4919614147909968, "acc_norm_stderr": 0.028394421370984545 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.5336322869955157, "acc_stderr": 0.033481800170603065, "acc_norm": 0.5336322869955157, "acc_norm_stderr": 0.033481800170603065 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.4732824427480916, "acc_stderr": 0.04379024936553894, "acc_norm": 0.4732824427480916, "acc_norm_stderr": 0.04379024936553894 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.38, "acc_stderr": 0.04878317312145631, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145631 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5909090909090909, "acc_stderr": 0.03502975799413008, "acc_norm": 0.5909090909090909, "acc_norm_stderr": 0.03502975799413008 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.43448275862068964, "acc_stderr": 0.041307408795554966, "acc_norm": 0.43448275862068964, "acc_norm_stderr": 0.041307408795554966 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.04280105837364395, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.04280105837364395 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.49159663865546216, "acc_stderr": 0.0324739027656967, "acc_norm": 0.49159663865546216, "acc_norm_stderr": 0.0324739027656967 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.4717948717948718, "acc_stderr": 0.025310639254933917, "acc_norm": 0.4717948717948718, "acc_norm_stderr": 0.025310639254933917 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5092592592592593, "acc_stderr": 0.04832853553437055, "acc_norm": 0.5092592592592593, 
"acc_norm_stderr": 0.04832853553437055 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.41379310344827586, "acc_stderr": 0.03465304488406796, "acc_norm": 0.41379310344827586, "acc_norm_stderr": 0.03465304488406796 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.46774193548387094, "acc_stderr": 0.02838474778881333, "acc_norm": 0.46774193548387094, "acc_norm_stderr": 0.02838474778881333 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6495726495726496, "acc_stderr": 0.0312561082442188, "acc_norm": 0.6495726495726496, "acc_norm_stderr": 0.0312561082442188 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.44528301886792454, "acc_stderr": 0.030588052974270655, "acc_norm": 0.44528301886792454, "acc_norm_stderr": 0.030588052974270655 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.509090909090909, "acc_stderr": 0.04788339768702861, "acc_norm": 0.509090909090909, "acc_norm_stderr": 0.04788339768702861 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.23333333333333334, "acc_stderr": 0.025787874220959316, "acc_norm": 0.23333333333333334, "acc_norm_stderr": 0.025787874220959316 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.2847682119205298, "acc_stderr": 0.03684881521389024, "acc_norm": 0.2847682119205298, "acc_norm_stderr": 0.03684881521389024 }, "harness|ko_mmlu_sociology|5": { "acc": 0.5870646766169154, "acc_stderr": 0.03481520803367348, "acc_norm": 0.5870646766169154, "acc_norm_stderr": 0.03481520803367348 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.42196531791907516, "acc_stderr": 0.0376574669386515, "acc_norm": 0.42196531791907516, "acc_norm_stderr": 0.0376574669386515 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.291005291005291, "acc_stderr": 0.023393826500484875, "acc_norm": 0.291005291005291, "acc_norm_stderr": 0.023393826500484875 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.375, "acc_stderr": 0.04048439222695598, "acc_norm": 0.375, "acc_norm_stderr": 0.04048439222695598 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.65, "acc_stderr": 0.047937248544110175, "acc_norm": 0.65, "acc_norm_stderr": 0.047937248544110175 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.48265895953757226, "acc_stderr": 0.026902900458666647, "acc_norm": 0.48265895953757226, "acc_norm_stderr": 0.026902900458666647 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.5276073619631901, "acc_stderr": 0.0392237829061099, "acc_norm": 0.5276073619631901, "acc_norm_stderr": 0.0392237829061099 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.5123456790123457, "acc_stderr": 0.027812262269327235, "acc_norm": 0.5123456790123457, "acc_norm_stderr": 0.027812262269327235 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5699481865284974, "acc_stderr": 0.03572954333144808, "acc_norm": 0.5699481865284974, "acc_norm_stderr": 0.03572954333144808 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2543859649122807, "acc_stderr": 0.0409698513984367, "acc_norm": 0.2543859649122807, "acc_norm_stderr": 0.0409698513984367 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.6128440366972477, "acc_stderr": 0.02088423199264345, "acc_norm": 0.6128440366972477, "acc_norm_stderr": 0.02088423199264345 }, "harness|ko_mmlu_formal_logic|5": { "acc": 
0.31746031746031744, "acc_stderr": 0.04163453031302859, "acc_norm": 0.31746031746031744, "acc_norm_stderr": 0.04163453031302859 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.3954248366013072, "acc_stderr": 0.02799672318063146, "acc_norm": 0.3954248366013072, "acc_norm_stderr": 0.02799672318063146 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.42, "acc_stderr": 0.04960449637488584, "acc_norm": 0.42, "acc_norm_stderr": 0.04960449637488584 }, "harness|ko_mmlu_international_law|5": { "acc": 0.628099173553719, "acc_stderr": 0.04412015806624504, "acc_norm": 0.628099173553719, "acc_norm_stderr": 0.04412015806624504 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.42105263157894735, "acc_stderr": 0.040179012759817494, "acc_norm": 0.42105263157894735, "acc_norm_stderr": 0.040179012759817494 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3888888888888889, "acc_stderr": 0.01972205893961807, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.01972205893961807 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3262411347517731, "acc_stderr": 0.027968453043563168, "acc_norm": 0.3262411347517731, "acc_norm_stderr": 0.027968453043563168 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.22321428571428573, "acc_stderr": 0.039523019677025116, "acc_norm": 0.22321428571428573, "acc_norm_stderr": 0.039523019677025116 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3611111111111111, "acc_stderr": 0.03275773486100999, "acc_norm": 0.3611111111111111, "acc_norm_stderr": 0.03275773486100999 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.4632352941176471, "acc_stderr": 0.030290619180485687, "acc_norm": 0.4632352941176471, "acc_norm_stderr": 0.030290619180485687 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.4857142857142857, "acc_stderr": 0.03199615232806287, "acc_norm": 0.4857142857142857, "acc_norm_stderr": 0.03199615232806287 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.6371308016877637, "acc_stderr": 0.03129920825530213, "acc_norm": 0.6371308016877637, "acc_norm_stderr": 0.03129920825530213 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.34876140808344197, "acc_stderr": 0.012172035157127113, "acc_norm": 0.34876140808344197, "acc_norm_stderr": 0.012172035157127113 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.5245098039215687, "acc_stderr": 0.035050931943487976, "acc_norm": 0.5245098039215687, "acc_norm_stderr": 0.035050931943487976 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5393939393939394, "acc_stderr": 0.03892207016552012, "acc_norm": 0.5393939393939394, "acc_norm_stderr": 0.03892207016552012 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.26805385556915545, "mc1_stderr": 0.015506204722834559, "mc2": 0.4231914841328799, "mc2_stderr": 0.014828818370873126 }, "harness|ko_commongen_v2|2": { "acc": 0.4946871310507674, "acc_stderr": 0.01718938362722969, "acc_norm": 0.5667060212514758, "acc_norm_stderr": 0.017036683641893098 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, 
"harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "HumanF-MarkrAI/pub-llama-13B-v5", "model_sha": "1f872cab411ce3259a7fc23816b8bce1ca67f4b7", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }