{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.40955631399317405, "acc_stderr": 0.014370358632472427, "acc_norm": 0.4667235494880546, "acc_norm_stderr": 0.014578995859605814 }, "harness|ko_hellaswag|10": { "acc": 0.4279028082055367, "acc_stderr": 0.004937635112830286, "acc_norm": 0.5790679147580163, "acc_norm_stderr": 0.004926996830194243 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.45614035087719296, "acc_stderr": 0.03820042586602966, "acc_norm": 0.45614035087719296, "acc_norm_stderr": 0.03820042586602966 }, "harness|ko_mmlu_management|5": { "acc": 0.5533980582524272, "acc_stderr": 0.04922424153458933, "acc_norm": 0.5533980582524272, "acc_norm_stderr": 0.04922424153458933 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.545338441890166, "acc_stderr": 0.0178063045850526, "acc_norm": 0.545338441890166, "acc_norm_stderr": 0.0178063045850526 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.45185185185185184, "acc_stderr": 0.04299268905480863, "acc_norm": 0.45185185185185184, "acc_norm_stderr": 0.04299268905480863 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.42127659574468085, "acc_stderr": 0.03227834510146267, "acc_norm": 0.42127659574468085, "acc_norm_stderr": 0.03227834510146267 }, "harness|ko_mmlu_virology|5": { "acc": 0.43373493975903615, "acc_stderr": 0.03858158940685515, "acc_norm": 0.43373493975903615, "acc_norm_stderr": 0.03858158940685515 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.4887459807073955, "acc_stderr": 0.028390897396863526, "acc_norm": 0.4887459807073955, "acc_norm_stderr": 0.028390897396863526 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.5515695067264574, "acc_stderr": 0.03337883736255098, "acc_norm": 0.5515695067264574, "acc_norm_stderr": 0.03337883736255098 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.4580152671755725, "acc_stderr": 0.04369802690578756, "acc_norm": 0.4580152671755725, "acc_norm_stderr": 0.04369802690578756 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.5757575757575758, "acc_stderr": 0.035212249088415866, "acc_norm": 0.5757575757575758, "acc_norm_stderr": 0.035212249088415866 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.41379310344827586, "acc_stderr": 0.04104269211806232, "acc_norm": 0.41379310344827586, "acc_norm_stderr": 0.04104269211806232 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.04220773659171452, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.04220773659171452 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.46638655462184875, "acc_stderr": 0.03240501447690071, "acc_norm": 0.46638655462184875, "acc_norm_stderr": 0.03240501447690071 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.4205128205128205, "acc_stderr": 0.025028610276710852, "acc_norm": 0.4205128205128205, "acc_norm_stderr": 0.025028610276710852 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.5370370370370371, "acc_stderr": 0.04820403072760627, "acc_norm": 0.5370370370370371, 
"acc_norm_stderr": 0.04820403072760627 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.39408866995073893, "acc_stderr": 0.03438157967036545, "acc_norm": 0.39408866995073893, "acc_norm_stderr": 0.03438157967036545 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.44193548387096776, "acc_stderr": 0.028251557906849734, "acc_norm": 0.44193548387096776, "acc_norm_stderr": 0.028251557906849734 }, "harness|ko_mmlu_marketing|5": { "acc": 0.6452991452991453, "acc_stderr": 0.031342504862454025, "acc_norm": 0.6452991452991453, "acc_norm_stderr": 0.031342504862454025 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.4490566037735849, "acc_stderr": 0.030612730713641092, "acc_norm": 0.4490566037735849, "acc_norm_stderr": 0.030612730713641092 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.5545454545454546, "acc_stderr": 0.047605488214603246, "acc_norm": 0.5545454545454546, "acc_norm_stderr": 0.047605488214603246 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.26666666666666666, "acc_stderr": 0.026962424325073828, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.026962424325073828 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.31125827814569534, "acc_stderr": 0.03780445850526732, "acc_norm": 0.31125827814569534, "acc_norm_stderr": 0.03780445850526732 }, "harness|ko_mmlu_sociology|5": { "acc": 0.6169154228855721, "acc_stderr": 0.034375193373382504, "acc_norm": 0.6169154228855721, "acc_norm_stderr": 0.034375193373382504 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.37572254335260113, "acc_stderr": 0.03692820767264867, "acc_norm": 0.37572254335260113, "acc_norm_stderr": 0.03692820767264867 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.25132275132275134, "acc_stderr": 0.022340482339643898, "acc_norm": 0.25132275132275134, "acc_norm_stderr": 0.022340482339643898 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.4097222222222222, "acc_stderr": 0.04112490974670787, "acc_norm": 0.4097222222222222, "acc_norm_stderr": 0.04112490974670787 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.62, "acc_stderr": 0.048783173121456344, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456344 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.48554913294797686, "acc_stderr": 0.026907849856282542, "acc_norm": 0.48554913294797686, "acc_norm_stderr": 0.026907849856282542 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.50920245398773, "acc_stderr": 0.03927705600787443, "acc_norm": 0.50920245398773, "acc_norm_stderr": 0.03927705600787443 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.49382716049382713, "acc_stderr": 0.027818623962583295, "acc_norm": 0.49382716049382713, "acc_norm_stderr": 0.027818623962583295 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.04688261722621505, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621505 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.5181347150259067, "acc_stderr": 0.036060650018329185, "acc_norm": 0.5181347150259067, "acc_norm_stderr": 0.036060650018329185 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2894736842105263, "acc_stderr": 0.04266339443159394, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.04266339443159394 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.5834862385321101, "acc_stderr": 0.021136376504030874, "acc_norm": 0.5834862385321101, "acc_norm_stderr": 0.021136376504030874 }, 
"harness|ko_mmlu_formal_logic|5": { "acc": 0.3253968253968254, "acc_stderr": 0.041905964388711366, "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.041905964388711366 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.39869281045751637, "acc_stderr": 0.028036092273891776, "acc_norm": 0.39869281045751637, "acc_norm_stderr": 0.028036092273891776 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|ko_mmlu_international_law|5": { "acc": 0.6446280991735537, "acc_stderr": 0.0436923632657398, "acc_norm": 0.6446280991735537, "acc_norm_stderr": 0.0436923632657398 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.40789473684210525, "acc_stderr": 0.039993097127774734, "acc_norm": 0.40789473684210525, "acc_norm_stderr": 0.039993097127774734 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.3888888888888889, "acc_stderr": 0.019722058939618068, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.019722058939618068 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.3191489361702128, "acc_stderr": 0.027807990141320186, "acc_norm": 0.3191489361702128, "acc_norm_stderr": 0.027807990141320186 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.23214285714285715, "acc_stderr": 0.04007341809755806, "acc_norm": 0.23214285714285715, "acc_norm_stderr": 0.04007341809755806 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.3148148148148148, "acc_stderr": 0.0316746870682898, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.0316746870682898 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.01433352205921789, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.01433352205921789 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.3897058823529412, "acc_stderr": 0.029624663581159685, "acc_norm": 0.3897058823529412, "acc_norm_stderr": 0.029624663581159685 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.49795918367346936, "acc_stderr": 0.0320089533497105, "acc_norm": 0.49795918367346936, "acc_norm_stderr": 0.0320089533497105 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.6582278481012658, "acc_stderr": 0.030874537537553617, "acc_norm": 0.6582278481012658, "acc_norm_stderr": 0.030874537537553617 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.3350717079530639, "acc_stderr": 0.012055499471330371, "acc_norm": 0.3350717079530639, "acc_norm_stderr": 0.012055499471330371 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.5294117647058824, "acc_stderr": 0.03503235296367992, "acc_norm": 0.5294117647058824, "acc_norm_stderr": 0.03503235296367992 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.5333333333333333, "acc_stderr": 0.03895658065271847, "acc_norm": 0.5333333333333333, "acc_norm_stderr": 0.03895658065271847 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.28518971848225216, "mc1_stderr": 0.015805827874454892, "mc2": 0.46011374521819187, "mc2_stderr": 0.014998848301007965 }, "harness|ko_commongen_v2|2": { "acc": 0.47461629279811096, "acc_stderr": 0.01716818720142925, "acc_norm": 0.5726092089728453, "acc_norm_stderr": 0.017008129844823156 } }, "versions": { "all": 0, "harness|ko_arc_challenge|25": 0, 
"harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.20", "model_sha": "ea626222a55229e517e4c9f75ba9bbd64cd892a5", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }