{ "results": { "harness|ko_arc_challenge|25": { "acc": 0.27047781569965873, "acc_stderr": 0.012980954547659556, "acc_norm": 0.3319112627986348, "acc_norm_stderr": 0.013760988200880541 }, "harness|ko_hellaswag|10": { "acc": 0.3505277833100976, "acc_stderr": 0.004761601303258889, "acc_norm": 0.44722166899024096, "acc_norm_stderr": 0.0049619049491713965 }, "harness|ko_mmlu_world_religions|5": { "acc": 0.32748538011695905, "acc_stderr": 0.03599335771456027, "acc_norm": 0.32748538011695905, "acc_norm_stderr": 0.03599335771456027 }, "harness|ko_mmlu_management|5": { "acc": 0.3106796116504854, "acc_stderr": 0.04582124160161549, "acc_norm": 0.3106796116504854, "acc_norm_stderr": 0.04582124160161549 }, "harness|ko_mmlu_miscellaneous|5": { "acc": 0.351213282247765, "acc_stderr": 0.01706998205149943, "acc_norm": 0.351213282247765, "acc_norm_stderr": 0.01706998205149943 }, "harness|ko_mmlu_anatomy|5": { "acc": 0.31851851851851853, "acc_stderr": 0.040247784019771096, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.040247784019771096 }, "harness|ko_mmlu_abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|ko_mmlu_conceptual_physics|5": { "acc": 0.3659574468085106, "acc_stderr": 0.031489558297455304, "acc_norm": 0.3659574468085106, "acc_norm_stderr": 0.031489558297455304 }, "harness|ko_mmlu_virology|5": { "acc": 0.3493975903614458, "acc_stderr": 0.0371172519074075, "acc_norm": 0.3493975903614458, "acc_norm_stderr": 0.0371172519074075 }, "harness|ko_mmlu_philosophy|5": { "acc": 0.31511254019292606, "acc_stderr": 0.026385273703464496, "acc_norm": 0.31511254019292606, "acc_norm_stderr": 0.026385273703464496 }, "harness|ko_mmlu_human_aging|5": { "acc": 0.4125560538116592, "acc_stderr": 0.03304062175449297, "acc_norm": 0.4125560538116592, "acc_norm_stderr": 0.03304062175449297 }, "harness|ko_mmlu_human_sexuality|5": { "acc": 0.29770992366412213, "acc_stderr": 0.040103589424622034, "acc_norm": 0.29770992366412213, "acc_norm_stderr": 0.040103589424622034 }, "harness|ko_mmlu_medical_genetics|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|ko_mmlu_high_school_geography|5": { "acc": 0.3333333333333333, "acc_stderr": 0.03358618145732524, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.03358618145732524 }, "harness|ko_mmlu_electrical_engineering|5": { "acc": 0.3448275862068966, "acc_stderr": 0.03960933549451207, "acc_norm": 0.3448275862068966, "acc_norm_stderr": 0.03960933549451207 }, "harness|ko_mmlu_college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237655, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237655 }, "harness|ko_mmlu_high_school_microeconomics|5": { "acc": 0.27310924369747897, "acc_stderr": 0.028942004040998167, "acc_norm": 0.27310924369747897, "acc_norm_stderr": 0.028942004040998167 }, "harness|ko_mmlu_high_school_macroeconomics|5": { "acc": 0.2717948717948718, "acc_stderr": 0.02255655101013235, "acc_norm": 0.2717948717948718, "acc_norm_stderr": 0.02255655101013235 }, "harness|ko_mmlu_computer_security|5": { "acc": 0.26, "acc_stderr": 0.04408440022768078, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078 }, "harness|ko_mmlu_global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|ko_mmlu_jurisprudence|5": { "acc": 0.3055555555555556, "acc_stderr": 0.044531975073749834, "acc_norm": 
0.3055555555555556, "acc_norm_stderr": 0.044531975073749834 }, "harness|ko_mmlu_high_school_chemistry|5": { "acc": 0.23645320197044334, "acc_stderr": 0.029896114291733552, "acc_norm": 0.23645320197044334, "acc_norm_stderr": 0.029896114291733552 }, "harness|ko_mmlu_high_school_biology|5": { "acc": 0.3161290322580645, "acc_stderr": 0.026450874489042767, "acc_norm": 0.3161290322580645, "acc_norm_stderr": 0.026450874489042767 }, "harness|ko_mmlu_marketing|5": { "acc": 0.4658119658119658, "acc_stderr": 0.03267942734081228, "acc_norm": 0.4658119658119658, "acc_norm_stderr": 0.03267942734081228 }, "harness|ko_mmlu_clinical_knowledge|5": { "acc": 0.3320754716981132, "acc_stderr": 0.028985455652334395, "acc_norm": 0.3320754716981132, "acc_norm_stderr": 0.028985455652334395 }, "harness|ko_mmlu_public_relations|5": { "acc": 0.39090909090909093, "acc_stderr": 0.04673752333670237, "acc_norm": 0.39090909090909093, "acc_norm_stderr": 0.04673752333670237 }, "harness|ko_mmlu_high_school_mathematics|5": { "acc": 0.22962962962962963, "acc_stderr": 0.025644108639267645, "acc_norm": 0.22962962962962963, "acc_norm_stderr": 0.025644108639267645 }, "harness|ko_mmlu_high_school_physics|5": { "acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696 }, "harness|ko_mmlu_sociology|5": { "acc": 0.3333333333333333, "acc_stderr": 0.03333333333333336, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.03333333333333336 }, "harness|ko_mmlu_college_medicine|5": { "acc": 0.2658959537572254, "acc_stderr": 0.033687629322594295, "acc_norm": 0.2658959537572254, "acc_norm_stderr": 0.033687629322594295 }, "harness|ko_mmlu_elementary_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.022569897074918428, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.022569897074918428 }, "harness|ko_mmlu_college_biology|5": { "acc": 0.2777777777777778, "acc_stderr": 0.037455547914624576, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.037455547914624576 }, "harness|ko_mmlu_college_chemistry|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|ko_mmlu_us_foreign_policy|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|ko_mmlu_moral_disputes|5": { "acc": 0.28901734104046245, "acc_stderr": 0.02440517393578323, "acc_norm": 0.28901734104046245, "acc_norm_stderr": 0.02440517393578323 }, "harness|ko_mmlu_logical_fallacies|5": { "acc": 0.2822085889570552, "acc_stderr": 0.03536117886664743, "acc_norm": 0.2822085889570552, "acc_norm_stderr": 0.03536117886664743 }, "harness|ko_mmlu_prehistory|5": { "acc": 0.31790123456790126, "acc_stderr": 0.025910063528240865, "acc_norm": 0.31790123456790126, "acc_norm_stderr": 0.025910063528240865 }, "harness|ko_mmlu_college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|ko_mmlu_high_school_government_and_politics|5": { "acc": 0.2694300518134715, "acc_stderr": 0.032018671228777947, "acc_norm": 0.2694300518134715, "acc_norm_stderr": 0.032018671228777947 }, "harness|ko_mmlu_econometrics|5": { "acc": 0.2719298245614035, "acc_stderr": 0.04185774424022056, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.04185774424022056 }, "harness|ko_mmlu_high_school_psychology|5": { "acc": 0.3155963302752294, "acc_stderr": 0.019926117513869666, "acc_norm": 0.3155963302752294, "acc_norm_stderr": 
0.019926117513869666 }, "harness|ko_mmlu_formal_logic|5": { "acc": 0.20634920634920634, "acc_stderr": 0.0361960452412425, "acc_norm": 0.20634920634920634, "acc_norm_stderr": 0.0361960452412425 }, "harness|ko_mmlu_nutrition|5": { "acc": 0.3006535947712418, "acc_stderr": 0.026256053835718964, "acc_norm": 0.3006535947712418, "acc_norm_stderr": 0.026256053835718964 }, "harness|ko_mmlu_business_ethics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|ko_mmlu_international_law|5": { "acc": 0.38016528925619836, "acc_stderr": 0.04431324501968431, "acc_norm": 0.38016528925619836, "acc_norm_stderr": 0.04431324501968431 }, "harness|ko_mmlu_astronomy|5": { "acc": 0.2631578947368421, "acc_stderr": 0.035834961763610625, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.035834961763610625 }, "harness|ko_mmlu_professional_psychology|5": { "acc": 0.2565359477124183, "acc_stderr": 0.01766784161237899, "acc_norm": 0.2565359477124183, "acc_norm_stderr": 0.01766784161237899 }, "harness|ko_mmlu_professional_accounting|5": { "acc": 0.24113475177304963, "acc_stderr": 0.025518731049537773, "acc_norm": 0.24113475177304963, "acc_norm_stderr": 0.025518731049537773 }, "harness|ko_mmlu_machine_learning|5": { "acc": 0.3392857142857143, "acc_stderr": 0.044939490686135404, "acc_norm": 0.3392857142857143, "acc_norm_stderr": 0.044939490686135404 }, "harness|ko_mmlu_high_school_statistics|5": { "acc": 0.23148148148148148, "acc_stderr": 0.028765111718046972, "acc_norm": 0.23148148148148148, "acc_norm_stderr": 0.028765111718046972 }, "harness|ko_mmlu_moral_scenarios|5": { "acc": 0.22793296089385476, "acc_stderr": 0.014030149950805095, "acc_norm": 0.22793296089385476, "acc_norm_stderr": 0.014030149950805095 }, "harness|ko_mmlu_college_computer_science|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|ko_mmlu_high_school_computer_science|5": { "acc": 0.26, "acc_stderr": 0.04408440022768079, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768079 }, "harness|ko_mmlu_professional_medicine|5": { "acc": 0.3492647058823529, "acc_stderr": 0.02895975519682486, "acc_norm": 0.3492647058823529, "acc_norm_stderr": 0.02895975519682486 }, "harness|ko_mmlu_security_studies|5": { "acc": 0.19183673469387755, "acc_stderr": 0.025206963154225395, "acc_norm": 0.19183673469387755, "acc_norm_stderr": 0.025206963154225395 }, "harness|ko_mmlu_high_school_world_history|5": { "acc": 0.3037974683544304, "acc_stderr": 0.0299366963871386, "acc_norm": 0.3037974683544304, "acc_norm_stderr": 0.0299366963871386 }, "harness|ko_mmlu_professional_law|5": { "acc": 0.2666232073011734, "acc_stderr": 0.01129383603161213, "acc_norm": 0.2666232073011734, "acc_norm_stderr": 0.01129383603161213 }, "harness|ko_mmlu_high_school_us_history|5": { "acc": 0.3235294117647059, "acc_stderr": 0.03283472056108567, "acc_norm": 0.3235294117647059, "acc_norm_stderr": 0.03283472056108567 }, "harness|ko_mmlu_high_school_european_history|5": { "acc": 0.30303030303030304, "acc_stderr": 0.035886248000917075, "acc_norm": 0.30303030303030304, "acc_norm_stderr": 0.035886248000917075 }, "harness|ko_truthfulqa_mc|0": { "mc1": 0.27539779681762544, "mc1_stderr": 0.015638135667775523, "mc2": 0.4297360873033464, "mc2_stderr": 0.016304548005749996 }, "harness|ko_commongen_v2|2": { "acc": 0.23258559622195984, "acc_stderr": 0.014525169182416493, "acc_norm": 0.27508854781582054, "acc_norm_stderr": 0.015353010757952649 } }, "versions": { "all": 0, 
"harness|ko_arc_challenge|25": 0, "harness|ko_hellaswag|10": 0, "harness|ko_mmlu_world_religions|5": 1, "harness|ko_mmlu_management|5": 1, "harness|ko_mmlu_miscellaneous|5": 1, "harness|ko_mmlu_anatomy|5": 1, "harness|ko_mmlu_abstract_algebra|5": 1, "harness|ko_mmlu_conceptual_physics|5": 1, "harness|ko_mmlu_virology|5": 1, "harness|ko_mmlu_philosophy|5": 1, "harness|ko_mmlu_human_aging|5": 1, "harness|ko_mmlu_human_sexuality|5": 1, "harness|ko_mmlu_medical_genetics|5": 1, "harness|ko_mmlu_high_school_geography|5": 1, "harness|ko_mmlu_electrical_engineering|5": 1, "harness|ko_mmlu_college_physics|5": 1, "harness|ko_mmlu_high_school_microeconomics|5": 1, "harness|ko_mmlu_high_school_macroeconomics|5": 1, "harness|ko_mmlu_computer_security|5": 1, "harness|ko_mmlu_global_facts|5": 1, "harness|ko_mmlu_jurisprudence|5": 1, "harness|ko_mmlu_high_school_chemistry|5": 1, "harness|ko_mmlu_high_school_biology|5": 1, "harness|ko_mmlu_marketing|5": 1, "harness|ko_mmlu_clinical_knowledge|5": 1, "harness|ko_mmlu_public_relations|5": 1, "harness|ko_mmlu_high_school_mathematics|5": 1, "harness|ko_mmlu_high_school_physics|5": 1, "harness|ko_mmlu_sociology|5": 1, "harness|ko_mmlu_college_medicine|5": 1, "harness|ko_mmlu_elementary_mathematics|5": 1, "harness|ko_mmlu_college_biology|5": 1, "harness|ko_mmlu_college_chemistry|5": 1, "harness|ko_mmlu_us_foreign_policy|5": 1, "harness|ko_mmlu_moral_disputes|5": 1, "harness|ko_mmlu_logical_fallacies|5": 1, "harness|ko_mmlu_prehistory|5": 1, "harness|ko_mmlu_college_mathematics|5": 1, "harness|ko_mmlu_high_school_government_and_politics|5": 1, "harness|ko_mmlu_econometrics|5": 1, "harness|ko_mmlu_high_school_psychology|5": 1, "harness|ko_mmlu_formal_logic|5": 1, "harness|ko_mmlu_nutrition|5": 1, "harness|ko_mmlu_business_ethics|5": 1, "harness|ko_mmlu_international_law|5": 1, "harness|ko_mmlu_astronomy|5": 1, "harness|ko_mmlu_professional_psychology|5": 1, "harness|ko_mmlu_professional_accounting|5": 1, "harness|ko_mmlu_machine_learning|5": 1, "harness|ko_mmlu_high_school_statistics|5": 1, "harness|ko_mmlu_moral_scenarios|5": 1, "harness|ko_mmlu_college_computer_science|5": 1, "harness|ko_mmlu_high_school_computer_science|5": 1, "harness|ko_mmlu_professional_medicine|5": 1, "harness|ko_mmlu_security_studies|5": 1, "harness|ko_mmlu_high_school_world_history|5": 1, "harness|ko_mmlu_professional_law|5": 1, "harness|ko_mmlu_high_school_us_history|5": 1, "harness|ko_mmlu_high_school_european_history|5": 1, "harness|ko_truthfulqa_mc|0": 0, "harness|ko_commongen_v2|2": 1 }, "config_general": { "model_name": "Jaewoo1/KoT-Platypus2_foundation", "model_sha": "7e97a65b825f9aa4691fe2bebf14696d80ba831d", "model_dtype": "torch.float16", "lighteval_sha": "", "num_few_shot_default": 0, "num_fewshot_seeds": 1, "override_batch_size": 1, "max_samples": null } }