{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.36177474402730375,
            "acc_stderr": 0.014041957945038075,
            "acc_norm": 0.4121160409556314,
            "acc_norm_stderr": 0.014383915302225402
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.3858793069109739,
            "acc_stderr": 0.004858074013443988,
            "acc_norm": 0.4956184027086238,
            "acc_norm_stderr": 0.004989589816180235
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4502923976608187,
            "acc_stderr": 0.038158273659132366,
            "acc_norm": 0.4502923976608187,
            "acc_norm_stderr": 0.038158273659132366
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5631067961165048,
            "acc_stderr": 0.04911147107365777,
            "acc_norm": 0.5631067961165048,
            "acc_norm_stderr": 0.04911147107365777
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.47381864623243936,
            "acc_stderr": 0.017855434554041982,
            "acc_norm": 0.47381864623243936,
            "acc_norm_stderr": 0.017855434554041982
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.37037037037037035,
            "acc_stderr": 0.04171654161354544,
            "acc_norm": 0.37037037037037035,
            "acc_norm_stderr": 0.04171654161354544
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.26,
            "acc_stderr": 0.0440844002276808,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.0440844002276808
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3659574468085106,
            "acc_stderr": 0.0314895582974553,
            "acc_norm": 0.3659574468085106,
            "acc_norm_stderr": 0.0314895582974553
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3674698795180723,
            "acc_stderr": 0.03753267402120574,
            "acc_norm": 0.3674698795180723,
            "acc_norm_stderr": 0.03753267402120574
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.4758842443729904,
            "acc_stderr": 0.028365041542564577,
            "acc_norm": 0.4758842443729904,
            "acc_norm_stderr": 0.028365041542564577
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.43946188340807174,
            "acc_stderr": 0.03331092511038179,
            "acc_norm": 0.43946188340807174,
            "acc_norm_stderr": 0.03331092511038179
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.42748091603053434,
            "acc_stderr": 0.04338920305792401,
            "acc_norm": 0.42748091603053434,
            "acc_norm_stderr": 0.04338920305792401
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.46,
            "acc_stderr": 0.05009082659620332,
            "acc_norm": 0.46,
            "acc_norm_stderr": 0.05009082659620332
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5858585858585859,
            "acc_stderr": 0.035094383488796295,
            "acc_norm": 0.5858585858585859,
            "acc_norm_stderr": 0.035094383488796295
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4206896551724138,
            "acc_stderr": 0.0411391498118926,
            "acc_norm": 0.4206896551724138,
            "acc_norm_stderr": 0.0411391498118926
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.21568627450980393,
            "acc_stderr": 0.04092563958237655,
            "acc_norm": 0.21568627450980393,
            "acc_norm_stderr": 0.04092563958237655
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.4831932773109244,
            "acc_stderr": 0.03246013680375308,
            "acc_norm": 0.4831932773109244,
            "acc_norm_stderr": 0.03246013680375308
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4512820512820513,
            "acc_stderr": 0.025230381238934833,
            "acc_norm": 0.4512820512820513,
            "acc_norm_stderr": 0.025230381238934833
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.65,
            "acc_stderr": 0.04793724854411021,
            "acc_norm": 0.65,
            "acc_norm_stderr": 0.04793724854411021
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.33,
            "acc_stderr": 0.047258156262526045,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.047258156262526045
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5462962962962963,
            "acc_stderr": 0.048129173245368216,
            "acc_norm": 0.5462962962962963,
            "acc_norm_stderr": 0.048129173245368216
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.3891625615763547,
            "acc_stderr": 0.03430462416103872,
            "acc_norm": 0.3891625615763547,
            "acc_norm_stderr": 0.03430462416103872
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.43548387096774194,
            "acc_stderr": 0.028206225591502737,
            "acc_norm": 0.43548387096774194,
            "acc_norm_stderr": 0.028206225591502737
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.717948717948718,
            "acc_stderr": 0.029480360549541194,
            "acc_norm": 0.717948717948718,
            "acc_norm_stderr": 0.029480360549541194
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.43018867924528303,
            "acc_stderr": 0.03047144586718323,
            "acc_norm": 0.43018867924528303,
            "acc_norm_stderr": 0.03047144586718323
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4909090909090909,
            "acc_stderr": 0.04788339768702861,
            "acc_norm": 0.4909090909090909,
            "acc_norm_stderr": 0.04788339768702861
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.2814814814814815,
            "acc_stderr": 0.027420019350945277,
            "acc_norm": 0.2814814814814815,
            "acc_norm_stderr": 0.027420019350945277
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.304635761589404,
            "acc_stderr": 0.03757949922943343,
            "acc_norm": 0.304635761589404,
            "acc_norm_stderr": 0.03757949922943343
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5671641791044776,
            "acc_stderr": 0.03503490923673281,
            "acc_norm": 0.5671641791044776,
            "acc_norm_stderr": 0.03503490923673281
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.35260115606936415,
            "acc_stderr": 0.03643037168958548,
            "acc_norm": 0.35260115606936415,
            "acc_norm_stderr": 0.03643037168958548
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.36772486772486773,
            "acc_stderr": 0.024833839825562413,
            "acc_norm": 0.36772486772486773,
            "acc_norm_stderr": 0.024833839825562413
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3680555555555556,
            "acc_stderr": 0.04032999053960718,
            "acc_norm": 0.3680555555555556,
            "acc_norm_stderr": 0.04032999053960718
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.38,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.55,
            "acc_stderr": 0.05,
            "acc_norm": 0.55,
            "acc_norm_stderr": 0.05
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.49710982658959535,
            "acc_stderr": 0.02691864538323901,
            "acc_norm": 0.49710982658959535,
            "acc_norm_stderr": 0.02691864538323901
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4785276073619632,
            "acc_stderr": 0.03924746876751129,
            "acc_norm": 0.4785276073619632,
            "acc_norm_stderr": 0.03924746876751129
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.44135802469135804,
            "acc_stderr": 0.027628737155668773,
            "acc_norm": 0.44135802469135804,
            "acc_norm_stderr": 0.027628737155668773
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001974,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001974
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5025906735751295,
            "acc_stderr": 0.03608390745384487,
            "acc_norm": 0.5025906735751295,
            "acc_norm_stderr": 0.03608390745384487
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2719298245614035,
            "acc_stderr": 0.04185774424022056,
            "acc_norm": 0.2719298245614035,
            "acc_norm_stderr": 0.04185774424022056
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.4935779816513762,
            "acc_stderr": 0.02143555482001308,
            "acc_norm": 0.4935779816513762,
            "acc_norm_stderr": 0.02143555482001308
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.373015873015873,
            "acc_stderr": 0.04325506042017086,
            "acc_norm": 0.373015873015873,
            "acc_norm_stderr": 0.04325506042017086
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.4411764705882353,
            "acc_stderr": 0.028431095444176643,
            "acc_norm": 0.4411764705882353,
            "acc_norm_stderr": 0.028431095444176643
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.43,
            "acc_stderr": 0.049756985195624284,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.049756985195624284
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5619834710743802,
            "acc_stderr": 0.045291468044357915,
            "acc_norm": 0.5619834710743802,
            "acc_norm_stderr": 0.045291468044357915
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.4144736842105263,
            "acc_stderr": 0.04008973785779206,
            "acc_norm": 0.4144736842105263,
            "acc_norm_stderr": 0.04008973785779206
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.38235294117647056,
            "acc_stderr": 0.01965992249362334,
            "acc_norm": 0.38235294117647056,
            "acc_norm_stderr": 0.01965992249362334
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.028121636040639893,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.028121636040639893
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.4017857142857143,
            "acc_stderr": 0.04653333146973646,
            "acc_norm": 0.4017857142857143,
            "acc_norm_stderr": 0.04653333146973646
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.375,
            "acc_stderr": 0.033016908987210894,
            "acc_norm": 0.375,
            "acc_norm_stderr": 0.033016908987210894
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.23016759776536314,
            "acc_stderr": 0.01407833925342581,
            "acc_norm": 0.23016759776536314,
            "acc_norm_stderr": 0.01407833925342581
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.37,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.63,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.63,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.33088235294117646,
            "acc_stderr": 0.028582709753898445,
            "acc_norm": 0.33088235294117646,
            "acc_norm_stderr": 0.028582709753898445
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.4326530612244898,
            "acc_stderr": 0.03171752824062664,
            "acc_norm": 0.4326530612244898,
            "acc_norm_stderr": 0.03171752824062664
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5991561181434599,
            "acc_stderr": 0.03190080389473236,
            "acc_norm": 0.5991561181434599,
            "acc_norm_stderr": 0.03190080389473236
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.31681877444589307,
            "acc_stderr": 0.011882349954723016,
            "acc_norm": 0.31681877444589307,
            "acc_norm_stderr": 0.011882349954723016
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.46078431372549017,
            "acc_stderr": 0.03498501649369527,
            "acc_norm": 0.46078431372549017,
            "acc_norm_stderr": 0.03498501649369527
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.46060606060606063,
            "acc_stderr": 0.03892207016552013,
            "acc_norm": 0.46060606060606063,
            "acc_norm_stderr": 0.03892207016552013
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.29865361077111385,
            "mc1_stderr": 0.016021570613768542,
            "mc2": 0.4796330162483247,
            "mc2_stderr": 0.015594823470032292
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.48406139315230223,
            "acc_stderr": 0.017181617837190195,
            "acc_norm": 0.5230224321133412,
            "acc_norm_stderr": 0.017172121546727627
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "dltjdgh0928/test_instruction",
        "model_sha": "7850d81409e5abbe9170009f0b463eb25042313b",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}