{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.318259385665529,
            "acc_stderr": 0.013611993916971451,
            "acc_norm": 0.3583617747440273,
            "acc_norm_stderr": 0.01401288333485986
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.368352917745469,
            "acc_stderr": 0.00481371995282996,
            "acc_norm": 0.46265684126667994,
            "acc_norm_stderr": 0.0049758453350866195
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.40350877192982454,
            "acc_stderr": 0.03762738699917055,
            "acc_norm": 0.40350877192982454,
            "acc_norm_stderr": 0.03762738699917055
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.4368932038834951,
            "acc_stderr": 0.04911147107365777,
            "acc_norm": 0.4368932038834951,
            "acc_norm_stderr": 0.04911147107365777
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.44061302681992337,
            "acc_stderr": 0.017753396973908486,
            "acc_norm": 0.44061302681992337,
            "acc_norm_stderr": 0.017753396973908486
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4,
            "acc_stderr": 0.04232073695151589,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.04232073695151589
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.32340425531914896,
            "acc_stderr": 0.030579442773610334,
            "acc_norm": 0.32340425531914896,
            "acc_norm_stderr": 0.030579442773610334
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3493975903614458,
            "acc_stderr": 0.0371172519074075,
            "acc_norm": 0.3493975903614458,
            "acc_norm_stderr": 0.0371172519074075
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.43086816720257237,
            "acc_stderr": 0.02812534098397271,
            "acc_norm": 0.43086816720257237,
            "acc_norm_stderr": 0.02812534098397271
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.31390134529147984,
            "acc_stderr": 0.03114679648297246,
            "acc_norm": 0.31390134529147984,
            "acc_norm_stderr": 0.03114679648297246
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4351145038167939,
            "acc_stderr": 0.04348208051644858,
            "acc_norm": 0.4351145038167939,
            "acc_norm_stderr": 0.04348208051644858
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.4898989898989899,
            "acc_stderr": 0.035616254886737454,
            "acc_norm": 0.4898989898989899,
            "acc_norm_stderr": 0.035616254886737454
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.3793103448275862,
            "acc_stderr": 0.04043461861916747,
            "acc_norm": 0.3793103448275862,
            "acc_norm_stderr": 0.04043461861916747
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.27450980392156865,
            "acc_stderr": 0.04440521906179327,
            "acc_norm": 0.27450980392156865,
            "acc_norm_stderr": 0.04440521906179327
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.35714285714285715,
            "acc_stderr": 0.031124619309328177,
            "acc_norm": 0.35714285714285715,
            "acc_norm_stderr": 0.031124619309328177
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.38974358974358975,
            "acc_stderr": 0.024726967886647078,
            "acc_norm": 0.38974358974358975,
            "acc_norm_stderr": 0.024726967886647078
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.53,
            "acc_stderr": 0.05016135580465919,
            "acc_norm": 0.53,
            "acc_norm_stderr": 0.05016135580465919
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.19,
            "acc_stderr": 0.039427724440366234,
            "acc_norm": 0.19,
            "acc_norm_stderr": 0.039427724440366234
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.4074074074074074,
            "acc_stderr": 0.04750077341199985,
            "acc_norm": 0.4074074074074074,
            "acc_norm_stderr": 0.04750077341199985
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.33497536945812806,
            "acc_stderr": 0.033208527423483104,
            "acc_norm": 0.33497536945812806,
            "acc_norm_stderr": 0.033208527423483104
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.35161290322580646,
            "acc_stderr": 0.027162537826948458,
            "acc_norm": 0.35161290322580646,
            "acc_norm_stderr": 0.027162537826948458
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6666666666666666,
            "acc_stderr": 0.03088273697413866,
            "acc_norm": 0.6666666666666666,
            "acc_norm_stderr": 0.03088273697413866
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.35471698113207545,
            "acc_stderr": 0.029445175328199596,
            "acc_norm": 0.35471698113207545,
            "acc_norm_stderr": 0.029445175328199596
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4,
            "acc_stderr": 0.0469237132203465,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.0469237132203465
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.29259259259259257,
            "acc_stderr": 0.027738969632176088,
            "acc_norm": 0.29259259259259257,
            "acc_norm_stderr": 0.027738969632176088
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.31125827814569534,
            "acc_stderr": 0.03780445850526732,
            "acc_norm": 0.31125827814569534,
            "acc_norm_stderr": 0.03780445850526732
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.4626865671641791,
            "acc_stderr": 0.03525675167467974,
            "acc_norm": 0.4626865671641791,
            "acc_norm_stderr": 0.03525675167467974
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.34104046242774566,
            "acc_stderr": 0.03614665424180826,
            "acc_norm": 0.34104046242774566,
            "acc_norm_stderr": 0.03614665424180826
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.3253968253968254,
            "acc_stderr": 0.02413015829976262,
            "acc_norm": 0.3253968253968254,
            "acc_norm_stderr": 0.02413015829976262
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3194444444444444,
            "acc_stderr": 0.03899073687357335,
            "acc_norm": 0.3194444444444444,
            "acc_norm_stderr": 0.03899073687357335
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.36,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.49,
            "acc_stderr": 0.05024183937956912,
            "acc_norm": 0.49,
            "acc_norm_stderr": 0.05024183937956912
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.36416184971098264,
            "acc_stderr": 0.025906632631016127,
            "acc_norm": 0.36416184971098264,
            "acc_norm_stderr": 0.025906632631016127
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.44171779141104295,
            "acc_stderr": 0.03901591825836184,
            "acc_norm": 0.44171779141104295,
            "acc_norm_stderr": 0.03901591825836184
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.3734567901234568,
            "acc_stderr": 0.02691500301138015,
            "acc_norm": 0.3734567901234568,
            "acc_norm_stderr": 0.02691500301138015
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939098,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939098
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.37305699481865284,
            "acc_stderr": 0.03490205592048574,
            "acc_norm": 0.37305699481865284,
            "acc_norm_stderr": 0.03490205592048574
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.044346007015849245,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.044346007015849245
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.45688073394495415,
            "acc_stderr": 0.021357458785226206,
            "acc_norm": 0.45688073394495415,
            "acc_norm_stderr": 0.021357458785226206
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.36507936507936506,
            "acc_stderr": 0.04306241259127153,
            "acc_norm": 0.36507936507936506,
            "acc_norm_stderr": 0.04306241259127153
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.3464052287581699,
            "acc_stderr": 0.027245613047215362,
            "acc_norm": 0.3464052287581699,
            "acc_norm_stderr": 0.027245613047215362
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5206611570247934,
            "acc_stderr": 0.04560456086387235,
            "acc_norm": 0.5206611570247934,
            "acc_norm_stderr": 0.04560456086387235
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.34868421052631576,
            "acc_stderr": 0.03878139888797611,
            "acc_norm": 0.34868421052631576,
            "acc_norm_stderr": 0.03878139888797611
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3366013071895425,
            "acc_stderr": 0.019117213911495175,
            "acc_norm": 0.3366013071895425,
            "acc_norm_stderr": 0.019117213911495175
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.29432624113475175,
            "acc_stderr": 0.027187127011503803,
            "acc_norm": 0.29432624113475175,
            "acc_norm_stderr": 0.027187127011503803
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.25892857142857145,
            "acc_stderr": 0.04157751539865629,
            "acc_norm": 0.25892857142857145,
            "acc_norm_stderr": 0.04157751539865629
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.37962962962962965,
            "acc_stderr": 0.03309682581119035,
            "acc_norm": 0.37962962962962965,
            "acc_norm_stderr": 0.03309682581119035
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2435754189944134,
            "acc_stderr": 0.014355911964767867,
            "acc_norm": 0.2435754189944134,
            "acc_norm_stderr": 0.014355911964767867
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.26,
            "acc_stderr": 0.044084400227680794,
            "acc_norm": 0.26,
            "acc_norm_stderr": 0.044084400227680794
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.45,
            "acc_stderr": 0.05,
            "acc_norm": 0.45,
            "acc_norm_stderr": 0.05
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.4227941176470588,
            "acc_stderr": 0.03000856284500348,
            "acc_norm": 0.4227941176470588,
            "acc_norm_stderr": 0.03000856284500348
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.3510204081632653,
            "acc_stderr": 0.030555316755573637,
            "acc_norm": 0.3510204081632653,
            "acc_norm_stderr": 0.030555316755573637
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.45569620253164556,
            "acc_stderr": 0.03241920684693334,
            "acc_norm": 0.45569620253164556,
            "acc_norm_stderr": 0.03241920684693334
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.2835723598435463,
            "acc_stderr": 0.011511900775968302,
            "acc_norm": 0.2835723598435463,
            "acc_norm_stderr": 0.011511900775968302
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.38235294117647056,
            "acc_stderr": 0.03410785338904719,
            "acc_norm": 0.38235294117647056,
            "acc_norm_stderr": 0.03410785338904719
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.4121212121212121,
            "acc_stderr": 0.03843566993588717,
            "acc_norm": 0.4121212121212121,
            "acc_norm_stderr": 0.03843566993588717
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.29008567931456547,
            "mc1_stderr": 0.01588623687420952,
            "mc2": 0.459471439183592,
            "mc2_stderr": 0.016149154578981872
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.345926800472255,
            "acc_stderr": 0.016353853414347568,
            "acc_norm": 0.3624557260920897,
            "acc_norm_stderr": 0.016527131240453713
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "caisarl76/Mistral-7B-v0.1-orca_platy-1k-ep4",
        "model_sha": "e3e91aad9d307bf43b516f95440a35a1db3e1c68",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}