{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.28668941979522183,
            "acc_stderr": 0.01321498632927477,
            "acc_norm": 0.3387372013651877,
            "acc_norm_stderr": 0.01383056892797433
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.344353714399522,
            "acc_stderr": 0.004741859753178415,
            "acc_norm": 0.4213304122684724,
            "acc_norm_stderr": 0.004927631806477553
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.3157894736842105,
            "acc_stderr": 0.03565079670708311,
            "acc_norm": 0.3157894736842105,
            "acc_norm_stderr": 0.03565079670708311
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.4174757281553398,
            "acc_stderr": 0.048828405482122375,
            "acc_norm": 0.4174757281553398,
            "acc_norm_stderr": 0.048828405482122375
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.3946360153256705,
            "acc_stderr": 0.017478464305911542,
            "acc_norm": 0.3946360153256705,
            "acc_norm_stderr": 0.017478464305911542
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.3037037037037037,
            "acc_stderr": 0.039725528847851375,
            "acc_norm": 0.3037037037037037,
            "acc_norm_stderr": 0.039725528847851375
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206824,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206824
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.3872340425531915,
            "acc_stderr": 0.03184389265339526,
            "acc_norm": 0.3872340425531915,
            "acc_norm_stderr": 0.03184389265339526
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3373493975903614,
            "acc_stderr": 0.03680783690727581,
            "acc_norm": 0.3373493975903614,
            "acc_norm_stderr": 0.03680783690727581
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.40514469453376206,
            "acc_stderr": 0.027882383791325946,
            "acc_norm": 0.40514469453376206,
            "acc_norm_stderr": 0.027882383791325946
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.38565022421524664,
            "acc_stderr": 0.03266842214289202,
            "acc_norm": 0.38565022421524664,
            "acc_norm_stderr": 0.03266842214289202
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.2824427480916031,
            "acc_stderr": 0.03948406125768362,
            "acc_norm": 0.2824427480916031,
            "acc_norm_stderr": 0.03948406125768362
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.39,
            "acc_stderr": 0.04902071300001975,
            "acc_norm": 0.39,
            "acc_norm_stderr": 0.04902071300001975
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.35353535353535354,
            "acc_stderr": 0.03406086723547155,
            "acc_norm": 0.35353535353535354,
            "acc_norm_stderr": 0.03406086723547155
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.32413793103448274,
            "acc_stderr": 0.03900432069185554,
            "acc_norm": 0.32413793103448274,
            "acc_norm_stderr": 0.03900432069185554
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.23529411764705882,
            "acc_stderr": 0.042207736591714534,
            "acc_norm": 0.23529411764705882,
            "acc_norm_stderr": 0.042207736591714534
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.3403361344537815,
            "acc_stderr": 0.030778057422931673,
            "acc_norm": 0.3403361344537815,
            "acc_norm_stderr": 0.030778057422931673
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.34615384615384615,
            "acc_stderr": 0.024121125416941183,
            "acc_norm": 0.34615384615384615,
            "acc_norm_stderr": 0.024121125416941183
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.51,
            "acc_stderr": 0.05024183937956912,
            "acc_norm": 0.51,
            "acc_norm_stderr": 0.05024183937956912
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.3888888888888889,
            "acc_stderr": 0.0471282125742677,
            "acc_norm": 0.3888888888888889,
            "acc_norm_stderr": 0.0471282125742677
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.2561576354679803,
            "acc_stderr": 0.0307127300709826,
            "acc_norm": 0.2561576354679803,
            "acc_norm_stderr": 0.0307127300709826
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.36774193548387096,
            "acc_stderr": 0.027430866579973474,
            "acc_norm": 0.36774193548387096,
            "acc_norm_stderr": 0.027430866579973474
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.5683760683760684,
            "acc_stderr": 0.0324483553531149,
            "acc_norm": 0.5683760683760684,
            "acc_norm_stderr": 0.0324483553531149
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.2981132075471698,
            "acc_stderr": 0.028152837942493854,
            "acc_norm": 0.2981132075471698,
            "acc_norm_stderr": 0.028152837942493854
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.4636363636363636,
            "acc_stderr": 0.047764491623961985,
            "acc_norm": 0.4636363636363636,
            "acc_norm_stderr": 0.047764491623961985
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3,
            "acc_stderr": 0.027940457136228416,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.027940457136228416
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2251655629139073,
            "acc_stderr": 0.03410435282008937,
            "acc_norm": 0.2251655629139073,
            "acc_norm_stderr": 0.03410435282008937
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.43283582089552236,
            "acc_stderr": 0.0350349092367328,
            "acc_norm": 0.43283582089552236,
            "acc_norm_stderr": 0.0350349092367328
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.2543352601156069,
            "acc_stderr": 0.0332055644308557,
            "acc_norm": 0.2543352601156069,
            "acc_norm_stderr": 0.0332055644308557
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.30158730158730157,
            "acc_stderr": 0.0236369759961018,
            "acc_norm": 0.30158730158730157,
            "acc_norm_stderr": 0.0236369759961018
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2847222222222222,
            "acc_stderr": 0.037738099906869334,
            "acc_norm": 0.2847222222222222,
            "acc_norm_stderr": 0.037738099906869334
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.27,
            "acc_stderr": 0.044619604333847394,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.044619604333847394
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.5,
            "acc_stderr": 0.050251890762960605,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.050251890762960605
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.3786127167630058,
            "acc_stderr": 0.026113749361310338,
            "acc_norm": 0.3786127167630058,
            "acc_norm_stderr": 0.026113749361310338
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.294478527607362,
            "acc_stderr": 0.03581165790474082,
            "acc_norm": 0.294478527607362,
            "acc_norm_stderr": 0.03581165790474082
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.33641975308641975,
            "acc_stderr": 0.026289734945952926,
            "acc_norm": 0.33641975308641975,
            "acc_norm_stderr": 0.026289734945952926
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.34196891191709844,
            "acc_stderr": 0.034234651001042816,
            "acc_norm": 0.34196891191709844,
            "acc_norm_stderr": 0.034234651001042816
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.23684210526315788,
            "acc_stderr": 0.039994238792813365,
            "acc_norm": 0.23684210526315788,
            "acc_norm_stderr": 0.039994238792813365
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3871559633027523,
            "acc_stderr": 0.020884231992643453,
            "acc_norm": 0.3871559633027523,
            "acc_norm_stderr": 0.020884231992643453
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.21428571428571427,
            "acc_stderr": 0.03670066451047182,
            "acc_norm": 0.21428571428571427,
            "acc_norm_stderr": 0.03670066451047182
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.35947712418300654,
            "acc_stderr": 0.027475969910660952,
            "acc_norm": 0.35947712418300654,
            "acc_norm_stderr": 0.027475969910660952
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.38,
            "acc_stderr": 0.04878317312145632,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.04878317312145632
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5041322314049587,
            "acc_stderr": 0.04564198767432754,
            "acc_norm": 0.5041322314049587,
            "acc_norm_stderr": 0.04564198767432754
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.25,
            "acc_stderr": 0.03523807393012047,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.03523807393012047
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.32516339869281047,
            "acc_stderr": 0.018950886770806308,
            "acc_norm": 0.32516339869281047,
            "acc_norm_stderr": 0.018950886770806308
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2553191489361702,
            "acc_stderr": 0.02601199293090201,
            "acc_norm": 0.2553191489361702,
            "acc_norm_stderr": 0.02601199293090201
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.25,
            "acc_stderr": 0.04109974682633932,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04109974682633932
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.25,
            "acc_stderr": 0.029531221160930918,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.029531221160930918
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2659217877094972,
            "acc_stderr": 0.014776765066438888,
            "acc_norm": 0.2659217877094972,
            "acc_norm_stderr": 0.014776765066438888
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.2977941176470588,
            "acc_stderr": 0.027778298701545436,
            "acc_norm": 0.2977941176470588,
            "acc_norm_stderr": 0.027778298701545436
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.3510204081632653,
            "acc_stderr": 0.03055531675557364,
            "acc_norm": 0.3510204081632653,
            "acc_norm_stderr": 0.03055531675557364
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.4008438818565401,
            "acc_stderr": 0.031900803894732356,
            "acc_norm": 0.4008438818565401,
            "acc_norm_stderr": 0.031900803894732356
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.32529335071707954,
            "acc_stderr": 0.01196531153657153,
            "acc_norm": 0.32529335071707954,
            "acc_norm_stderr": 0.01196531153657153
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.35784313725490197,
            "acc_stderr": 0.033644872860882996,
            "acc_norm": 0.35784313725490197,
            "acc_norm_stderr": 0.033644872860882996
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.3393939393939394,
            "acc_stderr": 0.03697442205031596,
            "acc_norm": 0.3393939393939394,
            "acc_norm_stderr": 0.03697442205031596
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2802937576499388,
            "mc1_stderr": 0.01572313952460875,
            "mc2": 0.44624551916312966,
            "mc2_stderr": 0.015796983100879885
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.15820543093270367,
            "acc_stderr": 0.012546672797728753,
            "acc_norm": 0.179456906729634,
            "acc_norm_stderr": 0.013193062031400433
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "MNCLLM/Mistral-7B-KoCot-Platypus-4096",
        "model_sha": "bbb51b457200947001a0dc6e318a7d2d7e717197",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}
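
A minimal sketch of how one might consume a results file like the one above: it loads the JSON and reports an unweighted mean of `acc_norm` over the tasks that expose that metric. The file name `results.json` is an assumption for this example, and the unweighted mean is only an illustration, not the official leaderboard aggregation.

```python
import json

# Assumption: the JSON document above has been saved locally as "results.json".
with open("results.json", encoding="utf-8") as f:
    data = json.load(f)

# Collect acc_norm from every task that reports it
# (ko_truthfulqa_mc reports mc1/mc2 instead of acc/acc_norm).
scores = [
    metrics["acc_norm"]
    for metrics in data["results"].values()
    if "acc_norm" in metrics
]

print(f"tasks with acc_norm: {len(scores)}")
print(f"unweighted mean acc_norm: {sum(scores) / len(scores):.4f}")
```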