{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3225255972696246,
            "acc_stderr": 0.013659980894277373,
            "acc_norm": 0.38054607508532423,
            "acc_norm_stderr": 0.014188277712349814
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4010157339175463,
            "acc_stderr": 0.004891025533633028,
            "acc_norm": 0.5273849830711014,
            "acc_norm_stderr": 0.004982291744069926
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.36257309941520466,
            "acc_stderr": 0.0368713061556206,
            "acc_norm": 0.36257309941520466,
            "acc_norm_stderr": 0.0368713061556206
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.21359223300970873,
            "acc_stderr": 0.040580420156460344,
            "acc_norm": 0.21359223300970873,
            "acc_norm_stderr": 0.040580420156460344
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.34099616858237547,
            "acc_stderr": 0.016951781383223313,
            "acc_norm": 0.34099616858237547,
            "acc_norm_stderr": 0.016951781383223313
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4074074074074074,
            "acc_stderr": 0.04244633238353228,
            "acc_norm": 0.4074074074074074,
            "acc_norm_stderr": 0.04244633238353228
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.25,
            "acc_stderr": 0.04351941398892446,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04351941398892446
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.2170212765957447,
            "acc_stderr": 0.02694748312149625,
            "acc_norm": 0.2170212765957447,
            "acc_norm_stderr": 0.02694748312149625
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.22289156626506024,
            "acc_stderr": 0.03240004825594688,
            "acc_norm": 0.22289156626506024,
            "acc_norm_stderr": 0.03240004825594688
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.31511254019292606,
            "acc_stderr": 0.026385273703464496,
            "acc_norm": 0.31511254019292606,
            "acc_norm_stderr": 0.026385273703464496
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.2914798206278027,
            "acc_stderr": 0.030500283176545906,
            "acc_norm": 0.2914798206278027,
            "acc_norm_stderr": 0.030500283176545906
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.3435114503816794,
            "acc_stderr": 0.041649760719448786,
            "acc_norm": 0.3435114503816794,
            "acc_norm_stderr": 0.041649760719448786
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909283,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909283
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.2474747474747475,
            "acc_stderr": 0.03074630074212451,
            "acc_norm": 0.2474747474747475,
            "acc_norm_stderr": 0.03074630074212451
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.3448275862068966,
            "acc_stderr": 0.03960933549451207,
            "acc_norm": 0.3448275862068966,
            "acc_norm_stderr": 0.03960933549451207
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.16666666666666666,
            "acc_stderr": 0.03708284662416544,
            "acc_norm": 0.16666666666666666,
            "acc_norm_stderr": 0.03708284662416544
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.24789915966386555,
            "acc_stderr": 0.028047967224176892,
            "acc_norm": 0.24789915966386555,
            "acc_norm_stderr": 0.028047967224176892
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.19743589743589743,
            "acc_stderr": 0.02018264696867484,
            "acc_norm": 0.19743589743589743,
            "acc_norm_stderr": 0.02018264696867484
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909282,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909282
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.26851851851851855,
            "acc_stderr": 0.04284467968052191,
            "acc_norm": 0.26851851851851855,
            "acc_norm_stderr": 0.04284467968052191
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.29064039408866993,
            "acc_stderr": 0.031947400722655395,
            "acc_norm": 0.29064039408866993,
            "acc_norm_stderr": 0.031947400722655395
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.26129032258064516,
            "acc_stderr": 0.024993053397764815,
            "acc_norm": 0.26129032258064516,
            "acc_norm_stderr": 0.024993053397764815
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.34615384615384615,
            "acc_stderr": 0.0311669573672359,
            "acc_norm": 0.34615384615384615,
            "acc_norm_stderr": 0.0311669573672359
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.2830188679245283,
            "acc_stderr": 0.027724236492700897,
            "acc_norm": 0.2830188679245283,
            "acc_norm_stderr": 0.027724236492700897
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.2636363636363636,
            "acc_stderr": 0.04220224692971987,
            "acc_norm": 0.2636363636363636,
            "acc_norm_stderr": 0.04220224692971987
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.25555555555555554,
            "acc_stderr": 0.026593939101844072,
            "acc_norm": 0.25555555555555554,
            "acc_norm_stderr": 0.026593939101844072
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.2781456953642384,
            "acc_stderr": 0.03658603262763743,
            "acc_norm": 0.2781456953642384,
            "acc_norm_stderr": 0.03658603262763743
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.2835820895522388,
            "acc_stderr": 0.03187187537919798,
            "acc_norm": 0.2835820895522388,
            "acc_norm_stderr": 0.03187187537919798
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.2543352601156069,
            "acc_stderr": 0.0332055644308557,
            "acc_norm": 0.2543352601156069,
            "acc_norm_stderr": 0.0332055644308557
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.25396825396825395,
            "acc_stderr": 0.022418042891113932,
            "acc_norm": 0.25396825396825395,
            "acc_norm_stderr": 0.022418042891113932
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2708333333333333,
            "acc_stderr": 0.037161774375660164,
            "acc_norm": 0.2708333333333333,
            "acc_norm_stderr": 0.037161774375660164
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.14,
            "acc_stderr": 0.03487350880197771,
            "acc_norm": 0.14,
            "acc_norm_stderr": 0.03487350880197771
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.24,
            "acc_stderr": 0.04292346959909284,
            "acc_norm": 0.24,
            "acc_norm_stderr": 0.04292346959909284
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.27167630057803466,
            "acc_stderr": 0.023948512905468376,
            "acc_norm": 0.27167630057803466,
            "acc_norm_stderr": 0.023948512905468376
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.2822085889570552,
            "acc_stderr": 0.03536117886664743,
            "acc_norm": 0.2822085889570552,
            "acc_norm_stderr": 0.03536117886664743
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.32098765432098764,
            "acc_stderr": 0.02597656601086274,
            "acc_norm": 0.32098765432098764,
            "acc_norm_stderr": 0.02597656601086274
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.33,
            "acc_stderr": 0.04725815626252606,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.04725815626252606
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.2538860103626943,
            "acc_stderr": 0.03141024780565318,
            "acc_norm": 0.2538860103626943,
            "acc_norm_stderr": 0.03141024780565318
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2631578947368421,
            "acc_stderr": 0.041424397194893624,
            "acc_norm": 0.2631578947368421,
            "acc_norm_stderr": 0.041424397194893624
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.26422018348623855,
            "acc_stderr": 0.018904164171510196,
            "acc_norm": 0.26422018348623855,
            "acc_norm_stderr": 0.018904164171510196
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.19047619047619047,
            "acc_stderr": 0.03512207412302052,
            "acc_norm": 0.19047619047619047,
            "acc_norm_stderr": 0.03512207412302052
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.28104575163398693,
            "acc_stderr": 0.02573885479781873,
            "acc_norm": 0.28104575163398693,
            "acc_norm_stderr": 0.02573885479781873
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.27,
            "acc_stderr": 0.0446196043338474,
            "acc_norm": 0.27,
            "acc_norm_stderr": 0.0446196043338474
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.3884297520661157,
            "acc_stderr": 0.04449270350068382,
            "acc_norm": 0.3884297520661157,
            "acc_norm_stderr": 0.04449270350068382
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.23684210526315788,
            "acc_stderr": 0.03459777606810537,
            "acc_norm": 0.23684210526315788,
            "acc_norm_stderr": 0.03459777606810537
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.2875816993464052,
            "acc_stderr": 0.018311653053648222,
            "acc_norm": 0.2875816993464052,
            "acc_norm_stderr": 0.018311653053648222
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.2695035460992908,
            "acc_stderr": 0.026469036818590634,
            "acc_norm": 0.2695035460992908,
            "acc_norm_stderr": 0.026469036818590634
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.25,
            "acc_stderr": 0.04109974682633932,
            "acc_norm": 0.25,
            "acc_norm_stderr": 0.04109974682633932
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.26851851851851855,
            "acc_stderr": 0.030225226160012393,
            "acc_norm": 0.26851851851851855,
            "acc_norm_stderr": 0.030225226160012393
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2435754189944134,
            "acc_stderr": 0.01435591196476786,
            "acc_norm": 0.2435754189944134,
            "acc_norm_stderr": 0.01435591196476786
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.41,
            "acc_stderr": 0.049431107042371025,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.049431107042371025
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.18382352941176472,
            "acc_stderr": 0.023529242185193106,
            "acc_norm": 0.18382352941176472,
            "acc_norm_stderr": 0.023529242185193106
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.27755102040816326,
            "acc_stderr": 0.02866685779027465,
            "acc_norm": 0.27755102040816326,
            "acc_norm_stderr": 0.02866685779027465
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.31223628691983124,
            "acc_stderr": 0.03016513786784701,
            "acc_norm": 0.31223628691983124,
            "acc_norm_stderr": 0.03016513786784701
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.27183833116036504,
            "acc_stderr": 0.011363135278651411,
            "acc_norm": 0.27183833116036504,
            "acc_norm_stderr": 0.011363135278651411
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.2696078431372549,
            "acc_stderr": 0.031145570659486782,
            "acc_norm": 0.2696078431372549,
            "acc_norm_stderr": 0.031145570659486782
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.2909090909090909,
            "acc_stderr": 0.03546563019624336,
            "acc_norm": 0.2909090909090909,
            "acc_norm_stderr": 0.03546563019624336
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.27050183598531213,
            "mc1_stderr": 0.015550778332842888,
            "mc2": 0.4179272559309221,
            "mc2_stderr": 0.01511034311354295
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.3227699530516432,
            "acc_stderr": 0.016026912972697233,
            "acc_norm": 0.3884976525821596,
            "acc_norm_stderr": 0.01670815454631332
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "lcw99/llama2-ko-chang-instruct-chat",
        "model_sha": "c4acb327349f29d8106c1d22f8d8feed643fe9b7",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}