{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3993174061433447,
            "acc_stderr": 0.014312094557946704,
            "acc_norm": 0.4658703071672355,
            "acc_norm_stderr": 0.014577311315231099
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.42561242780322645,
            "acc_stderr": 0.00493425039087978,
            "acc_norm": 0.5683130850428202,
            "acc_norm_stderr": 0.004942990623131125
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.5087719298245614,
            "acc_stderr": 0.038342347441649924,
            "acc_norm": 0.5087719298245614,
            "acc_norm_stderr": 0.038342347441649924
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.5436893203883495,
            "acc_stderr": 0.049318019942204146,
            "acc_norm": 0.5436893203883495,
            "acc_norm_stderr": 0.049318019942204146
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.5568326947637292,
            "acc_stderr": 0.017764085035348418,
            "acc_norm": 0.5568326947637292,
            "acc_norm_stderr": 0.017764085035348418
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4666666666666667,
            "acc_stderr": 0.04309732901036354,
            "acc_norm": 0.4666666666666667,
            "acc_norm_stderr": 0.04309732901036354
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.4127659574468085,
            "acc_stderr": 0.03218471141400351,
            "acc_norm": 0.4127659574468085,
            "acc_norm_stderr": 0.03218471141400351
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.42771084337349397,
            "acc_stderr": 0.038515976837185335,
            "acc_norm": 0.42771084337349397,
            "acc_norm_stderr": 0.038515976837185335
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.5080385852090032,
            "acc_stderr": 0.028394421370984545,
            "acc_norm": 0.5080385852090032,
            "acc_norm_stderr": 0.028394421370984545
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.515695067264574,
            "acc_stderr": 0.0335412657542081,
            "acc_norm": 0.515695067264574,
            "acc_norm_stderr": 0.0335412657542081
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.48854961832061067,
            "acc_stderr": 0.04384140024078016,
            "acc_norm": 0.48854961832061067,
            "acc_norm_stderr": 0.04384140024078016
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.4,
            "acc_stderr": 0.049236596391733084,
            "acc_norm": 0.4,
            "acc_norm_stderr": 0.049236596391733084
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5606060606060606,
            "acc_stderr": 0.03536085947529482,
            "acc_norm": 0.5606060606060606,
            "acc_norm_stderr": 0.03536085947529482
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.3793103448275862,
            "acc_stderr": 0.04043461861916747,
            "acc_norm": 0.3793103448275862,
            "acc_norm_stderr": 0.04043461861916747
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.22549019607843138,
            "acc_stderr": 0.041583075330832865,
            "acc_norm": 0.22549019607843138,
            "acc_norm_stderr": 0.041583075330832865
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.4831932773109244,
            "acc_stderr": 0.03246013680375308,
            "acc_norm": 0.4831932773109244,
            "acc_norm_stderr": 0.03246013680375308
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.4358974358974359,
            "acc_stderr": 0.025141801511177498,
            "acc_norm": 0.4358974358974359,
            "acc_norm_stderr": 0.025141801511177498
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.5,
            "acc_stderr": 0.050251890762960605,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.050251890762960605
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.37,
            "acc_stderr": 0.04852365870939099,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.04852365870939099
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.49074074074074076,
            "acc_stderr": 0.04832853553437055,
            "acc_norm": 0.49074074074074076,
            "acc_norm_stderr": 0.04832853553437055
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.43349753694581283,
            "acc_stderr": 0.034867317274198714,
            "acc_norm": 0.43349753694581283,
            "acc_norm_stderr": 0.034867317274198714
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.4645161290322581,
            "acc_stderr": 0.028372287797962952,
            "acc_norm": 0.4645161290322581,
            "acc_norm_stderr": 0.028372287797962952
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6025641025641025,
            "acc_stderr": 0.03205953453789293,
            "acc_norm": 0.6025641025641025,
            "acc_norm_stderr": 0.03205953453789293
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.4490566037735849,
            "acc_stderr": 0.030612730713641092,
            "acc_norm": 0.4490566037735849,
            "acc_norm_stderr": 0.030612730713641092
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5454545454545454,
            "acc_stderr": 0.04769300568972744,
            "acc_norm": 0.5454545454545454,
            "acc_norm_stderr": 0.04769300568972744
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.26666666666666666,
            "acc_stderr": 0.026962424325073835,
            "acc_norm": 0.26666666666666666,
            "acc_norm_stderr": 0.026962424325073835
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.31125827814569534,
            "acc_stderr": 0.03780445850526733,
            "acc_norm": 0.31125827814569534,
            "acc_norm_stderr": 0.03780445850526733
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.5671641791044776,
            "acc_stderr": 0.03503490923673282,
            "acc_norm": 0.5671641791044776,
            "acc_norm_stderr": 0.03503490923673282
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4277456647398844,
            "acc_stderr": 0.03772446857518027,
            "acc_norm": 0.4277456647398844,
            "acc_norm_stderr": 0.03772446857518027
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.2804232804232804,
            "acc_stderr": 0.02313528797432563,
            "acc_norm": 0.2804232804232804,
            "acc_norm_stderr": 0.02313528797432563
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.3611111111111111,
            "acc_stderr": 0.040166600304512336,
            "acc_norm": 0.3611111111111111,
            "acc_norm_stderr": 0.040166600304512336
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.32,
            "acc_stderr": 0.046882617226215034,
            "acc_norm": 0.32,
            "acc_norm_stderr": 0.046882617226215034
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.62,
            "acc_stderr": 0.04878317312145634,
            "acc_norm": 0.62,
            "acc_norm_stderr": 0.04878317312145634
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.4884393063583815,
            "acc_stderr": 0.02691189868637792,
            "acc_norm": 0.4884393063583815,
            "acc_norm_stderr": 0.02691189868637792
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.49693251533742333,
            "acc_stderr": 0.03928297078179662,
            "acc_norm": 0.49693251533742333,
            "acc_norm_stderr": 0.03928297078179662
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.5030864197530864,
            "acc_stderr": 0.02782021415859437,
            "acc_norm": 0.5030864197530864,
            "acc_norm_stderr": 0.02782021415859437
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.5595854922279793,
            "acc_stderr": 0.03582724530036094,
            "acc_norm": 0.5595854922279793,
            "acc_norm_stderr": 0.03582724530036094
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.2543859649122807,
            "acc_stderr": 0.040969851398436695,
            "acc_norm": 0.2543859649122807,
            "acc_norm_stderr": 0.040969851398436695
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.5963302752293578,
            "acc_stderr": 0.02103570485657497,
            "acc_norm": 0.5963302752293578,
            "acc_norm_stderr": 0.02103570485657497
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3412698412698413,
            "acc_stderr": 0.04240799327574924,
            "acc_norm": 0.3412698412698413,
            "acc_norm_stderr": 0.04240799327574924
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.42483660130718953,
            "acc_stderr": 0.028304576673141107,
            "acc_norm": 0.42483660130718953,
            "acc_norm_stderr": 0.028304576673141107
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.38,
            "acc_stderr": 0.04878317312145633,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.04878317312145633
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6198347107438017,
            "acc_stderr": 0.04431324501968431,
            "acc_norm": 0.6198347107438017,
            "acc_norm_stderr": 0.04431324501968431
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.375,
            "acc_stderr": 0.039397364351956274,
            "acc_norm": 0.375,
            "acc_norm_stderr": 0.039397364351956274
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3888888888888889,
            "acc_stderr": 0.019722058939618068,
            "acc_norm": 0.3888888888888889,
            "acc_norm_stderr": 0.019722058939618068
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.33687943262411346,
            "acc_stderr": 0.02819553487396673,
            "acc_norm": 0.33687943262411346,
            "acc_norm_stderr": 0.02819553487396673
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.22321428571428573,
            "acc_stderr": 0.039523019677025116,
            "acc_norm": 0.22321428571428573,
            "acc_norm_stderr": 0.039523019677025116
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.36574074074074076,
            "acc_stderr": 0.03284738857647206,
            "acc_norm": 0.36574074074074076,
            "acc_norm_stderr": 0.03284738857647206
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2424581005586592,
            "acc_stderr": 0.01433352205921789,
            "acc_norm": 0.2424581005586592,
            "acc_norm_stderr": 0.01433352205921789
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.33,
            "acc_stderr": 0.047258156262526045,
            "acc_norm": 0.33,
            "acc_norm_stderr": 0.047258156262526045
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.43,
            "acc_stderr": 0.049756985195624284,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.049756985195624284
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.4227941176470588,
            "acc_stderr": 0.030008562845003472,
            "acc_norm": 0.4227941176470588,
            "acc_norm_stderr": 0.030008562845003472
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5102040816326531,
            "acc_stderr": 0.03200255347893783,
            "acc_norm": 0.5102040816326531,
            "acc_norm_stderr": 0.03200255347893783
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.6329113924050633,
            "acc_stderr": 0.03137624072561619,
            "acc_norm": 0.6329113924050633,
            "acc_norm_stderr": 0.03137624072561619
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.35919165580182527,
            "acc_stderr": 0.01225338618758425,
            "acc_norm": 0.35919165580182527,
            "acc_norm_stderr": 0.01225338618758425
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5,
            "acc_stderr": 0.03509312031717982,
            "acc_norm": 0.5,
            "acc_norm_stderr": 0.03509312031717982
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.5393939393939394,
            "acc_stderr": 0.03892207016552012,
            "acc_norm": 0.5393939393939394,
            "acc_norm_stderr": 0.03892207016552012
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.2631578947368421,
            "mc1_stderr": 0.015415241740237033,
            "mc2": 0.42117238466385504,
            "mc2_stderr": 0.01460128908268072
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4769775678866588,
            "acc_stderr": 0.017172121546727637,
            "acc_norm": 0.5489964580873672,
            "acc_norm_stderr": 0.017107618859549346
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.1",
        "model_sha": "48bfd4b2fa3fbb12ba5cf4a7b07195f65c998aa7",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}