{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.3225255972696246,
            "acc_stderr": 0.01365998089427737,
            "acc_norm": 0.3703071672354949,
            "acc_norm_stderr": 0.01411129875167495
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.36058554072893845,
            "acc_stderr": 0.004791890625834196,
            "acc_norm": 0.4471220872336188,
            "acc_norm_stderr": 0.004961799358836431
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.4093567251461988,
            "acc_stderr": 0.03771283107626545,
            "acc_norm": 0.4093567251461988,
            "acc_norm_stderr": 0.03771283107626545
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.4174757281553398,
            "acc_stderr": 0.048828405482122375,
            "acc_norm": 0.4174757281553398,
            "acc_norm_stderr": 0.048828405482122375
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.41379310344827586,
            "acc_stderr": 0.017612204084663775,
            "acc_norm": 0.41379310344827586,
            "acc_norm_stderr": 0.017612204084663775
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.35555555555555557,
            "acc_stderr": 0.04135176749720386,
            "acc_norm": 0.35555555555555557,
            "acc_norm_stderr": 0.04135176749720386
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.31,
            "acc_stderr": 0.04648231987117316,
            "acc_norm": 0.31,
            "acc_norm_stderr": 0.04648231987117316
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.34893617021276596,
            "acc_stderr": 0.031158522131357766,
            "acc_norm": 0.34893617021276596,
            "acc_norm_stderr": 0.031158522131357766
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.3373493975903614,
            "acc_stderr": 0.03680783690727581,
            "acc_norm": 0.3373493975903614,
            "acc_norm_stderr": 0.03680783690727581
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.39228295819935693,
            "acc_stderr": 0.027731258647011994,
            "acc_norm": 0.39228295819935693,
            "acc_norm_stderr": 0.027731258647011994
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.42152466367713004,
            "acc_stderr": 0.03314190222110656,
            "acc_norm": 0.42152466367713004,
            "acc_norm_stderr": 0.03314190222110656
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.4122137404580153,
            "acc_stderr": 0.04317171194870254,
            "acc_norm": 0.4122137404580153,
            "acc_norm_stderr": 0.04317171194870254
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.42,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.42,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.5050505050505051,
            "acc_stderr": 0.035621707606254015,
            "acc_norm": 0.5050505050505051,
            "acc_norm_stderr": 0.035621707606254015
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.4689655172413793,
            "acc_stderr": 0.04158632762097828,
            "acc_norm": 0.4689655172413793,
            "acc_norm_stderr": 0.04158632762097828
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.19607843137254902,
            "acc_stderr": 0.039505818611799616,
            "acc_norm": 0.19607843137254902,
            "acc_norm_stderr": 0.039505818611799616
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.41596638655462187,
            "acc_stderr": 0.03201650100739615,
            "acc_norm": 0.41596638655462187,
            "acc_norm_stderr": 0.03201650100739615
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.3769230769230769,
            "acc_stderr": 0.024570975364225995,
            "acc_norm": 0.3769230769230769,
            "acc_norm_stderr": 0.024570975364225995
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.55,
            "acc_stderr": 0.04999999999999999,
            "acc_norm": 0.55,
            "acc_norm_stderr": 0.04999999999999999
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.42592592592592593,
            "acc_stderr": 0.0478034362693679,
            "acc_norm": 0.42592592592592593,
            "acc_norm_stderr": 0.0478034362693679
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.37438423645320196,
            "acc_stderr": 0.03405155380561952,
            "acc_norm": 0.37438423645320196,
            "acc_norm_stderr": 0.03405155380561952
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.3870967741935484,
            "acc_stderr": 0.027709359675032495,
            "acc_norm": 0.3870967741935484,
            "acc_norm_stderr": 0.027709359675032495
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.6410256410256411,
            "acc_stderr": 0.03142616993791923,
            "acc_norm": 0.6410256410256411,
            "acc_norm_stderr": 0.03142616993791923
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.44150943396226416,
            "acc_stderr": 0.03056159042673183,
            "acc_norm": 0.44150943396226416,
            "acc_norm_stderr": 0.03056159042673183
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.39090909090909093,
            "acc_stderr": 0.04673752333670238,
            "acc_norm": 0.39090909090909093,
            "acc_norm_stderr": 0.04673752333670238
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.3111111111111111,
            "acc_stderr": 0.02822644674968352,
            "acc_norm": 0.3111111111111111,
            "acc_norm_stderr": 0.02822644674968352
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.3509933774834437,
            "acc_stderr": 0.03896981964257375,
            "acc_norm": 0.3509933774834437,
            "acc_norm_stderr": 0.03896981964257375
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.4975124378109453,
            "acc_stderr": 0.03535490150137288,
            "acc_norm": 0.4975124378109453,
            "acc_norm_stderr": 0.03535490150137288
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.3468208092485549,
            "acc_stderr": 0.036291466701596636,
            "acc_norm": 0.3468208092485549,
            "acc_norm_stderr": 0.036291466701596636
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.30158730158730157,
            "acc_stderr": 0.023636975996101806,
            "acc_norm": 0.30158730158730157,
            "acc_norm_stderr": 0.023636975996101806
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.2708333333333333,
            "acc_stderr": 0.03716177437566017,
            "acc_norm": 0.2708333333333333,
            "acc_norm_stderr": 0.03716177437566017
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.29,
            "acc_stderr": 0.045604802157206845,
            "acc_norm": 0.29,
            "acc_norm_stderr": 0.045604802157206845
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.53,
            "acc_stderr": 0.050161355804659205,
            "acc_norm": 0.53,
            "acc_norm_stderr": 0.050161355804659205
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.4682080924855491,
            "acc_stderr": 0.02686462436675664,
            "acc_norm": 0.4682080924855491,
            "acc_norm_stderr": 0.02686462436675664
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.4110429447852761,
            "acc_stderr": 0.038656978537853624,
            "acc_norm": 0.4110429447852761,
            "acc_norm_stderr": 0.038656978537853624
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.39197530864197533,
            "acc_stderr": 0.027163686038271233,
            "acc_norm": 0.39197530864197533,
            "acc_norm_stderr": 0.027163686038271233
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.34,
            "acc_stderr": 0.04760952285695235,
            "acc_norm": 0.34,
            "acc_norm_stderr": 0.04760952285695235
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.41450777202072536,
            "acc_stderr": 0.03555300319557672,
            "acc_norm": 0.41450777202072536,
            "acc_norm_stderr": 0.03555300319557672
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.04434600701584925,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.04434600701584925
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.3963302752293578,
            "acc_stderr": 0.020971469947900525,
            "acc_norm": 0.3963302752293578,
            "acc_norm_stderr": 0.020971469947900525
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.3333333333333333,
            "acc_stderr": 0.04216370213557836,
            "acc_norm": 0.3333333333333333,
            "acc_norm_stderr": 0.04216370213557836
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.38235294117647056,
            "acc_stderr": 0.027826109307283683,
            "acc_norm": 0.38235294117647056,
            "acc_norm_stderr": 0.027826109307283683
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.44,
            "acc_stderr": 0.04988876515698589,
            "acc_norm": 0.44,
            "acc_norm_stderr": 0.04988876515698589
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.5702479338842975,
            "acc_stderr": 0.04519082021319773,
            "acc_norm": 0.5702479338842975,
            "acc_norm_stderr": 0.04519082021319773
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.34868421052631576,
            "acc_stderr": 0.03878139888797611,
            "acc_norm": 0.34868421052631576,
            "acc_norm_stderr": 0.03878139888797611
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.3366013071895425,
            "acc_stderr": 0.01911721391149516,
            "acc_norm": 0.3366013071895425,
            "acc_norm_stderr": 0.01911721391149516
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3120567375886525,
            "acc_stderr": 0.027640120545169927,
            "acc_norm": 0.3120567375886525,
            "acc_norm_stderr": 0.027640120545169927
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.2767857142857143,
            "acc_stderr": 0.04246624336697624,
            "acc_norm": 0.2767857142857143,
            "acc_norm_stderr": 0.04246624336697624
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.35648148148148145,
            "acc_stderr": 0.032664783315272714,
            "acc_norm": 0.35648148148148145,
            "acc_norm_stderr": 0.032664783315272714
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.2737430167597765,
            "acc_stderr": 0.014912413096372432,
            "acc_norm": 0.2737430167597765,
            "acc_norm_stderr": 0.014912413096372432
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.38,
            "acc_stderr": 0.048783173121456316,
            "acc_norm": 0.38,
            "acc_norm_stderr": 0.048783173121456316
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.43,
            "acc_stderr": 0.04975698519562428,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.04975698519562428
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.30514705882352944,
            "acc_stderr": 0.027971541370170598,
            "acc_norm": 0.30514705882352944,
            "acc_norm_stderr": 0.027971541370170598
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.3795918367346939,
            "acc_stderr": 0.031067211262872492,
            "acc_norm": 0.3795918367346939,
            "acc_norm_stderr": 0.031067211262872492
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.5569620253164557,
            "acc_stderr": 0.03233532777533484,
            "acc_norm": 0.5569620253164557,
            "acc_norm_stderr": 0.03233532777533484
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.288135593220339,
            "acc_stderr": 0.011567140661324563,
            "acc_norm": 0.288135593220339,
            "acc_norm_stderr": 0.011567140661324563
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.39215686274509803,
            "acc_stderr": 0.03426712349247272,
            "acc_norm": 0.39215686274509803,
            "acc_norm_stderr": 0.03426712349247272
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.42424242424242425,
            "acc_stderr": 0.038592681420702615,
            "acc_norm": 0.42424242424242425,
            "acc_norm_stderr": 0.038592681420702615
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.3108935128518972,
            "mc1_stderr": 0.016203316673559696,
            "mc2": 0.5058382452993124,
            "mc2_stderr": 0.015661402852943502
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4002361275088548,
            "acc_stderr": 0.016844693510505056,
            "acc_norm": 0.4911452184179457,
            "acc_norm_stderr": 0.01718765819933673
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "dltjdgh0928/lsh_finetune_v0.11",
        "model_sha": "37760736eef6004ed416dd27ffaaad7cfe5da106",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}