{
    "results": {
        "harness|ko_arc_challenge|25": {
            "acc": 0.6749146757679181,
            "acc_stderr": 0.013688147309729129,
            "acc_norm": 0.7303754266211604,
            "acc_norm_stderr": 0.012968040686869148
        },
        "harness|ko_hellaswag|10": {
            "acc": 0.4342760406293567,
            "acc_stderr": 0.004946485466544623,
            "acc_norm": 0.5824536944831706,
            "acc_norm_stderr": 0.004921466591335048
        },
        "harness|ko_mmlu_world_religions|5": {
            "acc": 0.6666666666666666,
            "acc_stderr": 0.03615507630310935,
            "acc_norm": 0.6666666666666666,
            "acc_norm_stderr": 0.03615507630310935
        },
        "harness|ko_mmlu_management|5": {
            "acc": 0.7281553398058253,
            "acc_stderr": 0.044052680241409216,
            "acc_norm": 0.7281553398058253,
            "acc_norm_stderr": 0.044052680241409216
        },
        "harness|ko_mmlu_miscellaneous|5": {
            "acc": 0.7075351213282248,
            "acc_stderr": 0.016267000684598645,
            "acc_norm": 0.7075351213282248,
            "acc_norm_stderr": 0.016267000684598645
        },
        "harness|ko_mmlu_anatomy|5": {
            "acc": 0.4444444444444444,
            "acc_stderr": 0.042925967182569816,
            "acc_norm": 0.4444444444444444,
            "acc_norm_stderr": 0.042925967182569816
        },
        "harness|ko_mmlu_abstract_algebra|5": {
            "acc": 0.36,
            "acc_stderr": 0.048241815132442176,
            "acc_norm": 0.36,
            "acc_norm_stderr": 0.048241815132442176
        },
        "harness|ko_mmlu_conceptual_physics|5": {
            "acc": 0.5191489361702127,
            "acc_stderr": 0.032662042990646796,
            "acc_norm": 0.5191489361702127,
            "acc_norm_stderr": 0.032662042990646796
        },
        "harness|ko_mmlu_virology|5": {
            "acc": 0.5481927710843374,
            "acc_stderr": 0.03874371556587952,
            "acc_norm": 0.5481927710843374,
            "acc_norm_stderr": 0.03874371556587952
        },
        "harness|ko_mmlu_philosophy|5": {
            "acc": 0.6141479099678456,
            "acc_stderr": 0.027648149599751464,
            "acc_norm": 0.6141479099678456,
            "acc_norm_stderr": 0.027648149599751464
        },
        "harness|ko_mmlu_human_aging|5": {
            "acc": 0.5829596412556054,
            "acc_stderr": 0.03309266936071721,
            "acc_norm": 0.5829596412556054,
            "acc_norm_stderr": 0.03309266936071721
        },
        "harness|ko_mmlu_human_sexuality|5": {
            "acc": 0.6335877862595419,
            "acc_stderr": 0.042258754519696386,
            "acc_norm": 0.6335877862595419,
            "acc_norm_stderr": 0.042258754519696386
        },
        "harness|ko_mmlu_medical_genetics|5": {
            "acc": 0.52,
            "acc_stderr": 0.050211673156867795,
            "acc_norm": 0.52,
            "acc_norm_stderr": 0.050211673156867795
        },
        "harness|ko_mmlu_high_school_geography|5": {
            "acc": 0.7323232323232324,
            "acc_stderr": 0.03154449888270286,
            "acc_norm": 0.7323232323232324,
            "acc_norm_stderr": 0.03154449888270286
        },
        "harness|ko_mmlu_electrical_engineering|5": {
            "acc": 0.5103448275862069,
            "acc_stderr": 0.04165774775728763,
            "acc_norm": 0.5103448275862069,
            "acc_norm_stderr": 0.04165774775728763
        },
        "harness|ko_mmlu_college_physics|5": {
            "acc": 0.3627450980392157,
            "acc_stderr": 0.047840607041056527,
            "acc_norm": 0.3627450980392157,
            "acc_norm_stderr": 0.047840607041056527
        },
        "harness|ko_mmlu_high_school_microeconomics|5": {
            "acc": 0.6176470588235294,
            "acc_stderr": 0.03156663099215415,
            "acc_norm": 0.6176470588235294,
            "acc_norm_stderr": 0.03156663099215415
        },
        "harness|ko_mmlu_high_school_macroeconomics|5": {
            "acc": 0.6,
            "acc_stderr": 0.02483881198803317,
            "acc_norm": 0.6,
            "acc_norm_stderr": 0.02483881198803317
        },
        "harness|ko_mmlu_computer_security|5": {
            "acc": 0.58,
            "acc_stderr": 0.04960449637488583,
            "acc_norm": 0.58,
            "acc_norm_stderr": 0.04960449637488583
        },
        "harness|ko_mmlu_global_facts|5": {
            "acc": 0.37,
            "acc_stderr": 0.048523658709391,
            "acc_norm": 0.37,
            "acc_norm_stderr": 0.048523658709391
        },
        "harness|ko_mmlu_jurisprudence|5": {
            "acc": 0.5740740740740741,
            "acc_stderr": 0.0478034362693679,
            "acc_norm": 0.5740740740740741,
            "acc_norm_stderr": 0.0478034362693679
        },
        "harness|ko_mmlu_high_school_chemistry|5": {
            "acc": 0.4236453201970443,
            "acc_stderr": 0.034767257476490385,
            "acc_norm": 0.4236453201970443,
            "acc_norm_stderr": 0.034767257476490385
        },
        "harness|ko_mmlu_high_school_biology|5": {
            "acc": 0.632258064516129,
            "acc_stderr": 0.02743086657997347,
            "acc_norm": 0.632258064516129,
            "acc_norm_stderr": 0.02743086657997347
        },
        "harness|ko_mmlu_marketing|5": {
            "acc": 0.7606837606837606,
            "acc_stderr": 0.027951826808924333,
            "acc_norm": 0.7606837606837606,
            "acc_norm_stderr": 0.027951826808924333
        },
        "harness|ko_mmlu_clinical_knowledge|5": {
            "acc": 0.6150943396226415,
            "acc_stderr": 0.02994649856769995,
            "acc_norm": 0.6150943396226415,
            "acc_norm_stderr": 0.02994649856769995
        },
        "harness|ko_mmlu_public_relations|5": {
            "acc": 0.5818181818181818,
            "acc_stderr": 0.04724577405731572,
            "acc_norm": 0.5818181818181818,
            "acc_norm_stderr": 0.04724577405731572
        },
        "harness|ko_mmlu_high_school_mathematics|5": {
            "acc": 0.31851851851851853,
            "acc_stderr": 0.028406533090608466,
            "acc_norm": 0.31851851851851853,
            "acc_norm_stderr": 0.028406533090608466
        },
        "harness|ko_mmlu_high_school_physics|5": {
            "acc": 0.3708609271523179,
            "acc_stderr": 0.03943966699183629,
            "acc_norm": 0.3708609271523179,
            "acc_norm_stderr": 0.03943966699183629
        },
        "harness|ko_mmlu_sociology|5": {
            "acc": 0.7164179104477612,
            "acc_stderr": 0.03187187537919796,
            "acc_norm": 0.7164179104477612,
            "acc_norm_stderr": 0.03187187537919796
        },
        "harness|ko_mmlu_college_medicine|5": {
            "acc": 0.4913294797687861,
            "acc_stderr": 0.038118909889404105,
            "acc_norm": 0.4913294797687861,
            "acc_norm_stderr": 0.038118909889404105
        },
        "harness|ko_mmlu_elementary_mathematics|5": {
            "acc": 0.41005291005291006,
            "acc_stderr": 0.02533120243894442,
            "acc_norm": 0.41005291005291006,
            "acc_norm_stderr": 0.02533120243894442
        },
        "harness|ko_mmlu_college_biology|5": {
            "acc": 0.5486111111111112,
            "acc_stderr": 0.04161402398403279,
            "acc_norm": 0.5486111111111112,
            "acc_norm_stderr": 0.04161402398403279
        },
        "harness|ko_mmlu_college_chemistry|5": {
            "acc": 0.43,
            "acc_stderr": 0.049756985195624284,
            "acc_norm": 0.43,
            "acc_norm_stderr": 0.049756985195624284
        },
        "harness|ko_mmlu_us_foreign_policy|5": {
            "acc": 0.79,
            "acc_stderr": 0.04093601807403326,
            "acc_norm": 0.79,
            "acc_norm_stderr": 0.04093601807403326
        },
        "harness|ko_mmlu_moral_disputes|5": {
            "acc": 0.5520231213872833,
            "acc_stderr": 0.02677299065336182,
            "acc_norm": 0.5520231213872833,
            "acc_norm_stderr": 0.02677299065336182
        },
        "harness|ko_mmlu_logical_fallacies|5": {
            "acc": 0.5337423312883436,
            "acc_stderr": 0.039194155450484096,
            "acc_norm": 0.5337423312883436,
            "acc_norm_stderr": 0.039194155450484096
        },
        "harness|ko_mmlu_prehistory|5": {
            "acc": 0.5925925925925926,
            "acc_stderr": 0.027339546640662734,
            "acc_norm": 0.5925925925925926,
            "acc_norm_stderr": 0.027339546640662734
        },
        "harness|ko_mmlu_college_mathematics|5": {
            "acc": 0.3,
            "acc_stderr": 0.046056618647183814,
            "acc_norm": 0.3,
            "acc_norm_stderr": 0.046056618647183814
        },
        "harness|ko_mmlu_high_school_government_and_politics|5": {
            "acc": 0.6839378238341969,
            "acc_stderr": 0.033553973696861736,
            "acc_norm": 0.6839378238341969,
            "acc_norm_stderr": 0.033553973696861736
        },
        "harness|ko_mmlu_econometrics|5": {
            "acc": 0.37719298245614036,
            "acc_stderr": 0.04559522141958215,
            "acc_norm": 0.37719298245614036,
            "acc_norm_stderr": 0.04559522141958215
        },
        "harness|ko_mmlu_high_school_psychology|5": {
            "acc": 0.7651376146788991,
            "acc_stderr": 0.018175110510343602,
            "acc_norm": 0.7651376146788991,
            "acc_norm_stderr": 0.018175110510343602
        },
        "harness|ko_mmlu_formal_logic|5": {
            "acc": 0.36507936507936506,
            "acc_stderr": 0.04306241259127154,
            "acc_norm": 0.36507936507936506,
            "acc_norm_stderr": 0.04306241259127154
        },
        "harness|ko_mmlu_nutrition|5": {
            "acc": 0.6013071895424836,
            "acc_stderr": 0.02803609227389176,
            "acc_norm": 0.6013071895424836,
            "acc_norm_stderr": 0.02803609227389176
        },
        "harness|ko_mmlu_business_ethics|5": {
            "acc": 0.58,
            "acc_stderr": 0.049604496374885836,
            "acc_norm": 0.58,
            "acc_norm_stderr": 0.049604496374885836
        },
        "harness|ko_mmlu_international_law|5": {
            "acc": 0.6694214876033058,
            "acc_stderr": 0.042943408452120926,
            "acc_norm": 0.6694214876033058,
            "acc_norm_stderr": 0.042943408452120926
        },
        "harness|ko_mmlu_astronomy|5": {
            "acc": 0.5986842105263158,
            "acc_stderr": 0.039889037033362836,
            "acc_norm": 0.5986842105263158,
            "acc_norm_stderr": 0.039889037033362836
        },
        "harness|ko_mmlu_professional_psychology|5": {
            "acc": 0.48856209150326796,
            "acc_stderr": 0.02022254151561086,
            "acc_norm": 0.48856209150326796,
            "acc_norm_stderr": 0.02022254151561086
        },
        "harness|ko_mmlu_professional_accounting|5": {
            "acc": 0.3900709219858156,
            "acc_stderr": 0.029097675599463926,
            "acc_norm": 0.3900709219858156,
            "acc_norm_stderr": 0.029097675599463926
        },
        "harness|ko_mmlu_machine_learning|5": {
            "acc": 0.4375,
            "acc_stderr": 0.04708567521880525,
            "acc_norm": 0.4375,
            "acc_norm_stderr": 0.04708567521880525
        },
        "harness|ko_mmlu_high_school_statistics|5": {
            "acc": 0.5092592592592593,
            "acc_stderr": 0.034093869469927006,
            "acc_norm": 0.5092592592592593,
            "acc_norm_stderr": 0.034093869469927006
        },
        "harness|ko_mmlu_moral_scenarios|5": {
            "acc": 0.26927374301675977,
            "acc_stderr": 0.014835616582882617,
            "acc_norm": 0.26927374301675977,
            "acc_norm_stderr": 0.014835616582882617
        },
        "harness|ko_mmlu_college_computer_science|5": {
            "acc": 0.41,
            "acc_stderr": 0.04943110704237102,
            "acc_norm": 0.41,
            "acc_norm_stderr": 0.04943110704237102
        },
        "harness|ko_mmlu_high_school_computer_science|5": {
            "acc": 0.64,
            "acc_stderr": 0.04824181513244218,
            "acc_norm": 0.64,
            "acc_norm_stderr": 0.04824181513244218
        },
        "harness|ko_mmlu_professional_medicine|5": {
            "acc": 0.5661764705882353,
            "acc_stderr": 0.03010563657001663,
            "acc_norm": 0.5661764705882353,
            "acc_norm_stderr": 0.03010563657001663
        },
        "harness|ko_mmlu_security_studies|5": {
            "acc": 0.5795918367346938,
            "acc_stderr": 0.031601069934496004,
            "acc_norm": 0.5795918367346938,
            "acc_norm_stderr": 0.031601069934496004
        },
        "harness|ko_mmlu_high_school_world_history|5": {
            "acc": 0.679324894514768,
            "acc_stderr": 0.03038193194999041,
            "acc_norm": 0.679324894514768,
            "acc_norm_stderr": 0.03038193194999041
        },
        "harness|ko_mmlu_professional_law|5": {
            "acc": 0.3970013037809648,
            "acc_stderr": 0.012496346982909556,
            "acc_norm": 0.3970013037809648,
            "acc_norm_stderr": 0.012496346982909556
        },
        "harness|ko_mmlu_high_school_us_history|5": {
            "acc": 0.5931372549019608,
            "acc_stderr": 0.03447891136353382,
            "acc_norm": 0.5931372549019608,
            "acc_norm_stderr": 0.03447891136353382
        },
        "harness|ko_mmlu_high_school_european_history|5": {
            "acc": 0.593939393939394,
            "acc_stderr": 0.03834816355401181,
            "acc_norm": 0.593939393939394,
            "acc_norm_stderr": 0.03834816355401181
        },
        "harness|ko_truthfulqa_mc|0": {
            "mc1": 0.49938800489596086,
            "mc1_stderr": 0.01750348793889251,
            "mc2": 0.6218423545848288,
            "mc2_stderr": 0.014757889508056288
        },
        "harness|ko_commongen_v2|2": {
            "acc": 0.4344746162927981,
            "acc_stderr": 0.017042098620824935,
            "acc_norm": 0.4781582054309327,
            "acc_norm_stderr": 0.017173944474294375
        }
    },
    "versions": {
        "all": 0,
        "harness|ko_arc_challenge|25": 0,
        "harness|ko_hellaswag|10": 0,
        "harness|ko_mmlu_world_religions|5": 1,
        "harness|ko_mmlu_management|5": 1,
        "harness|ko_mmlu_miscellaneous|5": 1,
        "harness|ko_mmlu_anatomy|5": 1,
        "harness|ko_mmlu_abstract_algebra|5": 1,
        "harness|ko_mmlu_conceptual_physics|5": 1,
        "harness|ko_mmlu_virology|5": 1,
        "harness|ko_mmlu_philosophy|5": 1,
        "harness|ko_mmlu_human_aging|5": 1,
        "harness|ko_mmlu_human_sexuality|5": 1,
        "harness|ko_mmlu_medical_genetics|5": 1,
        "harness|ko_mmlu_high_school_geography|5": 1,
        "harness|ko_mmlu_electrical_engineering|5": 1,
        "harness|ko_mmlu_college_physics|5": 1,
        "harness|ko_mmlu_high_school_microeconomics|5": 1,
        "harness|ko_mmlu_high_school_macroeconomics|5": 1,
        "harness|ko_mmlu_computer_security|5": 1,
        "harness|ko_mmlu_global_facts|5": 1,
        "harness|ko_mmlu_jurisprudence|5": 1,
        "harness|ko_mmlu_high_school_chemistry|5": 1,
        "harness|ko_mmlu_high_school_biology|5": 1,
        "harness|ko_mmlu_marketing|5": 1,
        "harness|ko_mmlu_clinical_knowledge|5": 1,
        "harness|ko_mmlu_public_relations|5": 1,
        "harness|ko_mmlu_high_school_mathematics|5": 1,
        "harness|ko_mmlu_high_school_physics|5": 1,
        "harness|ko_mmlu_sociology|5": 1,
        "harness|ko_mmlu_college_medicine|5": 1,
        "harness|ko_mmlu_elementary_mathematics|5": 1,
        "harness|ko_mmlu_college_biology|5": 1,
        "harness|ko_mmlu_college_chemistry|5": 1,
        "harness|ko_mmlu_us_foreign_policy|5": 1,
        "harness|ko_mmlu_moral_disputes|5": 1,
        "harness|ko_mmlu_logical_fallacies|5": 1,
        "harness|ko_mmlu_prehistory|5": 1,
        "harness|ko_mmlu_college_mathematics|5": 1,
        "harness|ko_mmlu_high_school_government_and_politics|5": 1,
        "harness|ko_mmlu_econometrics|5": 1,
        "harness|ko_mmlu_high_school_psychology|5": 1,
        "harness|ko_mmlu_formal_logic|5": 1,
        "harness|ko_mmlu_nutrition|5": 1,
        "harness|ko_mmlu_business_ethics|5": 1,
        "harness|ko_mmlu_international_law|5": 1,
        "harness|ko_mmlu_astronomy|5": 1,
        "harness|ko_mmlu_professional_psychology|5": 1,
        "harness|ko_mmlu_professional_accounting|5": 1,
        "harness|ko_mmlu_machine_learning|5": 1,
        "harness|ko_mmlu_high_school_statistics|5": 1,
        "harness|ko_mmlu_moral_scenarios|5": 1,
        "harness|ko_mmlu_college_computer_science|5": 1,
        "harness|ko_mmlu_high_school_computer_science|5": 1,
        "harness|ko_mmlu_professional_medicine|5": 1,
        "harness|ko_mmlu_security_studies|5": 1,
        "harness|ko_mmlu_high_school_world_history|5": 1,
        "harness|ko_mmlu_professional_law|5": 1,
        "harness|ko_mmlu_high_school_us_history|5": 1,
        "harness|ko_mmlu_high_school_european_history|5": 1,
        "harness|ko_truthfulqa_mc|0": 0,
        "harness|ko_commongen_v2|2": 1
    },
    "config_general": {
        "model_name": "Deepnoid/deep-solar-v2.0.7",
        "model_sha": "e3c320ee6a4e5b554d34e1e9b1c299e96a6fecf8",
        "model_dtype": "torch.float16",
        "lighteval_sha": "",
        "num_few_shot_default": 0,
        "num_fewshot_seeds": 1,
        "override_batch_size": 1,
        "max_samples": null
    }
}